diff --git a/.lycheeignore b/.lycheeignore index a9a04527..dfbce3c0 100644 --- a/.lycheeignore +++ b/.lycheeignore @@ -1,2 +1,7 @@ http://localhost* -https://localhost* \ No newline at end of file +https://localhost* +# Testnet explorer returns 401 but links are valid +https://testnet-explorer.tangle.tools/* +# Internal MDX routes without .mdx extension (valid Nextra routes, not filesystem paths) +.*/claim-airdrop$ +.*/join_operator/join$ \ No newline at end of file diff --git a/CONTENT_AUDIT.md b/CONTENT_AUDIT.md new file mode 100644 index 00000000..d8c82c4f --- /dev/null +++ b/CONTENT_AUDIT.md @@ -0,0 +1,68 @@ +# Documentation Audit (v2 Migration) + +Last updated: 2026-01-19 + +## High-Priority Fixes + +| Area | File | Status | Notes / Needed Inputs | +| ---- | ---- | ------ | --------------------- | +| Network parameters | `pages/network/network-parameters.mdx` | Updated (testnet + sources) | Mainnet/local addresses still needed. | +| Token allocation | `pages/network/tokenomics/allocation.mdx` | Updated (tnt-core values) | Need distribution contract addresses by environment. | +| Inflation model | `pages/network/tokenomics/inflation.mdx` | Updated | Reflects v2 budgeted incentives; verify against current policy. | +| Operator incentives | `pages/network/incentives-operators.mdx` | Updated | Confirm default fee split and operator commission rules. | +| Developer incentives | `pages/network/incentives-developers.mdx` | Updated | Confirm metric weights and eligibility rules. | +| Participation credits | `pages/network/points-mechanics.mdx` | Updated (testnet) | Mainnet/local addresses still needed. | +| Launch page | `pages/network/launch.mdx` | Needs investigation | Reported runtime error; content updated with v2 links. | +| Governance docs | `pages/network/governance.mdx` | Removed | Replace with new governance system when ready. | +| Differences doc | `pages/network/overview.mdx` | Updated | Content merged; old doc removed. | +| Use cases | `pages/vision/use-cases.mdx` | Updated | Review for AI alignment and add any missing product references. | + +## Follow-Up Candidates + +| Area | File | Status | Notes | +| ---- | ---- | ------ | ----- | +| Token usage + gas model | `pages/network/tokenomics/usage.mdx` | Updated | TNT is not the gas token; usage text cleaned up. | + +## Operator Section Cleanup (v2) + +| Task | Files | Status | Notes | +| ---- | ----- | ------ | ----- | +| Remove v1 node docs | `pages/operators/node-basics/*` | Done | Deleted (dirs removed). | +| Remove node monitoring docs | `pages/operators/monitoring/*` | Done | Deleted (dirs removed). | +| Remove operator onboarding page | `pages/operators/onboarding.mdx` | Done | Deleted. | +| Remove operator Tangle AVS docs | `pages/operators/tangle-avs/*` | Done | Deleted (dirs removed). | +| Update operator entry points | `pages/operators/_meta.ts`, `components/OperatorIntro.tsx` | Done | Now points to Blueprint Manager + operator registration. | +| Align operator docs to Blueprint Manager | `pages/operators/benchmarking.mdx`, `pages/operators/operator/join_operator/*` | Done | Updated to CLI + pricing-engine workflow. | +| Audit Blueprint Manager setup details | `pages/operators/manager/*` | Pending | Verify RPC/WSS endpoints, chain IDs, and runtime commands are v2. | +| Review remaining operator pages | `pages/operators/quality-of-service.mdx`, `pages/operators/pricing/overview.mdx` | Pending | Ensure no stale commands or v1 references. 
| + +## Economic Security Cleanup (Stake) + +| Task | Files | Status | Notes | +| ---- | ----- | ------ | ----- | +| Remove liquid staking docs | `pages/staking/lrt-*`, `pages/staking/lrt_developers/*` | Done | Deleted. | +| Remove liquid staking docs | `pages/staking/lst-*`, `pages/staking/join_a_pool/*`, `pages/staking/create_a_pool/*`, `pages/staking/lst_developers/*` | Done | Deleted. | +| Remove PolkadotJS stake docs | `pages/staking/how_to_stake/how_to_stake_polkadotjs/*`, `pages/staking/credits/claiming.mdx`, `pages/staking/nominator.mdx` | Done | Deleted. | +| Remove outdated diagrams | `public/images/liquid-staking/*`, `public/images/staking/how-to-stake-polkadotjs/*`, `public/images/staking-workflow.png` | Done | Deleted. | +| Update stake intro + concepts | `pages/staking/introduction.mdx`, `pages/staking/how-staking-works.mdx`, `pages/staking/staking-concepts.mdx` | Done | Updated for v2 staking. | +| Add liquid staking placeholder | `pages/staking/liquid-staking.mdx`, `pages/staking/_meta.ts` | Removed | Placeholder removed and replaced by v2 liquid staking docs. | +| Add liquid staking docs (v2) | `pages/staking/liquid-staking/*` | Done | Based on LiquidDelegationVault + LiquidDelegationFactory. | + +## Blueprint + SDK Alignment (v2) + +| Task | Files | Status | Notes | +| ---- | ----- | ------ | ----- | +| Update local testing guide to Anvil harness | `pages/developers/testing-with-tangle.mdx` | Done | Uses Blueprint SDK harness + tnt-core fixtures. | +| Update blueprint introduction for v2 terminology + payments | `pages/developers/blueprints/introduction.mdx` | Done | Aligns with tnt-core payment flow and service terminology. | + +## Legacy Substrate References (Needs Review) + +| Area | File | Status | Notes | +| ---- | ---- | ------ | ----- | +| Blueprint Manager doc | `pages/developers/blueprints/manager.mdx` | Done | Updated to tnt-core contract terminology. | +| Blueprint pricing engine | `pages/developers/blueprints/pricing-engine.mdx` | Done | Rewritten to match blueprint-sdk pricing engine. | +| Address formats | `pages/developers/technicals/addresses.mdx` | Done | Deleted. | +| Transaction fees | `pages/developers/technicals/transaction-fees.mdx` | Done | Deleted. | +| Precompile docs | `pages/developers/precompiles/*` | Done | Deleted. | +| Slashing flow | `pages/network/slashing.mdx` | Done | Deleted. | +| Hardhat deployment guide | `pages/developers/technicals/deploy-using-hardhat.mdx` | Done | Deleted. | diff --git a/PROJECT_CHECKLIST.md b/PROJECT_CHECKLIST.md new file mode 100644 index 00000000..19445f0b --- /dev/null +++ b/PROJECT_CHECKLIST.md @@ -0,0 +1,38 @@ +# Tangle Docs Cohesion + IA Checklist + +Last updated: 2026-01-19 + +Status legend: [ ] Not started, [~] In progress, [x] Done + +## Navigation + IA +- [x] Top nav limited to Vision, Workbench, Runtime, Protocol. +- [x] Protocol dropdown includes Protocol Foundation, Build, Operate, Economic Security. +- [x] Sidebar shows only children for the active section and hides the section label. +- [x] Remove AI top-level tab and split into Workbench + Runtime. + +## Content Cohesion +- [x] Apply canonical framing across Overview pages. +- [x] Replace "Vibe" with "workbench" across docs. +- [x] Add explicit "Today vs Future" callout in Overview and Protocol. +- [x] Add trust/execution safety narrative (policies, audit logs, approvals, evaluation loop). +- [x] Add role-based "Start here" blocks for each persona. +- [x] Add minimal glossary and enforce term consistency. 
- [x] Replace "restaking" terminology with "staking" in human-facing docs.
- [x] Add liquid staking docs aligned with v2 contracts.

## Accuracy (Against Internal Codebases)
- [x] Audit workbench codebase and summarize key capabilities.
- [x] Audit runtime orchestration codebase and summarize key capabilities.
- [x] Align runtime docs with orchestration + sidecar responsibilities.
- [x] Surface autoscaling/observability capabilities in operator docs.
- [x] Align QoS observability + keystore guidance with blueprint-sdk behavior.

## Visuals
- [x] Use autonomous-work-loop diagram as primary system visual.
- [x] Remove SaaS-to-marketplace diagram.
- [x] Add captions and labels where the system diagram appears.

## Cleanup + QA
- [x] Scan for outdated validator references and remove.
- [x] Verify no closed-source repo names appear in published docs.
- [x] Run terminology scan for "Vibe" and replace.
diff --git a/PROJECT_SPEC.md b/PROJECT_SPEC.md
new file mode 100644
index 00000000..5c4c64a8
--- /dev/null
+++ b/PROJECT_SPEC.md
@@ -0,0 +1,62 @@
# Tangle Docs Cohesion + IA Spec

Last updated: 2026-01-19

## Problem Statement
The docs feel fragmented across product, runtime, and protocol. Roles blur, the story mixes present and future, and navigation makes it hard to understand where to start. The result is weak cohesion and low trust for new readers.

## Goals
- Present a single, consistent mission and system model across all sections.
- Make roles and journeys obvious: workbench user, runtime operator, protocol builder, partner/investor.
- Separate "what exists today" from "what evolves over time."
- Keep top navigation minimal while ensuring section sidebars are deep and focused.
- Emphasize AI-native value (autonomous work) without over-indexing on staking.
- Ensure trust: sandbox safety, auditability, approvals, and evaluation loops are explicit.

## Non-Goals
- Full product roadmap, pricing, or competitive positioning beyond brief comparisons.
- Detailed protocol economics documentation (kept in protocol docs only).
- Marketing site rewrite (this spec is for docs).

## Canonical Framing (Use Everywhere)
Tangle is the shared operating layer for autonomous work. Teams and agents collaborate in shared workbenches or separate ones, work runs in secure sandboxes, and the protocol pays the operators who host the runtime. Workflows improve through agent and task evaluations collected from each run.

## System Model
- **Workbench (Experience Layer)**: Multiplayer workspaces where humans and agents collaborate.
- **Sandbox Runtime (Execution Layer)**: Secure, isolated sandboxes that execute tasks with policies and limits.
- **Protocol (Coordination Layer)**: Operator coordination, payment routing, and economic security.
- **Evaluation Loop**: Each run produces task and agent evaluations that improve workflows over time.

## Information Architecture
Top nav (minimal):
- Vision
- Workbench
- Runtime
- Protocol (dropdown: Protocol Foundation, Build, Operate, Economic Security)

Sidebar behavior:
- Show only the active section's children.
- Do not show the section label in the sidebar.

Section mapping:
- Vision: mission, use cases, architecture, core concepts, glossary.
- Workbench: intro, workflows, simulations, profiles, integrations.
- Runtime: intro, orchestration, security model, observability, scaling.
- Protocol: foundation, build, operate, economic security (staking + liquid staking).
+ +## Content Guidelines +- Avoid naming closed-source repositories or internal codebases. +- Use "workbench" and "sandbox runtime" consistently; avoid "Vibe". +- Keep EigenLayer mentions out of primary navigation and framing. +- Always state what is live today vs what is planned. +- ELI5 clarity on "who does what" for each page. + +## Diagram Standards +- Primary system diagram: autonomous work loop (workbench -> sandbox -> protocol -> evaluation). +- Use one diagram consistently across hero + architecture pages. +- Diagrams must label the three layers clearly and show the evaluation loop. + +## Open Questions +- What is the smallest viable glossary for launch? +- Which operator trust signals should be surfaced early (logs, receipts, approvals)? +- Where should "today vs future" live: Overview or Protocol? diff --git a/components/CommonActions.tsx b/components/CommonActions.tsx deleted file mode 100644 index 8b975ecc..00000000 --- a/components/CommonActions.tsx +++ /dev/null @@ -1,127 +0,0 @@ -import { cn } from "@/lib/utils"; -import Link from "next/link"; -import React from "react"; -import { FaGithub } from "react-icons/fa"; -import { - HiOutlineBookOpen as BookOpenIcon, - HiOutlineServerStack as ServerIcon, -} from "react-icons/hi2"; - -const features = [ - { - Icon: ServerIcon, - title: "Node Deployment", - description: `Want to spin up a full node on the Tangle Network? We've made it easier than ever!`, - href: "/node/docker-node", - action: "Deploy a Docker Node", - }, - { - Icon: ServerIcon, - title: "Validators", - description: `Start your journey on Tangle Network. This guide will walk you through the steps to become a validator, ensuring network security and integrity.`, - href: "/node/quickstart/", - action: "Launch a Quick Validator Node", - }, - { - Icon: FaGithub, - title: "Tangle Open Source", - description: `Multy-party threshold ECDSA (GG20) Substrate node`, - href: "https://github.com/tangle-network/tangle", - action: "View the Repo", - }, - { - Icon: ServerIcon, - title: "Accounts", - description: `Tangle uses Polkadot Apps to manage Accounts.`, - href: "https://polkadot.js.org/apps/?rpc=wss://testnet-rpc.tangle.tools#/explorer", - action: "Go to Polkadot Apps", - }, - { - Icon: BookOpenIcon, - title: "Staking", - description: `Through Polkadot Apps you can create `, - href: "https://polkadot.js.org/apps/?rpc=wss://testnet-rpc.tangle.tools#/democracy", - action: "Manage Staking", - }, - { - Icon: BookOpenIcon, - title: "Governance", - description: `Through governance, you can create proposals for updating cross-chain applications.`, - href: "https://polkadot.js.org/apps/?rpc=wss://testnet-rpc.tangle.tools#/democracy", - action: "Interact with Governance", - }, - // { - // Icon: BeakerIcon, - // title: "Faucet", - // description: `Our easy-to-use testnet faucet allows you to claim test tokens with just a few clicks. Start experimenting with Hubble Bridge today.`, - // href: "https://faucet.tangle.tools", - // action: "Go to Faucet", - // }, -]; - -type CardProps = { - Icon: React.ElementType; - title: string; - description: string; - href: string; - action: string; -}; - -const Card = ({ Icon, title, href, action }: CardProps) => ( -
-

- {title} -

- - {/*}

{description}

- {/* Wrap the entire footer content with Link */} - -
-
- {/* Arrow */} - - -
-); - -// CommonActions component that renders a grid of Cards -export const CommonActions = () => { - return ( -
- {features.map((feature, index) => ( - - ))} -
- ); -}; - -export default CommonActions; diff --git a/components/EvmToSubstrateConverter.tsx b/components/EvmToSubstrateConverter.tsx deleted file mode 100644 index 7264c7cd..00000000 --- a/components/EvmToSubstrateConverter.tsx +++ /dev/null @@ -1,122 +0,0 @@ -import { hexToU8a, stringToU8a, u8aConcat } from "@polkadot/util"; -import { blake2AsU8a, encodeAddress } from "@polkadot/util-crypto"; -import Link from "next/link"; -import { Callout } from "nextra/components"; -import { useState } from "react"; -import { BlockCopyButton } from "./ui/block-copy-button"; -import { Button } from "./ui/button"; -import { - Card, - CardContent, - CardDescription, - CardFooter, - CardHeader, - CardTitle, -} from "./ui/card"; - -const TANGLE_PREFIX = 5845; - -const evmToTangle = (evmAddress: string) => { - const addr = hexToU8a(evmAddress); - const data = stringToU8a("evm:"); - const res = blake2AsU8a(u8aConcat(data, addr)); - const tangleAddress = encodeAddress(res, TANGLE_PREFIX); - return tangleAddress; -}; - -const AddressConverter = () => { - const [evmAddress, setEvmAddress] = useState(""); - const [tangleAddress, setTangleAddress] = useState(""); - - const convertAddress = () => { - if (!evmAddress) { - setTangleAddress("Please enter an EVM address."); - return; - } - - try { - const convertedAddress = evmToTangle(evmAddress); - setTangleAddress(convertedAddress); - } catch { - setTangleAddress("Invalid EVM address."); - } - }; - - return ( -
- - - EVM to Tangle Address Converter - - Enter an EVM address to convert it to the prefixed form unique to - Tangle Network. To convert an SS58 address to a public key or other - networks, you can use{" "} - - SS58.org - - - - -
-
- - setEvmAddress(e.target.value)} - placeholder="Enter EVM address" - /> -
-
- -
- {tangleAddress || "Waiting..."} -
-
-
- -
-
-
- - - - - Please note that the conversion from an EVM address to a Tangle - address using the provided tool is a one-way operation, and you cannot - derive the original EVM address from a Tangle address. -
- - Learn more about Addresses on Tangle. - -
-
-
- ); -}; - -export default AddressConverter; diff --git a/components/LandingPage.tsx b/components/LandingPage.tsx index 8f39d96d..c92936f4 100644 --- a/components/LandingPage.tsx +++ b/components/LandingPage.tsx @@ -1,68 +1,34 @@ import Image from "next/image"; -import { FaWallet } from "react-icons/fa"; import { GiPlatform, GiToken } from "react-icons/gi"; import { GrNodes } from "react-icons/gr"; -import { MdApps, MdAppShortcut } from "react-icons/md"; -import { RiToolsLine } from "react-icons/ri"; import { SiBlueprint } from "react-icons/si"; import CallToActionCard from "./CallToActionCard"; const getStartedCards = [ - { - icon: , - title: "Get started building Blueprints", - description: - "Tangle Network is a decentralized cloud infrastructure that allows users to deploy and monetize Blueprints across any blockchain ecosystem.", - link: "../developers/blueprints/introduction", - }, - { - icon: , - title: "Restake TNT or other assets", - description: - "Restaking secures the actively validated services on Tangle, and earns rewards.", - link: "../restake/introduction", - }, - { - icon: , - title: "Run a Node, Validator, or Service Operator", - description: - "Noderunners can earn staking rewards, secure the network, and operators earn income from services.", - link: "../operators/introduction", - }, { icon: , - title: "Learn more about the network and platform", + title: "Start in the agentic workbench", description: - "Discover Tangle network's unique decentralized cloud infrastructure.", - link: "../network/overview", - }, -]; - -const resourcesCards = [ - { - icon: , - title: "TNT, Wallets and More", - description: "Your source for Wallets, apps, staking and more.", - link: "/resources/resources", + "Create autonomous work with teammates and agents in a shared workspace.", + link: "/vibe/introduction", }, { - icon: , - title: "Developer Tools", - description: "RPCs, faucets, and block explorers.", - link: "/developers/endpoints", + icon: , + title: "Run work in secure sandboxes", + description: "Work executes in isolated runtimes with policies and limits.", + link: "/infrastructure/introduction", }, { - icon: , - title: "Tangle DApp", - description: "Nominate your TNT at Tangle DApp", - link: "http://app.tangle.tools/", + icon: , + title: "Operate the runtime", + description: "Host secure sandboxes and get paid through the protocol.", + link: "/operators/introduction", }, { - icon: , - title: "Polkadot Apps", - description: - "For advanced interactions, Polkadot Apps supports Tangle Network.", - link: "https://polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools#/explorer", + icon: , + title: "Build and publish Blueprints", + description: "Package services and workflows to run on the protocol.", + link: "/developers/blueprints/introduction", }, ]; @@ -77,15 +43,17 @@ const LandingPage = () => { TANGLE DOCUMENTATION

- Your Guide to Tangle's{" "} + Tangle is the{" "} - Decentralized Cloud Infrastructure - + shared operating layer + {" "} + for autonomous work

- Create and monetize Blueprints that deploy securely across any - blockchain ecosystem. Instance services on-demand using crypto - rails. + Teams and agents collaborate in shared workbenches or separate + ones, work runs in secure sandboxes, and the protocol pays the + operators who host the runtime. Workflows improve through agent + and task evaluations collected from each run.

@@ -104,34 +72,73 @@ const LandingPage = () => { {/* Horizontal Line */}
- {/* Call-to-Action Cards Row */} -
-

- Get Started + {/* System Overview */} +
+

+ System Overview

-
-
- {getStartedCards.map((card, index) => ( - - ))} +

+ Teams and agents collaborate in shared workbenches or separate ones, + work runs in secure sandboxes, and the protocol pays the operators + who host the runtime. +

+
+
+ Autonomous work loop +
+
+ Autonomous work loop: workbench -> sandbox runtime -> + protocol (payments + evaluation). +
+
+
+ + {/* What Runs Where */} +
+

+ What Runs Where +

+
+
+

+ Workbench +

+

+ Workflows, profiles, simulations, and reviews. +

+
+
+

+ Sandbox Runtime +

+

+ Agent sessions, tool calls, and file edits. +

+
+
+

+ Protocol +

+

+ Service registry, operator payments, staking, and incentives. +

- {/* Resources Cards Row */} + {/* Call-to-Action Cards Row */}

- Resources + Get Started

- {resourcesCards.map((card, index) => ( + {getStartedCards.map((card, index) => ( { - return ( -
-
- {type} {cardTitle} -
-
-

{network}

-
- - - - - - - - - - - - - - - - - - - - - - - - - - - {explorerUrls.map((explorer, index) => ( - - - - - ))} - -
Network Type{type}
Native Asset Symbol{symbol}
Native Asset Decimals{decimals}
Chain ID{chainId}
Public RPC URL - {rpcUrl} -
Public WSS URL - {wssUrl} - {wssUrl2} -
- {index === 0 ? "Interfaces & Explorers" : ""} - - {explorer.name} -
-
- ); -}; - -const NetworkInfo = () => { - // Define the network details here or fetch from an API - const networks = [ - { - cardTitle: "Network Information", - network: "Tangle Network", - type: "Mainnet", - symbol: "TNT", - decimals: 18, - chainId: "5845", - rpcUrl: "https://rpc.tangle.tools", - wssUrl: "wss://rpc.tangle.tools", - wssUrl2: "wss://tangle-mainnet-rpc.n.dwellir.com/", - explorerUrls: [ - { name: "BlockScout", url: "https://explorer.tangle.tools" }, - { - name: "PolkadotJS", - url: "https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer", - }, - ], - fundingInfo: { - url: "https://discord.gg/PQDYv5GT", - }, - }, - ]; - - return ( -
- {networks.map((network, index) => ( - - ))} -
- ); -}; - -export default NetworkInfo; diff --git a/components/Navigation.tsx b/components/Navigation.tsx index 82c373f9..ac4869f3 100644 --- a/components/Navigation.tsx +++ b/components/Navigation.tsx @@ -1,10 +1,14 @@ import { Navbar } from "nextra-theme-docs"; import { ComponentProps } from "react"; +const NAV_ITEMS = new Set(["vision", "vibe", "infrastructure", "protocol"]); + function Navigation(props: ComponentProps) { + const filteredItems = props.items.filter((item) => NAV_ITEMS.has(item.name)); + return ( <> - + ); } diff --git a/components/NetworkConfig.module.css b/components/NetworkConfig.module.css deleted file mode 100644 index 7ae2f80c..00000000 --- a/components/NetworkConfig.module.css +++ /dev/null @@ -1,154 +0,0 @@ -.networkInfo { - font-family: "Arial", sans-serif; - max-width: 1200px; /* Increase the max-width */ - margin: 0 auto; -} - -.networkInfo h1, -.networkInfo h2 { - color: #000; - text-align: left; -} - -.titleContainer { - display: flex; /* Use flexbox to lay out the title and icon */ - align-items: center; /* Align items vertically in the center */ - cursor: pointer; /* Change the cursor to indicate it's clickable */ - border-bottom: 2px solid white; -} - -.networkCard { - border-radius: 8px; - padding: 29px; - margin: 20px 0 0 0; - background: linear-gradient(45deg, rgb(168 0 198), rgba(255, 0, 0, 0) 70%), - linear-gradient(-45deg, rgb(190 0 255 / 90%), rgba(0, 0, 255, 0) 70%), - linear-gradient(135deg, rgb(0 242 245), rgba(0, 255, 0, 0) 70%), - linear-gradient(-135deg, rgb(117 119 251 / 32%), rgba(255, 255, 0, 0) 70%); - background-blend-mode: multiply; - box-shadow: 0 4px 8px 0 rgba(0, 0, 0, 0.1); -} - -.networkColumn { - flex: 0 0 48%; /* Set a fixed width for columns */ - max-width: 48%; /* Ensure columns don't exceed this width */ - padding: 0 10px; -} -.networkTitle { - margin-right: 0.5em; /* Add some space between the title and the icon */ -} - -.networkCard h2 { - margin-top: 0; - color: #fff; - font-size: 2.8em; - font-weight: 600; - line-height: 1.2em; - padding-bottom: 8px; - margin-bottom: 2px; -} - -.networkCard h3 { - margin-top: 20px; - margin-bottom: 10px; -} - -.networkType { - color: #ffffff; - text-transform: uppercase; - font-family: Arial, Helvetica, sans-serif; - font-size: 0.8em; - margin-bottom: 0; - font-weight: 700; -} -.section { - background: #ffffff; - border-radius: 8px; - box-shadow: 0 4px 8px 0 rgba(0, 0, 0, 0.2); - margin-bottom: 20px; - padding: 20px; -} - -.table { - width: 100%; - border-collapse: collapse; - border: 0; - margin-bottom: 8px; -} - -.tableRow { - text-align: left; -} - -.tableHeader { - color: #fff; - font-weight: 400; - text-align: left; - padding: 10px; - border-bottom: 1px solid #e1e4e8; -} - -.tableCell { - padding: 10px; - text-align: left; - border-bottom: 1px solid #e1e4e8; - color: #fff; - font-weight: 600; -} - -@media (max-width: 768px) { - .networkInfo { - padding: 10px; - } - - .table, - .th, - .td { - font-size: 14px; - } - - .networkColumn { - flex: 0 0 100%; /* Make columns take full width on smaller screens */ - max-width: 100%; - padding: 0; - } -} - -.tabsContainer { - display: flex; - justify-content: flex-start; - margin-bottom: 20px; - border-bottom: 1px white solid; -} - -.tabButton { - color: #fff; - border: none; - text-align: center; - text-decoration: none; - display: inline-block; - font-size: 16px; - cursor: pointer; - transition: background-color 0.3s ease; - padding-left: 10px; - padding: 10px 33px 10px 33px; - margin-right: 10px; -} - -.activeTab { - color: #fff; - 
font-weight: 600; - border-bottom: 1px solid white; -} - -.networkTabContent { - padding: 20px; -} - -.mainnetStatus { - font-size: 1em; - font-style: italic; - font-weight: 600; - margin-left: 0.5em; - color: #fff; -} diff --git a/components/NetworkResources.tsx b/components/NetworkResources.tsx index 8275a4df..e4646482 100644 --- a/components/NetworkResources.tsx +++ b/components/NetworkResources.tsx @@ -1,14 +1,8 @@ import React, { useState, useEffect } from "react"; import Link from "next/link"; import { BlockCopyButton } from "./ui/block-copy-button"; -import { - FlaskConical, - WalletMinimal, - Waypoints, - SendToBack, -} from "lucide-react"; +import { FlaskConical, WalletMinimal, Waypoints } from "lucide-react"; import WalletTable from "./WalletTable"; -import EvmToSubstrateConverter from "./EvmToSubstrateConverter"; type NetworkDetail = { property: string; @@ -33,14 +27,9 @@ const NETWORK_DATA = { }, }, { - property: "PolkadotJS Apps", - value: { - type: "link", - url: "https://polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools#/explorer", - text: "polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools", - }, + property: "Block Explorers", + value: "", }, - { property: "Block Explorers", value: "" }, { property: "EVM Explorer", value: { @@ -49,22 +38,12 @@ const NETWORK_DATA = { text: "explorer.tangle.tools", }, }, - { - property: "Substrate Block Explorer", - value: { - type: "link", - url: "https://tangle.statescan.io/", - text: "tangle.statescan.io", - }, - }, { property: "Asset Details", value: "" }, - { property: "Native Asset Symbol", value: "TNT" }, - { property: "Native Asset Decimals", value: "18" }, + { property: "Gas Token Symbol", value: "ETH" }, + { property: "Gas Token Decimals", value: "18" }, { property: "Developer Resources", value: "" }, - { property: "Address Prefix", value: { type: "wss", url: "tg" } }, - { property: "Network Type", value: "Substrate aka Polkadot SDK with EVM" }, - { property: "Chain ID", value: { type: "wss", url: "5845" } }, - { property: "Standard Account", value: "*25519" }, + { property: "Chain ID", value: { type: "wss", url: "8453" } }, + { property: "Host Chain", value: "Base Mainnet" }, { property: "Public RPC URL", value: { type: "wss", url: "https://rpc.tangle.tools" }, @@ -74,31 +53,19 @@ const NETWORK_DATA = { value: { type: "wss", url: "wss://rpc.tangle.tools" }, }, { - property: "Public WSS URL by Dwellir", - value: { type: "wss", url: "wss://tangle-mainnet-rpc.n.dwellir.com" }, - }, - { - property: "Runtime Types", + property: "Protocol Contracts", value: { type: "link", - url: "https://www.npmjs.com/package/@tangle-network/tangle-substrate-types", - text: "@tangle-network/tangle-substrate-types", + url: "https://github.com/tangle-network/tnt-core/tree/v2", + text: "github.com/tangle-network/tnt-core", }, }, { - property: "Telemetry", + property: "Blueprint SDK", value: { type: "link", - url: "https://telemetry.polkadot.io/#list/0x44f68476df71ebf765b630bf08dc1e0fedb2bf614a1aa0563b3f74f20e47b3e0", - text: "Telemetry", - }, - }, - { - property: "GitHub Repo", - value: { - type: "link", - url: "https://github.com/tangle-network/tangle", - text: "github.com/tangle-network/tangle", + url: "https://github.com/tangle-network/blueprint/tree/v2", + text: "github.com/tangle-network/blueprint", }, }, ] satisfies NetworkDetail[], @@ -113,39 +80,23 @@ const NETWORK_DATA = { }, }, { - property: "PolkadotJS Apps", - value: { - type: "link", - url: "https://polkadot.js.org/apps/?rpc=wss://testnet-rpc.tangle.tools#/explorer", - text: 
"polkadot.js.org/apps/?rpc=wss://testnet-rpc.tangle.tools", - }, + property: "Block Explorers", + value: "", }, - { property: "Block Explorers", value: "" }, { - property: "EVM Explorers", + property: "EVM Explorer", value: { type: "link", url: "https://testnet-explorer.tangle.tools", text: "testnet-explorer.tangle.tools", }, }, - { - property: "Substrate Explorer", - value: { - type: "link", - url: "https://polkadot.js.org/apps/?rpc=wss://testnet-rpc.tangle.tools#/explorer", - text: "polkadot.js.org/apps/?rpc=wss://testnet-rpc.tangle.tools", - }, - }, { property: "Asset Details", value: "" }, - { property: "Native Asset Symbol", value: "tTNT" }, - { property: "Native Asset Decimals", value: "18" }, + { property: "Gas Token Symbol", value: "ETH" }, + { property: "Gas Token Decimals", value: "18" }, { property: "Developer Resources", value: "" }, - { property: "Address Prefix", value: "tg" }, - { property: "Network Type", value: "Substrate aka Polkadot SDK with EVM" }, - { property: "Chain ID", value: "3799" }, - { property: "Address Prefix", value: "tg" }, - { property: "Standard Account", value: "*25519" }, + { property: "Chain ID", value: { type: "wss", url: "84532" } }, + { property: "Host Chain", value: "Base Sepolia" }, { property: "Public RPC URL", value: { type: "wss", url: "https://testnet-rpc.tangle.tools" }, @@ -155,27 +106,19 @@ const NETWORK_DATA = { value: { type: "wss", url: "wss://testnet-rpc.tangle.tools" }, }, { - property: "Runtime Types", - value: { - type: "link", - url: "https://www.npmjs.com/package/@tangle-network/tangle-substrate-types", - text: "@tangle-network/tangle-substrate-types", - }, - }, - { - property: "Telemetry", + property: "Protocol Contracts", value: { type: "link", - url: "https://telemetry.polkadot.io/#list/0x3d22af97d919611e03bbcbda96f65988758865423e89b2d99547a6bb61452db3", - text: "Polkadot Telemetry", + url: "https://github.com/tangle-network/tnt-core/tree/v2", + text: "github.com/tangle-network/tnt-core", }, }, { - property: "GitHub Repo", + property: "Blueprint SDK", value: { type: "link", - url: "https://github.com/tangle-network/tangle", - text: "github.com/tangle-network/tangle", + url: "https://github.com/tangle-network/blueprint/tree/v2", + text: "github.com/tangle-network/blueprint", }, }, ] satisfies NetworkDetail[], @@ -190,7 +133,7 @@ const NetworkTabs = () => { const hash = window.location.hash.substring(1); // If hash matches one of our tabs, set it as active - if (["mainnet", "testnet", "wallets", "evmToSubstrate"].includes(hash)) { + if (["mainnet", "testnet", "wallets"].includes(hash)) { setActiveTab(hash); } else { // Explicitly set to mainnet if no valid hash is present @@ -329,28 +272,11 @@ const NetworkTabs = () => { Wallets -
  • - {" "} - handleTabClick("evmToSubstrate")} - className={`inline-block p-4 rounded-t-lg ${ - activeTab === "evmToSubstrate" - ? "text-blue-600 bg-gray-100 active dark:bg-gray-800 dark:text-blue-500" - : "hover:text-gray-600 hover:bg-gray-50 dark:hover:bg-gray-800 dark:hover:text-gray-300" - }`} - > - - Address Converter - -
  • {activeTab === "wallets" ? ( - ) : activeTab === "evmToSubstrate" ? ( - ) : activeTab === "mainnet" || activeTab === "testnet" ? ( renderTable(NETWORK_DATA[activeTab]) ) : null} diff --git a/components/OperatorIntro.tsx b/components/OperatorIntro.tsx index 7b565e83..9f61066e 100644 --- a/components/OperatorIntro.tsx +++ b/components/OperatorIntro.tsx @@ -4,25 +4,19 @@ import CallToActionCard from "./CallToActionCard"; const OperatorIntroCards = () => { const cards = [ - // { - // icon: , - // title: Service Operators, - // description: "Run Blueprint instances and earn job revenue.", - // link: "../operators/service-operator/service-provider", - // }, { - icon: , - title: Validators, + icon: , + title: Operator Registration, description: - "Secure the network by participating in Nominated Proof-of-Stake (nPoS).", - link: "../operators/validator/introduction", + "Register as an operator and manage your self-stake and blueprint opt-ins.", + link: "../operators/operator/join_operator/join", }, { - icon: , - title: Run a Node, + icon: , + title: Blueprint Manager, description: - "Get started with Tangle by running a node, a great way to get familiar with operating.", - link: "../operators/node-basics/quickstart", + "Run services with the Blueprint Manager runtime and publish heartbeats.", + link: "../operators/manager/introduction", }, ]; diff --git a/components/RepoArea.tsx b/components/RepoArea.tsx index 0acf910e..a384800f 100644 --- a/components/RepoArea.tsx +++ b/components/RepoArea.tsx @@ -1,4 +1,3 @@ -import { HiCodeBracket as CodeIcon } from "react-icons/hi2"; import { DetailedFeatureLink } from "./Feature"; import { GitHubIcon } from "./Icons"; @@ -8,50 +7,36 @@ export const RepoArea = () => { -
    - ); -}; - -export const StatsdApp = () => { - return ( -
    - + href="https://github.com/webb-tools/tangle-docs" + /> + href="https://github.com/tangle-network/dapp" + />
    ); }; diff --git a/components/Social.tsx b/components/Social.tsx index b9e91811..8208d081 100644 --- a/components/Social.tsx +++ b/components/Social.tsx @@ -4,9 +4,9 @@ import { TelegramIcon, TwitterIcon } from "./Icons"; function Github() { return ( diff --git a/components/WalletTable.tsx b/components/WalletTable.tsx index 5619743e..8792a8cc 100644 --- a/components/WalletTable.tsx +++ b/components/WalletTable.tsx @@ -2,21 +2,6 @@ import React from "react"; import Link from "next/link"; const WALLET_DATA = [ - { - name: "Subwallet", - supports: "Substrate", - url: "https://www.subwallet.app/download.html", - }, - { - name: "Polkadot.js Extension", - supports: "Substrate", - url: "https://polkadot.js.org/", - }, - { - name: "Talisman Wallet", - supports: "Substrate", - url: "https://www.talisman.xyz/", - }, { name: "Rainbow Wallet", supports: "EVM", @@ -31,11 +16,9 @@ const WalletTable = () => {

    Wallets

    - The Tangle Network is a versatile blockchain that integrates both - Substrate and Ethereum Virtual Machine (EVM) functionalities, offering a - wide range of features and compatibility with numerous wallets. This - document outlines the wallets available for use on the Tangle Network, - including details for both the Substrate and EVM sides of the network. + Tangle v2 is EVM-first. Use standard EVM wallets to connect, sign + transactions, and add the network with the Chain ID and RPC URLs listed + above. @@ -71,19 +54,9 @@ const WalletTable = () => {

    Network Details for Adding to Wallets

    - Substrate Wallets: - Follow the specific wallet instructions to add the Tangle Network as a - custom network. We work to ensure these wallets have the latest - information and in-app support to connect. - - {" "} - EVM Wallets - Add the Tangle Network using the ChainID and a RPC server address on - this page. - - Please follow the standard process in your wallet to add a new network, - using the ChainID and the RPC server addresses provided above. + Add the Tangle Network using the Chain ID and RPC server address on this + page. Follow the standard flow in your wallet to add a new network.
    ); diff --git a/globals.css b/globals.css index e52b04bd..ee6c8ce1 100644 --- a/globals.css +++ b/globals.css @@ -100,6 +100,22 @@ } } +/* Sidebar: show only the active section's children */ +.nextra-menu-desktop > li.open.active > button { + display: none; +} + +.nextra-menu-desktop > li.open.active > div > ul { + margin-left: 0; + margin-right: 0; + padding-left: 0; + padding-right: 0; +} + +.nextra-menu-desktop > li.open.active > div > ul::before { + content: none; +} + @layer components { .list, .list:focus-visible, diff --git a/pages/_meta.ts b/pages/_meta.ts index b18e986b..ef9a5cab 100644 --- a/pages/_meta.ts +++ b/pages/_meta.ts @@ -10,24 +10,58 @@ const meta: Meta = { layout: "raw", }, }, - network: { - title: "Network", + vision: { + title: "Vision", type: "page", }, - developers: { - title: "Developers", + vibe: { + title: "Workbench", + type: "page", + }, + infrastructure: { + title: "Runtime", type: "page", }, - restake: { - title: "Restaking", + protocol: { + type: "menu", + title: "Protocol", + items: { + overview: { + title: "Protocol Foundation", + href: "/network/overview", + }, + build: { + title: "Build", + href: "/developers/blueprints/introduction", + }, + operate: { + title: "Operate", + href: "/operators/introduction", + }, + "economic-security": { + title: "Economic Security", + href: "/staking/introduction", + }, + }, + }, + ai: { + title: "AI", + display: "hidden", + }, + developers: { + title: "Build", type: "page", }, operators: { - title: "Operators", + title: "Operate", type: "page", }, - resources: { - title: "Resources", + staking: { + title: "Economic Security", + type: "page", + }, + network: { + title: "Protocol", type: "page", }, }; diff --git a/pages/ai/_meta.ts b/pages/ai/_meta.ts new file mode 100644 index 00000000..7165fac5 --- /dev/null +++ b/pages/ai/_meta.ts @@ -0,0 +1,43 @@ +import type { Meta } from "nextra"; + +const meta: Meta = { + index: "AI Introduction", + "-- workbench": { + type: "separator", + title: "Agentic Workbench", + }, + "workbench-intro": { + title: "Introduction", + href: "/vibe/introduction", + }, + "workbench-workflows": { + title: "Agent Workflows", + href: "/vibe/workflows", + }, + "workbench-simulations": { + title: "Simulations", + href: "/vibe/simulations", + }, + "workbench-profiles": { + title: "Profiles and Policies", + href: "/vibe/profiles", + }, + "workbench-integrations": { + title: "Integrations", + href: "/vibe/integrations", + }, + "-- runtime": { + type: "separator", + title: "Sandbox Runtime", + }, + "runtime-intro": { + title: "Runtime Introduction", + href: "/infrastructure/introduction", + }, + "runtime-orchestration": { + title: "Orchestration", + href: "/infrastructure/orchestration", + }, +}; + +export default meta; diff --git a/pages/ai/index.mdx b/pages/ai/index.mdx new file mode 100644 index 00000000..fdfbcaed --- /dev/null +++ b/pages/ai/index.mdx @@ -0,0 +1,51 @@ +--- +title: AI +description: The shared operating layer for autonomous work, unifying the agentic workbench, secure sandboxes, and operator payments. +--- + +# AI + +Tangle is the shared operating layer for autonomous work. Teams and agents collaborate in shared workbenches or separate ones, work runs in secure sandboxes, and the protocol pays the operators who host the runtime. Workflows improve through agent and task evaluations collected from each run. + +
    + Autonomous work loop +
    + Autonomous work loop: workbench -> sandbox runtime -> protocol (payments + + evaluation). +
    +
    + +## Agentic Workbench + +The agentic workbench is multiplayer by design. It is where teammates across engineering, product, and business collaborate with agents in a shared workspace. + +Key traits: + +- **Shared workspaces** for humans and agents across teams. +- **Agent profiles** that control models, tools, and budgets. +- **Background execution** so long tasks keep running. +- **Simulations** for evaluating workflows across many tasks. +- **Integrations** for connecting internal tools and data. + +## Secure Sandbox Runtime + +Work executes inside isolated sandboxes so tasks are contained and repeatable. The runtime is built to host autonomous work safely at scale. + +Core capabilities: + +- **Sandboxed execution** with explicit policies and resource limits. +- **Task isolation** so background work can be reviewed and accepted before merge. +- **Orchestrator + sidecar** separation for lifecycle control and execution. + +## Evaluation Loop + +Each run produces task and agent evaluations. That data feeds back into the workbench to improve prompts, policies, and workflows over time. + +## Learn More + +- [Workbench details](/vibe/introduction) +- [Runtime and sandboxing](/infrastructure/introduction) +- [Operator onboarding](/operators/introduction) diff --git a/pages/developers/_meta.ts b/pages/developers/_meta.ts index 83d1aa4c..46c47872 100644 --- a/pages/developers/_meta.ts +++ b/pages/developers/_meta.ts @@ -1,42 +1,44 @@ import { Meta } from "nextra"; const meta: Meta = { - "-- intro": { + "-- overview": { type: "separator", - title: "Introduction", + title: "Getting Started", }, blueprints: "Blueprints", - cli: "Blueprint CLI", - "-- gadget-tutorial": { + "blueprint-sdk": "Blueprint SDK", + "-- build": { type: "separator", - title: "Blueprint Developers", + title: "Build with Blueprints", }, - "blueprint-sdk": "Introduction", "blueprint-contexts": "Contexts", "blueprint-runner": "Blueprint Runner", - "blueprint-qos": "Quality of Service Integration", + "blueprint-qos": "Quality of Service", "p2p-networking": "P2P Networking", - "tangle-avs": "Build a Tangle Blueprint", - "eigenlayer-avs": "Build an Eigenlayer AVS", - "testing-with-tangle": "Testing with Tangle", + "-- tooling": { + type: "separator", + title: "Tooling", + }, + cli: "CLI", deployment: "Deployment", + "testing-with-tangle": "Testing", troubleshooting: "Troubleshooting", - "-- solution-developers": { + "-- protocol": { type: "separator", - title: "Solution Developers", + title: "Protocol Integration", }, - endpoints: "Endpoints and Integration", - precompiles: "EVM Precompiles", - technicals: "EVM Development", + endpoints: "Endpoints", + "protocol-architecture": "Protocol Architecture", + "system-architecture": "System Architecture", "-- contribute": { type: "separator", title: "Contribute", }, contribute: "Bug Reports", github: { - title: "Tangle on Github", + title: "Tangle Network on GitHub", type: "page", - href: "https://github.com/tangle-network/tangle", + href: "https://github.com/tangle-network", newWindow: true, }, }; diff --git a/pages/developers/api/reference/BlueprintHookBase.mdx b/pages/developers/api/reference/BlueprintHookBase.mdx new file mode 100644 index 00000000..2ae6ecbf --- /dev/null +++ b/pages/developers/api/reference/BlueprintHookBase.mdx @@ -0,0 +1,142 @@ +--- +title: BlueprintHookBase +description: Auto-generated Solidity API reference. 
+--- + +# BlueprintHookBase + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IBlueprintHook.sol + +### BlueprintHookBase + +Base implementation with sensible defaults + +_For full features, extend BlueprintServiceManagerBase instead_ + +#### Functions + +#### onBlueprintCreated + +```solidity +function onBlueprintCreated(uint64, address) external virtual +``` + +#### onOperatorRegister + +```solidity +function onOperatorRegister(uint64, address, bytes) external virtual returns (bool) +``` + +#### onOperatorUnregister + +```solidity +function onOperatorUnregister(uint64, address) external virtual +``` + +#### onServiceRequest + +```solidity +function onServiceRequest(uint64, uint64, address, address[], bytes) external payable virtual returns (bool) +``` + +#### onServiceApprove + +```solidity +function onServiceApprove(uint64, address, uint8) external virtual +``` + +#### onServiceReject + +```solidity +function onServiceReject(uint64, address) external virtual +``` + +#### onServiceActivated + +```solidity +function onServiceActivated(uint64, uint64, address, address[]) external virtual +``` + +#### onServiceTerminated + +```solidity +function onServiceTerminated(uint64, address) external virtual +``` + +#### canJoin + +```solidity +function canJoin(uint64, address, uint16) external view virtual returns (bool) +``` + +#### canLeave + +```solidity +function canLeave(uint64, address) external view virtual returns (bool) +``` + +#### onJobSubmitted + +```solidity +function onJobSubmitted(uint64, uint64, uint8, address, bytes) external payable virtual returns (bool) +``` + +#### onJobResult + +```solidity +function onJobResult(uint64, uint64, address, bytes) external virtual returns (bool) +``` + +#### onJobCompleted + +```solidity +function onJobCompleted(uint64, uint64, uint32) external virtual +``` + +#### onSlashProposed + +```solidity +function onSlashProposed(uint64, address, uint256, bytes32) external virtual returns (bool) +``` + +#### onSlashApplied + +```solidity +function onSlashApplied(uint64, address, uint256) external virtual +``` + +#### getDeveloperPaymentAddress + +```solidity +function getDeveloperPaymentAddress(uint64) external view virtual returns (address payable) +``` + +#### isPaymentTokenAllowed + +```solidity +function isPaymentTokenAllowed(address) external view virtual returns (bool) +``` + +#### getRequiredResultCount + +```solidity +function getRequiredResultCount(uint64, uint8) external view virtual returns (uint32) +``` + +#### requiresAggregation + +```solidity +function requiresAggregation(uint64, uint8) external view virtual returns (bool) +``` + +#### getAggregationThreshold + +```solidity +function getAggregationThreshold(uint64, uint8) external view virtual returns (uint16, uint8) +``` + +#### onAggregatedResult + +```solidity +function onAggregatedResult(uint64, uint64, uint256, bytes) external virtual +``` diff --git a/pages/developers/api/reference/IBlueprintHook.mdx b/pages/developers/api/reference/IBlueprintHook.mdx new file mode 100644 index 00000000..82d48ae6 --- /dev/null +++ b/pages/developers/api/reference/IBlueprintHook.mdx @@ -0,0 +1,227 @@ +--- +title: IBlueprintHook +description: Auto-generated Solidity API reference. +--- + +# IBlueprintHook + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IBlueprintHook.sol + +### IBlueprintHook + +Simplified hook interface for basic blueprint customization + +\_For full control, implement IBlueprintServiceManager directly. 
+This interface provides a simpler subset for common use cases. + +Migration path: + +- Simple blueprints: Use IBlueprintHook / BlueprintHookBase +- Full-featured blueprints: Use IBlueprintServiceManager / BlueprintServiceManagerBase\_ + +#### Functions + +#### onBlueprintCreated + +```solidity +function onBlueprintCreated(uint64 blueprintId, address owner) external +``` + +Called when blueprint is created + +#### onOperatorRegister + +```solidity +function onOperatorRegister(uint64 blueprintId, address operator, bytes data) external returns (bool accept) +``` + +Called when an operator registers + +##### Return Values + +| Name | Type | Description | +| ------ | ---- | --------------------------- | +| accept | bool | True to accept registration | + +#### onOperatorUnregister + +```solidity +function onOperatorUnregister(uint64 blueprintId, address operator) external +``` + +Called when an operator unregisters + +#### onServiceRequest + +```solidity +function onServiceRequest(uint64 requestId, uint64 blueprintId, address requester, address[] operators, bytes config) external payable returns (bool accept) +``` + +Called when a service is requested + +##### Return Values + +| Name | Type | Description | +| ------ | ---- | ---------------------- | +| accept | bool | True to accept request | + +#### onServiceApprove + +```solidity +function onServiceApprove(uint64 requestId, address operator, uint8 stakingPercent) external +``` + +Called when an operator approves a service request + +#### onServiceReject + +```solidity +function onServiceReject(uint64 requestId, address operator) external +``` + +Called when an operator rejects a service request + +#### onServiceActivated + +```solidity +function onServiceActivated(uint64 serviceId, uint64 requestId, address owner, address[] operators) external +``` + +Called when service becomes active + +#### onServiceTerminated + +```solidity +function onServiceTerminated(uint64 serviceId, address owner) external +``` + +Called when service is terminated + +#### canJoin + +```solidity +function canJoin(uint64 serviceId, address operator, uint16 exposureBps) external view returns (bool) +``` + +Check if operator can join a dynamic service + +#### canLeave + +```solidity +function canLeave(uint64 serviceId, address operator) external view returns (bool) +``` + +Check if operator can leave a dynamic service + +#### onJobSubmitted + +```solidity +function onJobSubmitted(uint64 serviceId, uint64 callId, uint8 jobIndex, address caller, bytes inputs) external payable returns (bool accept) +``` + +Called when a job is submitted + +##### Return Values + +| Name | Type | Description | +| ------ | ---- | ------------------ | +| accept | bool | True to accept job | + +#### onJobResult + +```solidity +function onJobResult(uint64 serviceId, uint64 callId, address operator, bytes result) external returns (bool accept) +``` + +Called when an operator submits a result + +##### Return Values + +| Name | Type | Description | +| ------ | ---- | --------------------- | +| accept | bool | True to accept result | + +#### onJobCompleted + +```solidity +function onJobCompleted(uint64 serviceId, uint64 callId, uint32 resultCount) external +``` + +Called when a job is marked complete + +#### onSlashProposed + +```solidity +function onSlashProposed(uint64 serviceId, address operator, uint256 amount, bytes32 evidence) external returns (bool approve) +``` + +Called before a slash is applied + +##### Return Values + +| Name | Type | Description | +| ------- | ---- | --------------------- 
| +| approve | bool | True to approve slash | + +#### onSlashApplied + +```solidity +function onSlashApplied(uint64 serviceId, address operator, uint256 amount) external +``` + +Called after a slash is applied + +#### getDeveloperPaymentAddress + +```solidity +function getDeveloperPaymentAddress(uint64 serviceId) external view returns (address payable) +``` + +Get the developer payment address + +#### isPaymentTokenAllowed + +```solidity +function isPaymentTokenAllowed(address token) external view returns (bool) +``` + +Check if a payment token is allowed + +#### getRequiredResultCount + +```solidity +function getRequiredResultCount(uint64 serviceId, uint8 jobIndex) external view returns (uint32) +``` + +Get the number of results required for job completion + +#### requiresAggregation + +```solidity +function requiresAggregation(uint64 serviceId, uint8 jobIndex) external view returns (bool) +``` + +Check if a job requires BLS aggregated results + +#### getAggregationThreshold + +```solidity +function getAggregationThreshold(uint64 serviceId, uint8 jobIndex) external view returns (uint16 thresholdBps, uint8 thresholdType) +``` + +Get the aggregation threshold configuration for a job + +##### Return Values + +| Name | Type | Description | +| ------------- | ------ | --------------------------------------------------------------------- | +| thresholdBps | uint16 | Threshold in basis points (6700 = 67%) | +| thresholdType | uint8 | 0 = CountBased (% of operators), 1 = StakeWeighted (% of total stake) | + +#### onAggregatedResult + +```solidity +function onAggregatedResult(uint64 serviceId, uint64 callId, uint256 signerBitmap, bytes output) external +``` + +Called when an aggregated result is submitted diff --git a/pages/developers/api/reference/IBlueprintServiceManager.mdx b/pages/developers/api/reference/IBlueprintServiceManager.mdx new file mode 100644 index 00000000..0ff78a57 --- /dev/null +++ b/pages/developers/api/reference/IBlueprintServiceManager.mdx @@ -0,0 +1,656 @@ +--- +title: IBlueprintServiceManager +description: Auto-generated Solidity API reference. +--- + +# IBlueprintServiceManager + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IBlueprintServiceManager.sol + +### IBlueprintServiceManager + +Full interface for blueprint-specific service managers + +\_Blueprint developers implement this to customize all aspects of their blueprint. +This is the primary integration point for blueprint developers - implement the hooks +you need and leave others as default (via BlueprintServiceManagerBase). + +The lifecycle flow: + +1. Blueprint created → onBlueprintCreated +2. Operators register → onRegister +3. Service requested → onRequest +4. Operators approve → onApprove +5. Service activated → onServiceInitialized +6. Jobs submitted → onJobCall +7. Results submitted → onJobResult +8. 
Service terminated → onServiceTermination\_ + +#### Functions + +#### onBlueprintCreated + +```solidity +function onBlueprintCreated(uint64 blueprintId, address owner, address tangleCore) external +``` + +Called when blueprint is created + +_Store the blueprintId and tangleCore address for future reference_ + +##### Parameters + +| Name | Type | Description | +| ----------- | ------- | --------------------------------------- | +| blueprintId | uint64 | The new blueprint ID | +| owner | address | The blueprint owner | +| tangleCore | address | The address of the Tangle core contract | + +#### onRegister + +```solidity +function onRegister(address operator, bytes registrationInputs) external payable +``` + +Called when an operator registers to this blueprint + +_Validate operator requirements here (stake, reputation, etc.)_ + +##### Parameters + +| Name | Type | Description | +| ------------------ | ------- | ------------------------------------------------------ | +| operator | address | The operator's address | +| registrationInputs | bytes | Custom registration data (blueprint-specific encoding) | + +#### onUnregister + +```solidity +function onUnregister(address operator) external +``` + +Called when an operator unregisters from this blueprint + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ---------------------- | +| operator | address | The operator's address | + +#### onUpdatePreferences + +```solidity +function onUpdatePreferences(address operator, bytes newPreferences) external payable +``` + +Called when an operator updates their preferences (RPC address, etc.) + +##### Parameters + +| Name | Type | Description | +| -------------- | ------- | ------------------------ | +| operator | address | The operator's address | +| newPreferences | bytes | Updated preferences data | + +#### getHeartbeatInterval + +```solidity +function getHeartbeatInterval(uint64 serviceId) external view returns (bool useDefault, uint64 interval) +``` + +Get the heartbeat interval for a service + +_Operators must submit heartbeats within this interval_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | + +##### Return Values + +| Name | Type | Description | +| ---------- | ------ | ------------------------------------------------------- | +| useDefault | bool | True to use protocol default, false to use custom value | +| interval | uint64 | Heartbeat interval in blocks (0 = disabled) | + +#### getHeartbeatThreshold + +```solidity +function getHeartbeatThreshold(uint64 serviceId) external view returns (bool useDefault, uint8 threshold) +``` + +Get the heartbeat threshold for a service + +_Percentage of operators that must respond within interval_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | + +##### Return Values + +| Name | Type | Description | +| ---------- | ----- | ---------------------------- | +| useDefault | bool | True to use protocol default | +| threshold | uint8 | Threshold percentage (0-100) | + +#### getSlashingWindow + +```solidity +function getSlashingWindow(uint64 serviceId) external view returns (bool useDefault, uint64 window) +``` + +Get the slashing window for a service + +_Time window for disputes before slash is finalized_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | + +##### Return Values + +| Name | 
Type | Description | +| ---------- | ------ | ---------------------------- | +| useDefault | bool | True to use protocol default | +| window | uint64 | Slashing window in blocks | + +#### getExitConfig + +```solidity +function getExitConfig(uint64 serviceId) external view returns (bool useDefault, uint64 minCommitmentDuration, uint64 exitQueueDuration, bool forceExitAllowed) +``` + +Get the exit configuration for operator departures + +_Defines minimum commitment and exit queue timing_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | + +##### Return Values + +| Name | Type | Description | +| --------------------- | ------ | -------------------------------------------------------- | +| useDefault | bool | True to use protocol default | +| minCommitmentDuration | uint64 | Minimum time operator must stay after joining (seconds) | +| exitQueueDuration | uint64 | Time between scheduling exit and completing it (seconds) | +| forceExitAllowed | bool | Whether service owner can force-exit operators | + +#### onRequest + +```solidity +function onRequest(uint64 requestId, address requester, address[] operators, bytes requestInputs, uint64 ttl, address paymentAsset, uint256 paymentAmount) external payable +``` + +Called when a service is requested + +_Validate service configuration, operator selection, payment amount_ + +##### Parameters + +| Name | Type | Description | +| ------------- | --------- | --------------------------------------------------- | +| requestId | uint64 | The request ID | +| requester | address | Who is requesting the service | +| operators | address[] | Requested operators | +| requestInputs | bytes | Service configuration (blueprint-specific encoding) | +| ttl | uint64 | Time-to-live for the service | +| paymentAsset | address | Payment token address (address(0) for native) | +| paymentAmount | uint256 | Payment amount | + +#### onApprove + +```solidity +function onApprove(address operator, uint64 requestId, uint8 stakingPercent) external payable +``` + +Called when an operator approves a service request + +##### Parameters + +| Name | Type | Description | +| -------------- | ------- | ----------------------------------------------------- | +| operator | address | The approving operator | +| requestId | uint64 | The request ID | +| stakingPercent | uint8 | Percentage of stake committed to this service (0-100) | + +#### onReject + +```solidity +function onReject(address operator, uint64 requestId) external +``` + +Called when an operator rejects a service request + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ---------------------- | +| operator | address | The rejecting operator | +| requestId | uint64 | The request ID | + +#### onServiceInitialized + +```solidity +function onServiceInitialized(uint64 blueprintId, uint64 requestId, uint64 serviceId, address owner, address[] permittedCallers, uint64 ttl) external +``` + +Called when service becomes active (all operators approved) + +##### Parameters + +| Name | Type | Description | +| ---------------- | --------- | -------------------------------- | +| blueprintId | uint64 | The blueprint ID | +| requestId | uint64 | The original request ID | +| serviceId | uint64 | The new service ID | +| owner | address | The service owner | +| permittedCallers | address[] | Addresses allowed to submit jobs | +| ttl | uint64 | Service time-to-live | + +#### onServiceTermination + +```solidity +function 
onServiceTermination(uint64 serviceId, address owner) external +``` + +Called when service is terminated + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ----------------- | +| serviceId | uint64 | The service ID | +| owner | address | The service owner | + +#### canJoin + +```solidity +function canJoin(uint64 serviceId, address operator) external view returns (bool allowed) +``` + +Check if an operator can join a dynamic service + +_Called before operator joins - return false to reject_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ---------------------------- | +| serviceId | uint64 | The service ID | +| operator | address | The operator wanting to join | + +##### Return Values + +| Name | Type | Description | +| ------- | ---- | ------------------------- | +| allowed | bool | True if operator can join | + +#### onOperatorJoined + +```solidity +function onOperatorJoined(uint64 serviceId, address operator, uint16 exposureBps) external +``` + +Called after an operator successfully joins a service + +##### Parameters + +| Name | Type | Description | +| ----------- | ------- | --------------------------------------------- | +| serviceId | uint64 | The service ID | +| operator | address | The operator that joined | +| exposureBps | uint16 | The operator's stake exposure in basis points | + +#### canLeave + +```solidity +function canLeave(uint64 serviceId, address operator) external view returns (bool allowed) +``` + +Check if an operator can leave a dynamic service + +_Called before operator leaves - return false to reject +Note: This is called AFTER the exit queue check. Use getExitConfig to customize timing._ + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ----------------------------- | +| serviceId | uint64 | The service ID | +| operator | address | The operator wanting to leave | + +##### Return Values + +| Name | Type | Description | +| ------- | ---- | -------------------------- | +| allowed | bool | True if operator can leave | + +#### onOperatorLeft + +```solidity +function onOperatorLeft(uint64 serviceId, address operator) external +``` + +Called after an operator successfully leaves a service + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ---------------------- | +| serviceId | uint64 | The service ID | +| operator | address | The operator that left | + +#### onExitScheduled + +```solidity +function onExitScheduled(uint64 serviceId, address operator, uint64 executeAfter) external +``` + +Called when an operator schedules their exit from a service + +_Allows manager to track pending exits, notify other parties, etc._ + +##### Parameters + +| Name | Type | Description | +| ------------ | ------- | ----------------------------------- | +| serviceId | uint64 | The service ID | +| operator | address | The operator scheduling exit | +| executeAfter | uint64 | Timestamp when exit can be executed | + +#### onExitCanceled + +```solidity +function onExitCanceled(uint64 serviceId, address operator) external +``` + +Called when an operator cancels their scheduled exit + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | --------------------------- | +| serviceId | uint64 | The service ID | +| operator | address | The operator canceling exit | + +#### onJobCall + +```solidity +function onJobCall(uint64 serviceId, uint8 job, uint64 jobCallId, bytes inputs) external payable +``` + +Called when a job is submitted + +_Validate job inputs, check caller 
permissions, etc._ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | ---------------------------------------- | +| serviceId | uint64 | The service ID | +| job | uint8 | The job index in the blueprint | +| jobCallId | uint64 | Unique ID for this job call | +| inputs | bytes | Job inputs (blueprint-specific encoding) | + +#### onJobResult + +```solidity +function onJobResult(uint64 serviceId, uint8 job, uint64 jobCallId, address operator, bytes inputs, bytes outputs) external payable +``` + +Called when an operator submits a job result + +_Validate result format, check operator eligibility, aggregate results_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | -------------------------------------------- | +| serviceId | uint64 | The service ID | +| job | uint8 | The job index | +| jobCallId | uint64 | The job call ID | +| operator | address | The operator submitting | +| inputs | bytes | Original job inputs | +| outputs | bytes | Result outputs (blueprint-specific encoding) | + +#### onUnappliedSlash + +```solidity +function onUnappliedSlash(uint64 serviceId, bytes offender, uint8 slashPercent) external +``` + +Called when a slash is queued but not yet applied + +_This is the dispute window - gather evidence, notify parties_ + +##### Parameters + +| Name | Type | Description | +| ------------ | ------ | ------------------------------------------------------------- | +| serviceId | uint64 | The service ID | +| offender | bytes | The operator being slashed (encoded as bytes for flexibility) | +| slashPercent | uint8 | Percentage of stake to slash | + +#### onSlash + +```solidity +function onSlash(uint64 serviceId, bytes offender, uint8 slashPercent) external +``` + +Called when a slash is finalized and applied + +##### Parameters + +| Name | Type | Description | +| ------------ | ------ | -------------------- | +| serviceId | uint64 | The service ID | +| offender | bytes | The slashed operator | +| slashPercent | uint8 | Percentage slashed | + +#### querySlashingOrigin + +```solidity +function querySlashingOrigin(uint64 serviceId) external view returns (address slashingOrigin) +``` + +Query the account authorized to propose slashes for a service + +_Override to allow custom slashing authorities (dispute contracts, etc.)_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | + +##### Return Values + +| Name | Type | Description | +| -------------- | ------- | ----------------------------------------------- | +| slashingOrigin | address | Address that can slash (default: this contract) | + +#### queryDisputeOrigin + +```solidity +function queryDisputeOrigin(uint64 serviceId) external view returns (address disputeOrigin) +``` + +Query the account authorized to dispute slashes + +_Override to allow custom dispute resolution_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | + +##### Return Values + +| Name | Type | Description | +| ------------- | ------- | ------------------------------------------------- | +| disputeOrigin | address | Address that can dispute (default: this contract) | + +#### queryDeveloperPaymentAddress + +```solidity +function queryDeveloperPaymentAddress(uint64 serviceId) external view returns (address payable developerPaymentAddress) +``` + +Get the developer payment address for a service + +_Override to route payments to different addresses per service_ + 
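For illustration only, a manager could route every service's developer share to a single treasury address. The contract name and `treasury` field below are assumptions, not part of the generated reference:

```solidity
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;

/// Illustrative sketch only: routes the developer share for every service
/// to one treasury address. The contract name and `treasury` field are
/// assumptions, not part of the generated tnt-core reference.
contract TreasuryPaymentManager {
    address payable public treasury;

    constructor(address payable treasury_) {
        treasury = treasury_;
    }

    /// Mirrors the hook documented above: return the address that should
    /// receive the developer share for the given service.
    function queryDeveloperPaymentAddress(uint64 /* serviceId */)
        external
        view
        returns (address payable developerPaymentAddress)
    {
        return treasury;
    }
}
```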
+##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | + +##### Return Values + +| Name | Type | Description | +| ----------------------- | --------------- | ---------------------------------- | +| developerPaymentAddress | address payable | Address to receive developer share | + +#### queryIsPaymentAssetAllowed + +```solidity +function queryIsPaymentAssetAllowed(uint64 serviceId, address asset) external view returns (bool isAllowed) +``` + +Check if a payment asset is allowed for this blueprint + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ------------------------------------------------- | +| serviceId | uint64 | The service ID | +| asset | address | The payment asset address (address(0) for native) | + +##### Return Values + +| Name | Type | Description | +| --------- | ---- | ----------------------------------------- | +| isAllowed | bool | True if the asset can be used for payment | + +#### getRequiredResultCount + +```solidity +function getRequiredResultCount(uint64 serviceId, uint8 jobIndex) external view returns (uint32 required) +``` + +Get the number of results required to complete a job + +_Override for consensus requirements (e.g., 2/3 majority)_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | +| jobIndex | uint8 | The job index | + +##### Return Values + +| Name | Type | Description | +| -------- | ------ | ----------------------------------------------------- | +| required | uint32 | Number of results needed (0 = service operator count) | + +#### requiresAggregation + +```solidity +function requiresAggregation(uint64 serviceId, uint8 jobIndex) external view returns (bool required) +``` + +Check if a job requires BLS aggregated results + +_When true, operators must submit individual signatures that are aggregated +off-chain, then submitted via submitAggregatedResult instead of submitResult_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | +| jobIndex | uint8 | The job index | + +##### Return Values + +| Name | Type | Description | +| -------- | ---- | ------------------------------------------------ | +| required | bool | True if BLS aggregation is required for this job | + +#### getAggregationThreshold + +```solidity +function getAggregationThreshold(uint64 serviceId, uint8 jobIndex) external view returns (uint16 thresholdBps, uint8 thresholdType) +``` + +Get the aggregation threshold configuration for a job + +_Only relevant if requiresAggregation returns true_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | +| jobIndex | uint8 | The job index | + +##### Return Values + +| Name | Type | Description | +| ------------- | ------ | --------------------------------------------------------------------- | +| thresholdBps | uint16 | Threshold in basis points (6700 = 67%) | +| thresholdType | uint8 | 0 = CountBased (% of operators), 1 = StakeWeighted (% of total stake) | + +#### onAggregatedResult + +```solidity +function onAggregatedResult(uint64 serviceId, uint8 job, uint64 jobCallId, bytes output, uint256 signerBitmap, uint256[2] aggregatedSignature, uint256[4] aggregatedPubkey) external +``` + +Called when an aggregated job result is submitted + +_Validate the aggregated result, verify BLS signature, check threshold_ 
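The aggregation hooks above are typically used together. As a hypothetical sketch (the contract name and constants are illustrative, not from the generated reference), a manager could opt every job into BLS aggregation with a 67% count-based threshold:

```solidity
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;

/// Illustrative sketch only: opts every job into BLS aggregation with a
/// 2/3 count-based threshold, mirroring the hook signatures documented above.
contract TwoThirdsAggregationManager {
    uint16 internal constant THRESHOLD_BPS = 6700; // 67% in basis points
    uint8 internal constant COUNT_BASED = 0;       // 0 = CountBased per the reference above

    function requiresAggregation(uint64 /* serviceId */, uint8 /* jobIndex */)
        external
        pure
        returns (bool required)
    {
        return true;
    }

    function getAggregationThreshold(uint64 /* serviceId */, uint8 /* jobIndex */)
        external
        pure
        returns (uint16 thresholdBps, uint8 thresholdType)
    {
        return (THRESHOLD_BPS, COUNT_BASED);
    }
}
```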
+ +##### Parameters + +| Name | Type | Description | +| ------------------- | ---------- | ----------------------------------------------- | +| serviceId | uint64 | The service ID | +| job | uint8 | The job index | +| jobCallId | uint64 | The job call ID | +| output | bytes | The aggregated output | +| signerBitmap | uint256 | Bitmap of which operators signed | +| aggregatedSignature | uint256[2] | The aggregated BLS signature (G1 point x, y) | +| aggregatedPubkey | uint256[4] | The aggregated public key of signers (G2 point) | + +#### getMinOperatorStake + +```solidity +function getMinOperatorStake() external view returns (bool useDefault, uint256 minStake) +``` + +Get the minimum stake required for operators to register for this blueprint + +_Called during operator registration to validate stake requirements_ + +##### Return Values + +| Name | Type | Description | +| ---------- | ------- | ----------------------------------------------------------- | +| useDefault | bool | True to use protocol default from staking module | +| minStake | uint256 | Custom minimum stake amount (only used if useDefault=false) | diff --git a/pages/developers/api/reference/IERC7540.mdx b/pages/developers/api/reference/IERC7540.mdx new file mode 100644 index 00000000..f4ba2d5f --- /dev/null +++ b/pages/developers/api/reference/IERC7540.mdx @@ -0,0 +1,14 @@ +--- +title: IERC7540 +description: Auto-generated Solidity API reference. +--- + +# IERC7540 + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IERC7540.sol + +### IERC7540 + +Full ERC7540 interface combining deposit, redeem, and operator management + +_Extends ERC4626 with asynchronous request patterns_ diff --git a/pages/developers/api/reference/IERC7540Deposit.mdx b/pages/developers/api/reference/IERC7540Deposit.mdx new file mode 100644 index 00000000..b9cf7f1a --- /dev/null +++ b/pages/developers/api/reference/IERC7540Deposit.mdx @@ -0,0 +1,90 @@ +--- +title: IERC7540Deposit +description: Auto-generated Solidity API reference. 
+--- + +# IERC7540Deposit + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IERC7540.sol + +### IERC7540Deposit + +Interface for asynchronous deposit requests + +_See https://eips.ethereum.org/EIPS/eip-7540_ + +#### Functions + +#### requestDeposit + +```solidity +function requestDeposit(uint256 assets, address controller, address owner) external returns (uint256 requestId) +``` + +Request an asynchronous deposit + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | --------------------------------- | +| assets | uint256 | Amount of assets to deposit | +| controller | address | Address that controls the request | +| owner | address | Address that owns the assets | + +##### Return Values + +| Name | Type | Description | +| --------- | ------- | ---------------------------------- | +| requestId | uint256 | Unique identifier for this request | + +#### pendingDepositRequest + +```solidity +function pendingDepositRequest(uint256 requestId, address controller) external view returns (uint256 assets) +``` + +Get pending deposit request amount + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | ---------------------- | +| requestId | uint256 | The request identifier | +| controller | address | The controller address | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | ------------------------ | +| assets | uint256 | Amount of assets pending | + +#### claimableDepositRequest + +```solidity +function claimableDepositRequest(uint256 requestId, address controller) external view returns (uint256 assets) +``` + +Get claimable deposit request amount + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | ---------------------- | +| requestId | uint256 | The request identifier | +| controller | address | The controller address | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | -------------------------- | +| assets | uint256 | Amount of assets claimable | + +#### Events + +#### DepositRequest + +```solidity +event DepositRequest(address controller, address owner, uint256 requestId, address sender, uint256 assets) +``` + +Emitted when a deposit request is created diff --git a/pages/developers/api/reference/IERC7540Operator.mdx b/pages/developers/api/reference/IERC7540Operator.mdx new file mode 100644 index 00000000..9924218c --- /dev/null +++ b/pages/developers/api/reference/IERC7540Operator.mdx @@ -0,0 +1,66 @@ +--- +title: IERC7540Operator +description: Auto-generated Solidity API reference. 
+--- + +# IERC7540Operator + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IERC7540.sol + +### IERC7540Operator + +Interface for operator management in ERC7540 + +#### Functions + +#### isOperator + +```solidity +function isOperator(address controller, address operator) external view returns (bool status) +``` + +Check if operator is approved for controller + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | ---------------------- | +| controller | address | The controller address | +| operator | address | The operator address | + +##### Return Values + +| Name | Type | Description | +| ------ | ---- | ---------------- | +| status | bool | True if approved | + +#### setOperator + +```solidity +function setOperator(address operator, bool approved) external returns (bool success) +``` + +Grant or revoke operator permissions + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------------------- | +| operator | address | The operator address | +| approved | bool | True to approve, false to revoke | + +##### Return Values + +| Name | Type | Description | +| ------- | ---- | ------------------ | +| success | bool | True if successful | + +#### Events + +#### OperatorSet + +```solidity +event OperatorSet(address controller, address operator, bool approved) +``` + +Emitted when operator approval changes diff --git a/pages/developers/api/reference/IERC7540Redeem.mdx b/pages/developers/api/reference/IERC7540Redeem.mdx new file mode 100644 index 00000000..b8114962 --- /dev/null +++ b/pages/developers/api/reference/IERC7540Redeem.mdx @@ -0,0 +1,90 @@ +--- +title: IERC7540Redeem +description: Auto-generated Solidity API reference. +--- + +# IERC7540Redeem + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IERC7540.sol + +### IERC7540Redeem + +Interface for asynchronous redemption requests + +_See https://eips.ethereum.org/EIPS/eip-7540_ + +#### Functions + +#### requestRedeem + +```solidity +function requestRedeem(uint256 shares, address controller, address owner) external returns (uint256 requestId) +``` + +Request an asynchronous redemption + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | --------------------------------- | +| shares | uint256 | Amount of shares to redeem | +| controller | address | Address that controls the request | +| owner | address | Address that owns the shares | + +##### Return Values + +| Name | Type | Description | +| --------- | ------- | ---------------------------------- | +| requestId | uint256 | Unique identifier for this request | + +#### pendingRedeemRequest + +```solidity +function pendingRedeemRequest(uint256 requestId, address controller) external view returns (uint256 shares) +``` + +Get pending redeem request amount + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | ---------------------- | +| requestId | uint256 | The request identifier | +| controller | address | The controller address | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | ------------------------ | +| shares | uint256 | Amount of shares pending | + +#### claimableRedeemRequest + +```solidity +function claimableRedeemRequest(uint256 requestId, address controller) external view returns (uint256 shares) +``` + +Get claimable redeem request amount + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | ---------------------- | +| requestId | uint256 | The request 
identifier | +| controller | address | The controller address | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | -------------------------- | +| shares | uint256 | Amount of shares claimable | + +#### Events + +#### RedeemRequest + +```solidity +event RedeemRequest(address controller, address owner, uint256 requestId, address sender, uint256 shares) +``` + +Emitted when a redeem request is created diff --git a/pages/developers/api/reference/IFacetSelectors.mdx b/pages/developers/api/reference/IFacetSelectors.mdx new file mode 100644 index 00000000..08992cdf --- /dev/null +++ b/pages/developers/api/reference/IFacetSelectors.mdx @@ -0,0 +1,22 @@ +--- +title: IFacetSelectors +description: Auto-generated Solidity API reference. +--- + +# IFacetSelectors + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IFacetSelectors.sol + +### IFacetSelectors + +Standard interface for facet selector discovery + +#### Functions + +#### selectors + +```solidity +function selectors() external pure returns (bytes4[]) +``` + +Return the selectors this facet wants registered diff --git a/pages/developers/api/reference/IMBSMRegistry.mdx b/pages/developers/api/reference/IMBSMRegistry.mdx new file mode 100644 index 00000000..4d8e110f --- /dev/null +++ b/pages/developers/api/reference/IMBSMRegistry.mdx @@ -0,0 +1,100 @@ +--- +title: IMBSMRegistry +description: Auto-generated Solidity API reference. +--- + +# IMBSMRegistry + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IMBSMRegistry.sol + +### IMBSMRegistry + +Minimal interface for the Master Blueprint Service Manager registry + +#### Functions + +#### getMBSM + +```solidity +function getMBSM(uint64 blueprintId) external view returns (address mbsmAddress) +``` + +Get the MBSM address currently pinned for a blueprint + +##### Parameters + +| Name | Type | Description | +| ----------- | ------ | ------------------------ | +| blueprintId | uint64 | The blueprint identifier | + +##### Return Values + +| Name | Type | Description | +| ----------- | ------- | ----------------------------------------- | +| mbsmAddress | address | The pinned MBSM (or latest if not pinned) | + +#### getPinnedRevision + +```solidity +function getPinnedRevision(uint64 blueprintId) external view returns (uint32 revision) +``` + +Get the revision pinned for a blueprint (0 = latest) + +#### getLatestMBSM + +```solidity +function getLatestMBSM() external view returns (address mbsmAddress) +``` + +Get the latest registered MBSM address + +##### Return Values + +| Name | Type | Description | +| ----------- | ------- | --------------- | +| mbsmAddress | address | The latest MBSM | + +#### getMBSMByRevision + +```solidity +function getMBSMByRevision(uint32 revision) external view returns (address mbsmAddress) +``` + +Get an MBSM by explicit revision + +##### Parameters + +| Name | Type | Description | +| -------- | ------ | --------------------------------- | +| revision | uint32 | The registry revision (1-indexed) | + +##### Return Values + +| Name | Type | Description | +| ----------- | ------- | --------------------------------------- | +| mbsmAddress | address | The registered address for the revision | + +#### getLatestRevision + +```solidity +function getLatestRevision() external view returns (uint32) +``` + +Get the latest revision number registered in the registry + +#### pinBlueprint + +```solidity +function pinBlueprint(uint64 blueprintId, uint32 revision) external +``` + +Pin a blueprint to a specific 
revision (0 disallowed) + +#### unpinBlueprint + +```solidity +function unpinBlueprint(uint64 blueprintId) external +``` + +Unpin a blueprint (reverting to latest) diff --git a/pages/developers/api/reference/IMasterBlueprintServiceManager.mdx b/pages/developers/api/reference/IMasterBlueprintServiceManager.mdx new file mode 100644 index 00000000..9da62957 --- /dev/null +++ b/pages/developers/api/reference/IMasterBlueprintServiceManager.mdx @@ -0,0 +1,30 @@ +--- +title: IMasterBlueprintServiceManager +description: Auto-generated Solidity API reference. +--- + +# IMasterBlueprintServiceManager + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IMasterBlueprintServiceManager.sol + +### IMasterBlueprintServiceManager + +Interface for the protocol-wide master blueprint service manager + +#### Functions + +#### onBlueprintCreated + +```solidity +function onBlueprintCreated(uint64 blueprintId, address owner, bytes encodedDefinition) external +``` + +Called when a new blueprint is created + +##### Parameters + +| Name | Type | Description | +| ----------------- | ------- | ------------------------------------- | +| blueprintId | uint64 | The newly assigned blueprint ID | +| owner | address | The blueprint owner | +| encodedDefinition | bytes | ABI-encoded blueprint definition data | diff --git a/pages/developers/api/reference/IMetricsRecorder.mdx b/pages/developers/api/reference/IMetricsRecorder.mdx new file mode 100644 index 00000000..fc4ed35a --- /dev/null +++ b/pages/developers/api/reference/IMetricsRecorder.mdx @@ -0,0 +1,210 @@ +--- +title: IMetricsRecorder +description: Auto-generated Solidity API reference. +--- + +# IMetricsRecorder + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IMetricsRecorder.sol + +### IMetricsRecorder + +Minimal interface for recording protocol activity metrics + +_Implemented by TangleMetrics, called by core contracts_ + +#### Functions + +#### recordStake + +```solidity +function recordStake(address delegator, address operator, address asset, uint256 amount) external +``` + +Record a stake/delegation event + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ---------------------------------------------- | +| delegator | address | The delegator address | +| operator | address | The operator receiving delegation | +| asset | address | The asset being staked (address(0) for native) | +| amount | uint256 | The amount staked | + +#### recordUnstake + +```solidity +function recordUnstake(address delegator, address operator, address asset, uint256 amount) external +``` + +Record an unstake event + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ------------------------------ | +| delegator | address | The delegator address | +| operator | address | The operator losing delegation | +| asset | address | The asset being unstaked | +| amount | uint256 | The amount unstaked | + +#### recordOperatorRegistered + +```solidity +function recordOperatorRegistered(address operator, address asset, uint256 amount) external +``` + +Record operator registration + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------- | +| operator | address | The operator address | +| asset | address | The asset staked | +| amount | uint256 | Initial stake amount | + +#### recordHeartbeat + +```solidity +function recordHeartbeat(address operator, uint64 serviceId, uint64 timestamp) external +``` + +Record operator heartbeat (liveness proof) + +##### 
Parameters + +| Name | Type | Description | +| --------- | ------- | ---------------------------- | +| operator | address | The operator address | +| serviceId | uint64 | The service ID | +| timestamp | uint64 | Block timestamp of heartbeat | + +#### recordJobCompletion + +```solidity +function recordJobCompletion(address operator, uint64 serviceId, uint64 jobCallId, bool success) external +``` + +Record job completion by operator + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ------------------------- | +| operator | address | The operator address | +| serviceId | uint64 | The service ID | +| jobCallId | uint64 | The job call ID | +| success | bool | Whether the job succeeded | + +#### recordSlash + +```solidity +function recordSlash(address operator, uint64 serviceId, uint256 amount) external +``` + +Record operator slashing (negative metric) + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | -------------------- | +| operator | address | The operator address | +| serviceId | uint64 | The service ID | +| amount | uint256 | Amount slashed | + +#### recordServiceCreated + +```solidity +function recordServiceCreated(uint64 serviceId, uint64 blueprintId, address owner, uint256 operatorCount) external +``` + +Record service creation/activation + +##### Parameters + +| Name | Type | Description | +| ------------- | ------- | ------------------- | +| serviceId | uint64 | The service ID | +| blueprintId | uint64 | The blueprint ID | +| owner | address | The service owner | +| operatorCount | uint256 | Number of operators | + +#### recordServiceTerminated + +```solidity +function recordServiceTerminated(uint64 serviceId, uint256 duration) external +``` + +Record service termination + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ---------------------------------- | +| serviceId | uint64 | The service ID | +| duration | uint256 | How long the service ran (seconds) | + +#### recordJobCall + +```solidity +function recordJobCall(uint64 serviceId, address caller, uint64 jobCallId) external +``` + +Record a job call on a service + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | --------------------- | +| serviceId | uint64 | The service ID | +| caller | address | Who initiated the job | +| jobCallId | uint64 | The job call ID | + +#### recordPayment + +```solidity +function recordPayment(address payer, uint64 serviceId, address token, uint256 amount) external +``` + +Record fee payment for a service + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ----------------------------------------- | +| payer | address | Who paid the fee | +| serviceId | uint64 | The service ID | +| token | address | The payment token (address(0) for native) | +| amount | uint256 | The amount paid | + +#### recordBlueprintCreated + +```solidity +function recordBlueprintCreated(uint64 blueprintId, address developer) external +``` + +Record blueprint creation + +##### Parameters + +| Name | Type | Description | +| ----------- | ------- | --------------------- | +| blueprintId | uint64 | The blueprint ID | +| developer | address | The developer address | + +#### recordBlueprintRegistration + +```solidity +function recordBlueprintRegistration(uint64 blueprintId, address operator) external +``` + +Record operator registration to a blueprint + +##### Parameters + +| Name | Type | Description | +| ----------- | ------- | -------------------- | +| blueprintId | uint64 | The blueprint ID | +| 
operator | address | The operator address | diff --git a/pages/developers/api/reference/IMultiAssetDelegation.mdx b/pages/developers/api/reference/IMultiAssetDelegation.mdx new file mode 100644 index 00000000..7a755261 --- /dev/null +++ b/pages/developers/api/reference/IMultiAssetDelegation.mdx @@ -0,0 +1,739 @@ +--- +title: IMultiAssetDelegation +description: Auto-generated Solidity API reference. +--- + +# IMultiAssetDelegation + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IMultiAssetDelegation.sol + +### IMultiAssetDelegation + +Full interface for the multi-asset staking contract + +#### Functions + +#### registerOperator + +```solidity +function registerOperator() external payable +``` + +#### registerOperatorWithAsset + +```solidity +function registerOperatorWithAsset(address token, uint256 amount) external +``` + +#### increaseStake + +```solidity +function increaseStake() external payable +``` + +#### scheduleOperatorUnstake + +```solidity +function scheduleOperatorUnstake(uint256 amount) external +``` + +#### executeOperatorUnstake + +```solidity +function executeOperatorUnstake() external +``` + +#### addBlueprint + +```solidity +function addBlueprint(uint64 blueprintId) external +``` + +#### removeBlueprint + +```solidity +function removeBlueprint(uint64 blueprintId) external +``` + +#### startLeaving + +```solidity +function startLeaving() external +``` + +#### completeLeaving + +```solidity +function completeLeaving() external +``` + +#### deposit + +```solidity +function deposit() external payable +``` + +#### depositWithLock + +```solidity +function depositWithLock(enum Types.LockMultiplier lockMultiplier) external payable +``` + +#### depositERC20 + +```solidity +function depositERC20(address token, uint256 amount) external +``` + +#### depositERC20WithLock + +```solidity +function depositERC20WithLock(address token, uint256 amount, enum Types.LockMultiplier lockMultiplier) external +``` + +#### scheduleWithdraw + +```solidity +function scheduleWithdraw(address token, uint256 amount) external +``` + +#### executeWithdraw + +```solidity +function executeWithdraw() external +``` + +#### depositAndDelegate + +```solidity +function depositAndDelegate(address operator) external payable +``` + +#### depositAndDelegateWithOptions + +```solidity +function depositAndDelegateWithOptions(address operator, address token, uint256 amount, enum Types.BlueprintSelectionMode selectionMode, uint64[] blueprintIds) external payable +``` + +#### delegate + +```solidity +function delegate(address operator, uint256 amount) external +``` + +#### delegateWithOptions + +```solidity +function delegateWithOptions(address operator, address token, uint256 amount, enum Types.BlueprintSelectionMode selectionMode, uint64[] blueprintIds) external +``` + +#### scheduleDelegatorUnstake + +```solidity +function scheduleDelegatorUnstake(address operator, address token, uint256 amount) external +``` + +#### undelegate + +```solidity +function undelegate(address operator, uint256 amount) external +``` + +#### executeDelegatorUnstake + +```solidity +function executeDelegatorUnstake() external +``` + +#### executeDelegatorUnstakeAndWithdraw + +```solidity +function executeDelegatorUnstakeAndWithdraw(address operator, address token, uint256 shares, uint64 requestedRound, address receiver) external returns (uint256 amount) +``` + +Execute a specific matured unstake request and withdraw the resulting assets to `receiver`. + +_Convenience helper for integrations (e.g. 
ERC7540 liquid delegation vaults) to avoid a separate +scheduleWithdraw/executeWithdraw flow after bond-less delay has already elapsed._ + +##### Parameters + +| Name | Type | Description | +| -------------- | ------- | ----------------------------------------------------------------- | +| operator | address | Operator to unstake from | +| token | address | Token address (address(0) for native) | +| shares | uint256 | Shares to unstake (as stored in the underlying bond-less request) | +| requestedRound | uint64 | Round in which the unstake was scheduled | +| receiver | address | Recipient of the withdrawn assets | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | --------------------------------------------------------------------- | +| amount | uint256 | Actual amount returned (after exchange-rate + lazy-slash adjustments) | + +#### addBlueprintToDelegation + +```solidity +function addBlueprintToDelegation(uint256 delegationIndex, uint64 blueprintId) external +``` + +#### removeBlueprintFromDelegation + +```solidity +function removeBlueprintFromDelegation(uint256 delegationIndex, uint64 blueprintId) external +``` + +#### slashForBlueprint + +```solidity +function slashForBlueprint(address operator, uint64 blueprintId, uint64 serviceId, uint256 amount, bytes32 evidence) external returns (uint256 actualSlashed) +``` + +#### slashForService + +```solidity +function slashForService(address operator, uint64 blueprintId, uint64 serviceId, struct Types.AssetSecurityCommitment[] commitments, uint256 amount, bytes32 evidence) external returns (uint256 actualSlashed) +``` + +#### slash + +```solidity +function slash(address operator, uint64 serviceId, uint256 amount, bytes32 evidence) external returns (uint256 actualSlashed) +``` + +#### advanceRound + +```solidity +function advanceRound() external +``` + +#### snapshotOperator + +```solidity +function snapshotOperator(address operator) external +``` + +#### enableAsset + +```solidity +function enableAsset(address token, uint256 minOperatorStake, uint256 minDelegation, uint256 depositCap, uint16 rewardMultiplierBps) external +``` + +#### disableAsset + +```solidity +function disableAsset(address token) external +``` + +#### getAssetConfig + +```solidity +function getAssetConfig(address token) external view returns (struct Types.AssetConfig) +``` + +#### registerAdapter + +```solidity +function registerAdapter(address token, address adapter) external +``` + +#### removeAdapter + +```solidity +function removeAdapter(address token) external +``` + +#### setRequireAdapters + +```solidity +function setRequireAdapters(bool required) external +``` + +#### enableAssetWithAdapter + +```solidity +function enableAssetWithAdapter(address token, address adapter, uint256 minOperatorStake, uint256 minDelegation, uint256 depositCap, uint16 rewardMultiplierBps) external +``` + +#### isOperator + +```solidity +function isOperator(address operator) external view returns (bool) +``` + +#### isOperatorActive + +```solidity +function isOperatorActive(address operator) external view returns (bool) +``` + +#### getOperatorStake + +```solidity +function getOperatorStake(address operator) external view returns (uint256) +``` + +#### getOperatorSelfStake + +```solidity +function getOperatorSelfStake(address operator) external view returns (uint256) +``` + +#### getOperatorDelegatedStake + +```solidity +function getOperatorDelegatedStake(address operator) external view returns (uint256) +``` + +#### getDelegation + +```solidity +function 
getDelegation(address delegator, address operator) external view returns (uint256) +``` + +#### getTotalDelegation + +```solidity +function getTotalDelegation(address delegator) external view returns (uint256 total) +``` + +#### minOperatorStake + +```solidity +function minOperatorStake() external view returns (uint256) +``` + +#### meetsStakeRequirement + +```solidity +function meetsStakeRequirement(address operator, uint256 required) external view returns (bool) +``` + +#### isSlasher + +```solidity +function isSlasher(address account) external view returns (bool) +``` + +#### getOperatorMetadata + +```solidity +function getOperatorMetadata(address operator) external view returns (struct Types.OperatorMetadata) +``` + +#### getOperatorBlueprints + +```solidity +function getOperatorBlueprints(address operator) external view returns (uint256[]) +``` + +#### operatorCount + +```solidity +function operatorCount() external view returns (uint256) +``` + +#### operatorAt + +```solidity +function operatorAt(uint256 index) external view returns (address) +``` + +#### getDeposit + +```solidity +function getDeposit(address delegator, address token) external view returns (struct Types.Deposit) +``` + +#### getPendingWithdrawals + +```solidity +function getPendingWithdrawals(address delegator) external view returns (struct Types.WithdrawRequest[]) +``` + +#### getLocks + +```solidity +function getLocks(address delegator, address token) external view returns (struct Types.LockInfo[]) +``` + +#### getDelegations + +```solidity +function getDelegations(address delegator) external view returns (struct Types.BondInfoDelegator[]) +``` + +#### getDelegationBlueprints + +```solidity +function getDelegationBlueprints(address delegator, uint256 idx) external view returns (uint64[]) +``` + +#### getPendingUnstakes + +```solidity +function getPendingUnstakes(address delegator) external view returns (struct Types.BondLessRequest[]) +``` + +#### getOperatorRewardPool + +```solidity +function getOperatorRewardPool(address operator) external view returns (struct Types.OperatorRewardPool) +``` + +#### getOperatorDelegators + +```solidity +function getOperatorDelegators(address operator) external view returns (address[]) +``` + +#### getOperatorDelegatorCount + +```solidity +function getOperatorDelegatorCount(address operator) external view returns (uint256) +``` + +#### rewardsManager + +```solidity +function rewardsManager() external view returns (address) +``` + +#### serviceFeeDistributor + +```solidity +function serviceFeeDistributor() external view returns (address) +``` + +#### getSlashImpact + +```solidity +function getSlashImpact(address operator, uint64 slashIndex, address delegator) external view returns (uint256) +``` + +#### getSlashCount + +```solidity +function getSlashCount(address operator) external view returns (uint64) +``` + +#### getSlashRecord + +```solidity +function getSlashRecord(address operator, uint64 slashIndex) external view returns (struct SlashingManager.SlashRecord) +``` + +#### getSlashCountForService + +```solidity +function getSlashCountForService(uint64 serviceId, address operator) external view returns (uint64) +``` + +#### getSlashCountForBlueprint + +```solidity +function getSlashCountForBlueprint(uint64 blueprintId, address operator) external view returns (uint64) +``` + +#### currentRound + +```solidity +function currentRound() external view returns (uint64) +``` + +#### roundDuration + +```solidity +function roundDuration() external view returns (uint64) +``` + +#### 
delegationBondLessDelay + +```solidity +function delegationBondLessDelay() external view returns (uint64) +``` + +#### leaveDelegatorsDelay + +```solidity +function leaveDelegatorsDelay() external view returns (uint64) +``` + +#### leaveOperatorsDelay + +```solidity +function leaveOperatorsDelay() external view returns (uint64) +``` + +#### operatorCommissionBps + +```solidity +function operatorCommissionBps() external view returns (uint16) +``` + +#### LOCK_ONE_MONTH + +```solidity +function LOCK_ONE_MONTH() external view returns (uint64) +``` + +#### LOCK_TWO_MONTHS + +```solidity +function LOCK_TWO_MONTHS() external view returns (uint64) +``` + +#### LOCK_THREE_MONTHS + +```solidity +function LOCK_THREE_MONTHS() external view returns (uint64) +``` + +#### LOCK_SIX_MONTHS + +```solidity +function LOCK_SIX_MONTHS() external view returns (uint64) +``` + +#### MULTIPLIER_NONE + +```solidity +function MULTIPLIER_NONE() external view returns (uint16) +``` + +#### MULTIPLIER_ONE_MONTH + +```solidity +function MULTIPLIER_ONE_MONTH() external view returns (uint16) +``` + +#### MULTIPLIER_TWO_MONTHS + +```solidity +function MULTIPLIER_TWO_MONTHS() external view returns (uint16) +``` + +#### MULTIPLIER_THREE_MONTHS + +```solidity +function MULTIPLIER_THREE_MONTHS() external view returns (uint16) +``` + +#### MULTIPLIER_SIX_MONTHS + +```solidity +function MULTIPLIER_SIX_MONTHS() external view returns (uint16) +``` + +#### addSlasher + +```solidity +function addSlasher(address slasher) external +``` + +#### removeSlasher + +```solidity +function removeSlasher(address slasher) external +``` + +#### setOperatorCommission + +```solidity +function setOperatorCommission(uint16 bps) external +``` + +#### setDelays + +```solidity +function setDelays(uint64 delegationBondLessDelay, uint64 leaveDelegatorsDelay, uint64 leaveOperatorsDelay) external +``` + +#### setRewardsManager + +```solidity +function setRewardsManager(address manager) external +``` + +#### setServiceFeeDistributor + +```solidity +function setServiceFeeDistributor(address distributor) external +``` + +#### pause + +```solidity +function pause() external +``` + +#### unpause + +```solidity +function unpause() external +``` + +#### rescueTokens + +```solidity +function rescueTokens(address token, address to, uint256 amount) external +``` + +#### Events + +#### AssetEnabled + +```solidity +event AssetEnabled(address token, uint256 minOperatorStake, uint256 minDelegation) +``` + +#### AssetDisabled + +```solidity +event AssetDisabled(address token) +``` + +#### RoundAdvanced + +```solidity +event RoundAdvanced(uint64 round) +``` + +#### OperatorRegistered + +```solidity +event OperatorRegistered(address operator, uint256 stake) +``` + +#### OperatorStakeIncreased + +```solidity +event OperatorStakeIncreased(address operator, uint256 amount) +``` + +#### OperatorUnstakeScheduled + +```solidity +event OperatorUnstakeScheduled(address operator, uint256 amount, uint64 readyRound) +``` + +#### OperatorUnstakeExecuted + +```solidity +event OperatorUnstakeExecuted(address operator, uint256 amount) +``` + +#### OperatorLeavingScheduled + +```solidity +event OperatorLeavingScheduled(address operator, uint64 readyRound) +``` + +#### OperatorLeft + +```solidity +event OperatorLeft(address operator) +``` + +#### OperatorBlueprintAdded + +```solidity +event OperatorBlueprintAdded(address operator, uint64 blueprintId) +``` + +#### OperatorBlueprintRemoved + +```solidity +event OperatorBlueprintRemoved(address operator, uint64 blueprintId) +``` + +#### 
Deposited + +```solidity +event Deposited(address delegator, address token, uint256 amount, enum Types.LockMultiplier lock) +``` + +#### WithdrawScheduled + +```solidity +event WithdrawScheduled(address delegator, address token, uint256 amount, uint64 readyRound) +``` + +#### Withdrawn + +```solidity +event Withdrawn(address delegator, address token, uint256 amount) +``` + +#### ExpiredLocksHarvested + +```solidity +event ExpiredLocksHarvested(address delegator, address token, uint256 count, uint256 totalAmount) +``` + +#### Delegated + +```solidity +event Delegated(address delegator, address operator, address token, uint256 amount, uint256 shares, enum Types.BlueprintSelectionMode selectionMode) +``` + +#### DelegatorUnstakeScheduled + +```solidity +event DelegatorUnstakeScheduled(address delegator, address operator, address token, uint256 shares, uint256 estimatedAmount, uint64 readyRound) +``` + +#### DelegatorUnstakeExecuted + +```solidity +event DelegatorUnstakeExecuted(address delegator, address operator, address token, uint256 shares, uint256 amount) +``` + +#### BlueprintAddedToDelegation + +```solidity +event BlueprintAddedToDelegation(address delegator, uint256 delegationIndex, uint64 blueprintId) +``` + +#### BlueprintRemovedFromDelegation + +```solidity +event BlueprintRemovedFromDelegation(address delegator, uint256 delegationIndex, uint64 blueprintId) +``` + +#### Slashed + +```solidity +event Slashed(address operator, uint64 serviceId, uint256 operatorSlashed, uint256 delegatorsSlashed, uint256 newExchangeRate) +``` + +#### SlashedForService + +```solidity +event SlashedForService(address operator, uint64 serviceId, uint64 blueprintId, uint256 totalSlashed, uint256 commitmentCount) +``` + +#### SlashRecorded + +```solidity +event SlashRecorded(address operator, uint64 slashId, uint256 totalSlashed, uint256 exchangeRateBefore, uint256 exchangeRateAfter) +``` + +#### AdapterRegistered + +```solidity +event AdapterRegistered(address token, address adapter) +``` + +#### AdapterRemoved + +```solidity +event AdapterRemoved(address token) +``` + +#### RequireAdaptersUpdated + +```solidity +event RequireAdaptersUpdated(bool required) +``` diff --git a/pages/developers/api/reference/IPaymentAdapterRegistry.mdx b/pages/developers/api/reference/IPaymentAdapterRegistry.mdx new file mode 100644 index 00000000..681b6952 --- /dev/null +++ b/pages/developers/api/reference/IPaymentAdapterRegistry.mdx @@ -0,0 +1,125 @@ +--- +title: IPaymentAdapterRegistry +description: Auto-generated Solidity API reference. 
+--- + +# IPaymentAdapterRegistry + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IStreamingPaymentAdapter.sol + +### IPaymentAdapterRegistry + +Registry for managing multiple payment adapters + +#### Functions + +#### registerAdapter + +```solidity +function registerAdapter(string name, address adapter) external +``` + +Register a new payment adapter + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | --------------- | +| name | string | Adapter name | +| adapter | address | Adapter address | + +#### removeAdapter + +```solidity +function removeAdapter(string name) external +``` + +Remove a payment adapter + +##### Parameters + +| Name | Type | Description | +| ---- | ------ | ---------------------- | +| name | string | Adapter name to remove | + +#### getAdapter + +```solidity +function getAdapter(string name) external view returns (address adapter) +``` + +Get an adapter by name + +##### Parameters + +| Name | Type | Description | +| ---- | ------ | ------------ | +| name | string | Adapter name | + +##### Return Values + +| Name | Type | Description | +| ------- | ------- | --------------- | +| adapter | address | Adapter address | + +#### getDefaultAdapter + +```solidity +function getDefaultAdapter() external view returns (address adapter) +``` + +Get the default adapter + +##### Return Values + +| Name | Type | Description | +| ------- | ------- | ----------------------- | +| adapter | address | Default adapter address | + +#### setDefaultAdapter + +```solidity +function setDefaultAdapter(string name) external +``` + +Set the default adapter + +##### Parameters + +| Name | Type | Description | +| ---- | ------ | --------------------------------- | +| name | string | Name of adapter to set as default | + +#### isRegistered + +```solidity +function isRegistered(string name) external view returns (bool registered) +``` + +Check if an adapter is registered + +##### Parameters + +| Name | Type | Description | +| ---- | ------ | ------------ | +| name | string | Adapter name | + +##### Return Values + +| Name | Type | Description | +| ---------- | ---- | ---------------------- | +| registered | bool | True if adapter exists | + +#### getRegisteredAdapters + +```solidity +function getRegisteredAdapters() external view returns (string[] names) +``` + +Get all registered adapter names + +##### Return Values + +| Name | Type | Description | +| ----- | -------- | ---------------------- | +| names | string[] | Array of adapter names | diff --git a/pages/developers/api/reference/IRestaking.mdx b/pages/developers/api/reference/IRestaking.mdx new file mode 100644 index 00000000..757eaf35 --- /dev/null +++ b/pages/developers/api/reference/IRestaking.mdx @@ -0,0 +1,307 @@ +--- +title: IStaking +description: Auto-generated Solidity API reference. +--- + +# IStaking + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IStaking.sol + +### IStaking + +Abstract interface for staking/shared security protocols + +\_Implement this to integrate with native staking, Symbiotic, or other staking systems. 
+ +Design principles: + +- Minimal interface - only what Tangle core needs +- Read-heavy - most operations are queries +- Write-light - only slash() modifies state +- No assumptions about underlying implementation\_ + +#### Functions + +#### isOperator + +```solidity +function isOperator(address operator) external view returns (bool) +``` + +Check if an address is a registered operator + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------- | +| operator | address | The address to check | + +##### Return Values + +| Name | Type | Description | +| ---- | ---- | ------------------------------ | +| [0] | bool | True if registered as operator | + +#### isOperatorActive + +```solidity +function isOperatorActive(address operator) external view returns (bool) +``` + +Check if an operator is currently active (not leaving, not slashed out) + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------- | +| operator | address | The address to check | + +##### Return Values + +| Name | Type | Description | +| ---- | ---- | -------------- | +| [0] | bool | True if active | + +#### getOperatorStake + +```solidity +function getOperatorStake(address operator) external view returns (uint256) +``` + +Get an operator's total stake (self-stake + delegations) + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------- | +| operator | address | The operator address | + +##### Return Values + +| Name | Type | Description | +| ---- | ------- | ---------------------------------- | +| [0] | uint256 | Total stake amount in native units | + +#### getOperatorSelfStake + +```solidity +function getOperatorSelfStake(address operator) external view returns (uint256) +``` + +Get an operator's self-stake only + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------- | +| operator | address | The operator address | + +##### Return Values + +| Name | Type | Description | +| ---- | ------- | ----------------- | +| [0] | uint256 | Self-stake amount | + +#### getOperatorDelegatedStake + +```solidity +function getOperatorDelegatedStake(address operator) external view returns (uint256) +``` + +Get total amount delegated to an operator + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------- | +| operator | address | The operator address | + +##### Return Values + +| Name | Type | Description | +| ---- | ------- | ---------------------- | +| [0] | uint256 | Total delegated amount | + +#### getDelegation + +```solidity +function getDelegation(address delegator, address operator) external view returns (uint256) +``` + +Get a delegator's delegation to a specific operator + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | --------------------- | +| delegator | address | The delegator address | +| operator | address | The operator address | + +##### Return Values + +| Name | Type | Description | +| ---- | ------- | ----------------- | +| [0] | uint256 | Delegation amount | + +#### getTotalDelegation + +```solidity +function getTotalDelegation(address delegator) external view returns (uint256) +``` + +Get a delegator's total delegations across all operators + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | --------------------- | +| delegator | address | The delegator address | + +##### Return Values + +| Name | Type | Description | +| ---- | ------- | ---------------------- | +| [0] | 
uint256 | Total delegated amount | + +#### minOperatorStake + +```solidity +function minOperatorStake() external view returns (uint256) +``` + +Get minimum stake required to be an operator + +##### Return Values + +| Name | Type | Description | +| ---- | ------- | -------------------- | +| [0] | uint256 | Minimum stake amount | + +#### meetsStakeRequirement + +```solidity +function meetsStakeRequirement(address operator, uint256 required) external view returns (bool) +``` + +Check if operator meets a specific stake requirement + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------------------- | +| operator | address | The operator address | +| required | uint256 | The required stake amount | + +##### Return Values + +| Name | Type | Description | +| ---- | ---- | ------------------------------------- | +| [0] | bool | True if operator has sufficient stake | + +#### slashForBlueprint + +```solidity +function slashForBlueprint(address operator, uint64 blueprintId, uint64 serviceId, uint256 amount, bytes32 evidence) external returns (uint256 actualSlashed) +``` + +Slash an operator's stake for a specific blueprint + +_Only affects delegators exposed to this blueprint (All mode + Fixed mode who selected it)_ + +##### Parameters + +| Name | Type | Description | +| ----------- | ------- | --------------------------------------- | +| operator | address | The operator to slash | +| blueprintId | uint64 | The blueprint where violation occurred | +| serviceId | uint64 | The service where violation occurred | +| amount | uint256 | Amount to slash | +| evidence | bytes32 | Evidence hash (IPFS or other reference) | + +##### Return Values + +| Name | Type | Description | +| ------------- | ------- | ------------------------------------------------------------- | +| actualSlashed | uint256 | The actual amount slashed (may be less if insufficient stake) | + +#### slashForService + +```solidity +function slashForService(address operator, uint64 blueprintId, uint64 serviceId, struct Types.AssetSecurityCommitment[] commitments, uint256 amount, bytes32 evidence) external returns (uint256 actualSlashed) +``` + +Slash an operator for a specific service, only slashing committed assets + +_Only slashes assets the operator committed to this service, proportionally_ + +##### Parameters + +| Name | Type | Description | +| ----------- | -------------------------------------- | ---------------------------------------------------------- | +| operator | address | The operator to slash | +| blueprintId | uint64 | The blueprint where violation occurred | +| serviceId | uint64 | The service where violation occurred | +| commitments | struct Types.AssetSecurityCommitment[] | The operator's asset security commitments for this service | +| amount | uint256 | Amount to slash | +| evidence | bytes32 | Evidence hash (IPFS or other reference) | + +##### Return Values + +| Name | Type | Description | +| ------------- | ------- | ----------------------------------------------------------------------- | +| actualSlashed | uint256 | The actual amount slashed (may be less if insufficient committed stake) | + +#### slash + +```solidity +function slash(address operator, uint64 serviceId, uint256 amount, bytes32 evidence) external returns (uint256 actualSlashed) +``` + +Slash an operator's stake (legacy - slashes all delegators) + +_Only callable by authorized slashers (e.g., Tangle core contract)_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | 
--------------------------------------- | +| operator | address | The operator to slash | +| serviceId | uint64 | The service where violation occurred | +| amount | uint256 | Amount to slash | +| evidence | bytes32 | Evidence hash (IPFS or other reference) | + +##### Return Values + +| Name | Type | Description | +| ------------- | ------- | ------------------------------------------------------------- | +| actualSlashed | uint256 | The actual amount slashed (may be less if insufficient stake) | + +#### isSlasher + +```solidity +function isSlasher(address account) external view returns (bool) +``` + +Check if an address is authorized to call slash() + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | -------------------- | +| account | address | The address to check | + +##### Return Values + +| Name | Type | Description | +| ---- | ---- | ------------------ | +| [0] | bool | True if authorized | + +#### Events + +#### OperatorSlashed + +```solidity +event OperatorSlashed(address operator, uint64 serviceId, uint256 amount, bytes32 evidence) +``` + +Emitted when an operator is slashed diff --git a/pages/developers/api/reference/IRestakingAdmin.mdx b/pages/developers/api/reference/IRestakingAdmin.mdx new file mode 100644 index 00000000..6916f50c --- /dev/null +++ b/pages/developers/api/reference/IRestakingAdmin.mdx @@ -0,0 +1,58 @@ +--- +title: IStakingAdmin +description: Auto-generated Solidity API reference. +--- + +# IStakingAdmin + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IStaking.sol + +### IStakingAdmin + +Admin functions for staking implementations + +_Separated to keep main interface clean_ + +#### Functions + +#### addSlasher + +```solidity +function addSlasher(address slasher) external +``` + +Add an authorized slasher + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | -------------------- | +| slasher | address | Address to authorize | + +#### removeSlasher + +```solidity +function removeSlasher(address slasher) external +``` + +Remove an authorized slasher + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | ----------------- | +| slasher | address | Address to remove | + +#### setMinOperatorStake + +```solidity +function setMinOperatorStake(uint256 amount) external +``` + +Update minimum operator stake + +##### Parameters + +| Name | Type | Description | +| ------ | ------- | ----------- | +| amount | uint256 | New minimum | diff --git a/pages/developers/api/reference/IRewardsManager.mdx b/pages/developers/api/reference/IRewardsManager.mdx new file mode 100644 index 00000000..a60b8134 --- /dev/null +++ b/pages/developers/api/reference/IRewardsManager.mdx @@ -0,0 +1,85 @@ +--- +title: IRewardsManager +description: Auto-generated Solidity API reference. 
+--- + +# IRewardsManager + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IRewardsManager.sol + +### IRewardsManager + +Interface for reward vault management - called by MultiAssetDelegation + +#### Functions + +#### recordDelegate + +```solidity +function recordDelegate(address delegator, address operator, address asset, uint256 amount, uint16 lockMultiplierBps) external +``` + +Records a delegation for reward tracking + +##### Parameters + +| Name | Type | Description | +| ----------------- | ------- | --------------------------------------------------------- | +| delegator | address | The account making the delegation | +| operator | address | The operator being delegated to | +| asset | address | The asset being delegated (address(0) for native) | +| amount | uint256 | The amount being delegated | +| lockMultiplierBps | uint16 | Lock multiplier in basis points (10000 = 1x, 0 = no lock) | + +#### recordUndelegate + +```solidity +function recordUndelegate(address delegator, address operator, address asset, uint256 amount) external +``` + +Records an undelegation + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ----------------------------------- | +| delegator | address | The account making the undelegation | +| operator | address | The operator being undelegated from | +| asset | address | The asset being undelegated | +| amount | uint256 | The amount being undelegated | + +#### recordServiceReward + +```solidity +function recordServiceReward(address operator, address asset, uint256 amount) external +``` + +Records a service reward for an operator + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | --------------------------------- | +| operator | address | The operator receiving the reward | +| asset | address | The reward asset | +| amount | uint256 | The reward amount | + +#### getAssetDepositCapRemaining + +```solidity +function getAssetDepositCapRemaining(address asset) external view returns (uint256 remaining) +``` + +Get remaining deposit capacity for an asset vault + +##### Parameters + +| Name | Type | Description | +| ----- | ------- | ------------------ | +| asset | address | The asset to query | + +##### Return Values + +| Name | Type | Description | +| --------- | ------- | ------------------------------ | +| remaining | uint256 | The remaining deposit capacity | diff --git a/pages/developers/api/reference/ISablierAdapter.mdx b/pages/developers/api/reference/ISablierAdapter.mdx new file mode 100644 index 00000000..8df737c5 --- /dev/null +++ b/pages/developers/api/reference/ISablierAdapter.mdx @@ -0,0 +1,162 @@ +--- +title: ISablierAdapter +description: Auto-generated Solidity API reference. 
+--- + +# ISablierAdapter + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IStreamingPaymentAdapter.sol + +### ISablierAdapter + +Extended interface for Sablier-specific features + +#### Types + +#### StreamType + +Stream type for Sablier + +```solidity +enum StreamType { + Linear, + Dynamic, + Tranched +} +``` + +#### Segment + +Segment for dynamic streams + +```solidity +struct Segment { + uint128 amount; + uint64 exponent; + uint40 timestamp; +} +``` + +#### Functions + +#### createLinearStream + +```solidity +function createLinearStream(uint64 serviceId, address token, uint128 totalAmount, uint40 durationSeconds, uint40 cliffSeconds) external returns (uint256 streamId) +``` + +Create a linear stream (constant rate) + +##### Parameters + +| Name | Type | Description | +| --------------- | ------- | ---------------------- | +| serviceId | uint64 | The Tangle service ID | +| token | address | The ERC-20 token | +| totalAmount | uint128 | Total amount to stream | +| durationSeconds | uint40 | Total duration | +| cliffSeconds | uint40 | Cliff period | + +##### Return Values + +| Name | Type | Description | +| -------- | ------- | --------------------- | +| streamId | uint256 | The created stream ID | + +#### createDynamicStream + +```solidity +function createDynamicStream(uint64 serviceId, address token, uint128 totalAmount, struct ISablierAdapter.Segment[] segments) external returns (uint256 streamId) +``` + +Create a dynamic stream with custom curve + +##### Parameters + +| Name | Type | Description | +| ----------- | -------------------------------- | ------------------------------------ | +| serviceId | uint64 | The Tangle service ID | +| token | address | The ERC-20 token | +| totalAmount | uint128 | Total amount to stream | +| segments | struct ISablierAdapter.Segment[] | Array of segments defining the curve | + +##### Return Values + +| Name | Type | Description | +| -------- | ------- | --------------------- | +| streamId | uint256 | The created stream ID | + +#### isCancelable + +```solidity +function isCancelable(uint256 streamId) external view returns (bool cancelable) +``` + +Check if a stream is cancelable + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| ---------- | ---- | ------------------------------- | +| cancelable | bool | True if stream can be cancelled | + +#### wasCancelled + +```solidity +function wasCancelled(uint256 streamId) external view returns (bool cancelled) +``` + +Check if a stream was cancelled + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| --------- | ---- | ---------------------------- | +| cancelled | bool | True if stream was cancelled | + +#### getStreamNFT + +```solidity +function getStreamNFT(uint256 streamId) external view returns (uint256 tokenId) +``` + +Get the NFT token ID for a stream (Sablier streams are NFTs) + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| ------- | ------- | -------------------- | +| tokenId | uint256 | The ERC-721 token ID | + +#### transferStream + +```solidity +function transferStream(uint256 streamId, address newRecipient) external +``` + +Transfer stream ownership 
(NFT transfer) + +##### Parameters + +| Name | Type | Description | +| ------------ | ------- | --------------------- | +| streamId | uint256 | The stream ID | +| newRecipient | address | New recipient address | diff --git a/pages/developers/api/reference/IServiceFeeDistributor.mdx b/pages/developers/api/reference/IServiceFeeDistributor.mdx new file mode 100644 index 00000000..427d0973 --- /dev/null +++ b/pages/developers/api/reference/IServiceFeeDistributor.mdx @@ -0,0 +1,157 @@ +--- +title: IServiceFeeDistributor +description: Auto-generated Solidity API reference. +--- + +# IServiceFeeDistributor + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IServiceFeeDistributor.sol + +### IServiceFeeDistributor + +Tracks service-fee payouts to stakers across payment tokens + +_Receives delegation-change hooks from MultiAssetDelegation and fee-distribution calls from Tangle._ + +#### Functions + +#### distributeServiceFee + +```solidity +function distributeServiceFee(uint64 serviceId, uint64 blueprintId, address operator, address paymentToken, uint256 amount) external payable +``` + +#### distributeInflationReward + +```solidity +function distributeInflationReward(uint64 serviceId, uint64 blueprintId, address operator, address paymentToken, uint256 amount) external payable +``` + +Distribute inflation-funded staker rewards using service exposure weights + +_Intended for InflationPool; rewards are paid in the provided token (TNT)._ + +#### claimFor + +```solidity +function claimFor(address token, address operator, struct Types.Asset asset) external returns (uint256 amount) +``` + +Claim rewards for a specific delegator position and token + +#### claimAll + +```solidity +function claimAll(address token) external returns (uint256 totalAmount) +``` + +Claim all pending rewards across all positions for a token + +#### claimAllBatch + +```solidity +function claimAllBatch(address[] tokens) external returns (uint256[] amounts) +``` + +Claim all pending rewards for multiple tokens + +#### pendingRewards + +```solidity +function pendingRewards(address delegator, address token) external view returns (uint256 pending) +``` + +Preview pending rewards for a delegator across all positions for a token + +#### delegatorOperators + +```solidity +function delegatorOperators(address delegator) external view returns (address[] operators) +``` + +Return all operators a delegator has positions with + +#### delegatorAssets + +```solidity +function delegatorAssets(address delegator, address operator) external view returns (bytes32[] assetHashes) +``` + +Return all asset hashes a delegator has positions for with an operator + +#### getPosition + +```solidity +function getPosition(address delegator, address operator, bytes32 assetHash) external view returns (uint8 mode, uint256 principal, uint256 score) +``` + +Return a delegator's position details + +#### operatorRewardTokens + +```solidity +function operatorRewardTokens(address operator) external view returns (address[] tokens) +``` + +Return reward tokens ever distributed for an operator + +#### onDelegationChanged + +```solidity +function onDelegationChanged(address delegator, address operator, struct Types.Asset asset, uint256 amount, bool isIncrease, enum Types.BlueprintSelectionMode selectionMode, uint64[] blueprintIds, uint16 lockMultiplierBps) external +``` + +#### onBlueprintAdded + +```solidity +function onBlueprintAdded(address delegator, address operator, struct Types.Asset asset, uint64 blueprintId) external +``` + +#### 
onBlueprintRemoved + +```solidity +function onBlueprintRemoved(address delegator, address operator, struct Types.Asset asset, uint64 blueprintId) external +``` + +#### getPoolScore + +```solidity +function getPoolScore(address operator, uint64 blueprintId, struct Types.Asset asset) external view returns (uint256 allScore, uint256 fixedScore) +``` + +#### getOperatorServiceUsdExposure + +```solidity +function getOperatorServiceUsdExposure(uint64 serviceId, uint64 blueprintId, address operator) external view returns (uint256 totalUsdExposure) +``` + +Get USD-weighted exposure for an operator/service + +_Returns total USD exposure across All+Fixed pools for the service._ + +#### onOperatorLeaving + +```solidity +function onOperatorLeaving(uint64 serviceId, address operator) external +``` + +Called when an operator is about to leave a service + +_Drips all active streams for the operator BEFORE they're removed_ + +#### onServiceTerminated + +```solidity +function onServiceTerminated(uint64 serviceId, address refundRecipient) external +``` + +Called when a service is terminated early + +_Cancels streaming payments and refunds remaining amounts to the service owner_ + +##### Parameters + +| Name | Type | Description | +| --------------- | ------- | ------------------------------------------------------------- | +| serviceId | uint64 | The terminated service ID | +| refundRecipient | address | Where to send the remaining payment (typically service owner) | diff --git a/pages/developers/api/reference/IStreamingPaymentAdapter.mdx b/pages/developers/api/reference/IStreamingPaymentAdapter.mdx new file mode 100644 index 00000000..37a09f10 --- /dev/null +++ b/pages/developers/api/reference/IStreamingPaymentAdapter.mdx @@ -0,0 +1,316 @@ +--- +title: IStreamingPaymentAdapter +description: Auto-generated Solidity API reference. +--- + +# IStreamingPaymentAdapter + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IStreamingPaymentAdapter.sol + +### IStreamingPaymentAdapter + +Common interface for streaming payment adapters (Superfluid, Sablier, etc.) 
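+
+A minimal usage sketch, not taken from the source: a service owner opens a linear stream and later withdraws vested funds through an adapter that implements this interface. The adapter address, token, duration, and import path are placeholder assumptions.
+
+```solidity
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.20;
+
+// Import path is an assumption; adjust to your remappings.
+import {IStreamingPaymentAdapter} from "tnt-core/src/interfaces/IStreamingPaymentAdapter.sol";
+
+contract StreamPayerExample {
+    IStreamingPaymentAdapter public immutable adapter;
+
+    constructor(IStreamingPaymentAdapter _adapter) {
+        adapter = _adapter;
+    }
+
+    /// Open a 30-day stream of `amount` of `token` for `serviceId`, with no cliff.
+    /// For ERC-20 tokens the adapter typically needs an allowance first (assumption).
+    function openStream(uint64 serviceId, address token, uint256 amount) external returns (uint256 streamId) {
+        streamId = adapter.createStream(serviceId, token, amount, 30 days, 0);
+    }
+
+    /// Pull whatever has vested so far.
+    function withdraw(uint256 streamId) external returns (uint256 withdrawn) {
+        withdrawn = adapter.withdrawFromStream(streamId);
+    }
+}
+```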
+ +_Adapters implement this interface to provide streaming payment capabilities +to Tangle services without tight coupling to specific protocols._ + +#### Functions + +#### createStream + +```solidity +function createStream(uint64 serviceId, address token, uint256 totalAmount, uint64 durationSeconds, uint64 cliffSeconds) external payable returns (uint256 streamId) +``` + +Create a streaming payment for a service + +##### Parameters + +| Name | Type | Description | +| --------------- | ------- | -------------------------------------------------- | +| serviceId | uint64 | The Tangle service ID | +| token | address | The ERC-20 token to stream (address(0) for native) | +| totalAmount | uint256 | Total amount to stream | +| durationSeconds | uint64 | Stream duration in seconds | +| cliffSeconds | uint64 | Optional cliff period (0 for no cliff) | + +##### Return Values + +| Name | Type | Description | +| -------- | ------- | --------------------- | +| streamId | uint256 | The created stream ID | + +#### updateStreamRate + +```solidity +function updateStreamRate(uint256 streamId, uint256 newRatePerSecond) external +``` + +Update the rate of an existing stream + +##### Parameters + +| Name | Type | Description | +| ---------------- | ------- | ----------------------- | +| streamId | uint256 | The stream ID to update | +| newRatePerSecond | uint256 | New streaming rate | + +#### cancelStream + +```solidity +function cancelStream(uint256 streamId) external returns (uint256 refundedAmount) +``` + +Cancel a stream and refund remaining balance + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ----------------------- | +| streamId | uint256 | The stream ID to cancel | + +##### Return Values + +| Name | Type | Description | +| -------------- | ------- | ---------------------------- | +| refundedAmount | uint256 | Amount refunded to the payer | + +#### withdrawFromStream + +```solidity +function withdrawFromStream(uint256 streamId) external returns (uint256 withdrawnAmount) +``` + +Withdraw available funds from a stream + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| --------------- | ------- | ---------------- | +| withdrawnAmount | uint256 | Amount withdrawn | + +#### settleAndDistribute + +```solidity +function settleAndDistribute(uint256 streamId) external +``` + +Settle a stream's accumulated funds and distribute to operators + +_This triggers distribution through Tangle's payment system_ + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ----------------------- | +| streamId | uint256 | The stream ID to settle | + +#### getWithdrawableAmount + +```solidity +function getWithdrawableAmount(uint256 streamId) external view returns (uint256 amount) +``` + +Get the current withdrawable amount for a stream + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | ---------------------------- | +| amount | uint256 | Amount available to withdraw | + +#### getStreamRate + +```solidity +function getStreamRate(uint256 streamId) external view returns (uint256 ratePerSecond) +``` + +Get the current streaming rate + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + 
+| Name | Type | Description | +| ------------- | ------- | -------------------------------- | +| ratePerSecond | uint256 | Tokens per second being streamed | + +#### getStreamInfo + +```solidity +function getStreamInfo(uint256 streamId) external view returns (uint64 serviceId, address payer, address token, uint256 totalAmount, uint256 withdrawnAmount, uint256 startTime, uint256 endTime, uint256 cliffTime, bool active) +``` + +Get full stream information + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| --------------- | ------- | ------------------------------- | +| serviceId | uint64 | Associated Tangle service | +| payer | address | Address funding the stream | +| token | address | Token being streamed | +| totalAmount | uint256 | Total stream amount | +| withdrawnAmount | uint256 | Amount already withdrawn | +| startTime | uint256 | Stream start timestamp | +| endTime | uint256 | Stream end timestamp | +| cliffTime | uint256 | Cliff timestamp (0 if no cliff) | +| active | bool | Whether stream is active | + +#### getStreamServiceId + +```solidity +function getStreamServiceId(uint256 streamId) external view returns (uint64 serviceId) +``` + +Get the service ID associated with a stream + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| --------- | ------ | --------------------- | +| serviceId | uint64 | The Tangle service ID | + +#### getServiceStreams + +```solidity +function getServiceStreams(uint64 serviceId) external view returns (uint256[] streamIds) +``` + +Get all active streams for a service + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | --------------------- | +| serviceId | uint64 | The Tangle service ID | + +##### Return Values + +| Name | Type | Description | +| --------- | --------- | -------------------------- | +| streamIds | uint256[] | Array of active stream IDs | + +#### getAccruedAmount + +```solidity +function getAccruedAmount(uint256 streamId) external view returns (uint256 accruedAmount) +``` + +Calculate real-time accrued amount (not yet settled) + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| ------------- | ------- | ------------------------------------ | +| accruedAmount | uint256 | Amount accrued since last settlement | + +#### protocolName + +```solidity +function protocolName() external view returns (string name) +``` + +Get the name of the underlying protocol + +##### Return Values + +| Name | Type | Description | +| ---- | ------ | --------------------------------------------- | +| name | string | Protocol name (e.g., "Superfluid", "Sablier") | + +#### isTokenSupported + +```solidity +function isTokenSupported(address token) external view returns (bool supported) +``` + +Check if a token is supported for streaming + +##### Parameters + +| Name | Type | Description | +| ----- | ------- | ----------------- | +| token | address | The token address | + +##### Return Values + +| Name | Type | Description | +| --------- | ---- | ----------------------------- | +| supported | bool | True if token can be streamed | + +#### Events + +#### StreamCreated + +```solidity +event StreamCreated(uint64 serviceId, uint256 
streamId, address payer, address token, uint256 ratePerSecond, uint256 totalAmount) +``` + +Emitted when a stream is created for a service + +#### StreamUpdated + +```solidity +event StreamUpdated(uint64 serviceId, uint256 streamId, uint256 newRatePerSecond) +``` + +Emitted when a stream is updated + +#### StreamCancelled + +```solidity +event StreamCancelled(uint64 serviceId, uint256 streamId, uint256 refundedAmount) +``` + +Emitted when a stream is cancelled + +#### StreamWithdrawn + +```solidity +event StreamWithdrawn(uint64 serviceId, uint256 streamId, uint256 amount, address recipient) +``` + +Emitted when funds are withdrawn from a stream + +#### StreamSettled + +```solidity +event StreamSettled(uint64 serviceId, uint256 streamId, uint256 amount) +``` + +Emitted when a stream is settled and distributed diff --git a/pages/developers/api/reference/IStreamingPaymentManager.mdx b/pages/developers/api/reference/IStreamingPaymentManager.mdx new file mode 100644 index 00000000..7f4e6a0f --- /dev/null +++ b/pages/developers/api/reference/IStreamingPaymentManager.mdx @@ -0,0 +1,78 @@ +--- +title: IStreamingPaymentManager +description: Auto-generated Solidity API reference. +--- + +# IStreamingPaymentManager + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IStreamingPaymentManager.sol + +### IStreamingPaymentManager + +Interface for streaming payment management + +#### Functions + +#### createStream + +```solidity +function createStream(uint64 serviceId, uint64 blueprintId, address operator, address paymentToken, uint256 amount, uint64 startTime, uint64 endTime) external payable +``` + +Create a streaming payment for a service + +#### dripAndGetChunk + +```solidity +function dripAndGetChunk(uint64 serviceId, address operator) external returns (uint256 amount, uint256 durationSeconds, uint64 blueprintId, address paymentToken) +``` + +Drip a specific stream and return chunk info + +#### dripOperatorStreams + +```solidity +function dripOperatorStreams(address operator) external returns (uint64[] serviceIds, uint64[] blueprintIds, address[] paymentTokens, uint256[] amounts, uint256[] durations) +``` + +Drip all active streams for an operator + +#### onServiceTerminated + +```solidity +function onServiceTerminated(uint64 serviceId, address refundRecipient) external +``` + +Called when service is terminated + +#### onOperatorLeaving + +```solidity +function onOperatorLeaving(uint64 serviceId, address operator) external +``` + +Called when operator is leaving + +#### getOperatorActiveStreams + +```solidity +function getOperatorActiveStreams(address operator) external view returns (uint64[]) +``` + +Get active stream IDs for an operator + +#### getStreamingPayment + +```solidity +function getStreamingPayment(uint64 serviceId, address operator) external view returns (uint64 _serviceId, uint64 blueprintId, address _operator, address paymentToken, uint256 totalAmount, uint256 distributed, uint64 startTime, uint64 endTime, uint64 lastDripTime) +``` + +Get streaming payment details + +#### pendingDrip + +```solidity +function pendingDrip(uint64 serviceId, address operator) external view returns (uint256) +``` + +Calculate pending drip amount diff --git a/pages/developers/api/reference/ISuperfluidAdapter.mdx b/pages/developers/api/reference/ISuperfluidAdapter.mdx new file mode 100644 index 00000000..3f11783c --- /dev/null +++ b/pages/developers/api/reference/ISuperfluidAdapter.mdx @@ -0,0 +1,129 @@ +--- +title: ISuperfluidAdapter +description: Auto-generated Solidity API 
reference. +--- + +# ISuperfluidAdapter + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IStreamingPaymentAdapter.sol + +### ISuperfluidAdapter + +Extended interface for Superfluid-specific features + +#### Functions + +#### getNetFlowRate + +```solidity +function getNetFlowRate(address account, address token) external view returns (int96 netFlowRate) +``` + +Get the net flow rate for an account (incoming - outgoing) + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | ------------------- | +| account | address | The account address | +| token | address | The super token | + +##### Return Values + +| Name | Type | Description | +| ----------- | ----- | ------------------------------- | +| netFlowRate | int96 | Net flow rate (can be negative) | + +#### getRealtimeBalance + +```solidity +function getRealtimeBalance(address account, address token) external view returns (int256 availableBalance, uint256 deposit) +``` + +Get the real-time balance of an account + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | ------------------- | +| account | address | The account address | +| token | address | The super token | + +##### Return Values + +| Name | Type | Description | +| ---------------- | ------- | ------------------------- | +| availableBalance | int256 | Current available balance | +| deposit | uint256 | Required deposit/buffer | + +#### isSolvent + +```solidity +function isSolvent(address account, address token) external view returns (bool solvent) +``` + +Check if an account is solvent (positive balance) + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | ------------------- | +| account | address | The account address | +| token | address | The super token | + +##### Return Values + +| Name | Type | Description | +| ------- | ---- | ------------------------------------ | +| solvent | bool | True if account has positive balance | + +#### getRequiredBuffer + +```solidity +function getRequiredBuffer(address token, int96 flowRate) external view returns (uint256 bufferAmount) +``` + +Get the required buffer/deposit for a flow rate + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ----------------------- | +| token | address | The super token | +| flowRate | int96 | Flow rate in wei/second | + +##### Return Values + +| Name | Type | Description | +| ------------ | ------- | ----------------------- | +| bufferAmount | uint256 | Required buffer deposit | + +#### wrapTokens + +```solidity +function wrapTokens(address token, uint256 amount) external +``` + +Wrap underlying tokens to super tokens + +##### Parameters + +| Name | Type | Description | +| ------ | ------- | -------------------- | +| token | address | The underlying token | +| amount | uint256 | Amount to wrap | + +#### unwrapTokens + +```solidity +function unwrapTokens(address token, uint256 amount) external +``` + +Unwrap super tokens to underlying + +##### Parameters + +| Name | Type | Description | +| ------ | ------- | ---------------- | +| token | address | The super token | +| amount | uint256 | Amount to unwrap | diff --git a/pages/developers/api/reference/ITangle.mdx b/pages/developers/api/reference/ITangle.mdx new file mode 100644 index 00000000..2ec0bbd0 --- /dev/null +++ b/pages/developers/api/reference/ITangle.mdx @@ -0,0 +1,15 @@ +--- +title: ITangle +description: Auto-generated Solidity API reference. 
+--- + +# ITangle + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangle.sol + +### ITangle + +Core interface for Tangle Protocol + +_Consolidates all sub-interfaces into a single entry point. +Inherits from focused sub-interfaces for modularity._ diff --git a/pages/developers/api/reference/ITangleAdmin.mdx b/pages/developers/api/reference/ITangleAdmin.mdx new file mode 100644 index 00000000..3613bd1f --- /dev/null +++ b/pages/developers/api/reference/ITangleAdmin.mdx @@ -0,0 +1,248 @@ +--- +title: ITangleAdmin +description: Auto-generated Solidity API reference. +--- + +# ITangleAdmin + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangle.sol + +### ITangleAdmin + +Admin functions for Tangle protocol + +#### Functions + +#### setStaking + +```solidity +function setStaking(address staking) external +``` + +Set the staking module + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | --------------------------- | +| staking | address | The IStaking implementation | + +#### setTreasury + +```solidity +function setTreasury(address treasury) external +``` + +Set the protocol treasury + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------- | +| treasury | address | The treasury address | + +#### setPaymentSplit + +```solidity +function setPaymentSplit(struct Types.PaymentSplit split) external +``` + +Set the payment split configuration + +##### Parameters + +| Name | Type | Description | +| ----- | ------------------------- | --------------------------- | +| split | struct Types.PaymentSplit | The new split configuration | + +#### paymentSplit + +```solidity +function paymentSplit() external view returns (uint16 developerBps, uint16 protocolBps, uint16 operatorBps, uint16 stakerBps) +``` + +Get the current payment split + +#### pause + +```solidity +function pause() external +``` + +Pause the protocol + +#### unpause + +```solidity +function unpause() external +``` + +Unpause the protocol + +#### treasury + +```solidity +function treasury() external view returns (address payable) +``` + +Get the configured treasury + +#### setMetricsRecorder + +```solidity +function setMetricsRecorder(address recorder) external +``` + +Set the metrics recorder (optional) + +#### metricsRecorder + +```solidity +function metricsRecorder() external view returns (address) +``` + +Get the metrics recorder address + +#### setOperatorStatusRegistry + +```solidity +function setOperatorStatusRegistry(address registry) external +``` + +Set operator status registry + +#### operatorStatusRegistry + +```solidity +function operatorStatusRegistry() external view returns (address) +``` + +Get operator status registry + +#### setServiceFeeDistributor + +```solidity +function setServiceFeeDistributor(address distributor) external +``` + +Configure service fee distributor + +#### serviceFeeDistributor + +```solidity +function serviceFeeDistributor() external view returns (address) +``` + +Get service fee distributor + +#### setPriceOracle + +```solidity +function setPriceOracle(address oracle) external +``` + +Configure price oracle + +#### priceOracle + +```solidity +function priceOracle() external view returns (address) +``` + +Get price oracle + +#### setMBSMRegistry + +```solidity +function setMBSMRegistry(address registry) external +``` + +Configure Master Blueprint Service Manager registry + +#### mbsmRegistry + +```solidity +function mbsmRegistry() external view returns (address) +``` + +Get Master 
Blueprint Service Manager registry + +#### maxBlueprintsPerOperator + +```solidity +function maxBlueprintsPerOperator() external view returns (uint32) +``` + +Get max blueprints per operator + +#### setMaxBlueprintsPerOperator + +```solidity +function setMaxBlueprintsPerOperator(uint32 newMax) external +``` + +Set max blueprints per operator + +#### tntToken + +```solidity +function tntToken() external view returns (address) +``` + +Get TNT token address + +#### setTntToken + +```solidity +function setTntToken(address token) external +``` + +Set TNT token address + +#### rewardVaults + +```solidity +function rewardVaults() external view returns (address) +``` + +Get reward vaults address + +#### setRewardVaults + +```solidity +function setRewardVaults(address vaults) external +``` + +Set reward vaults address + +#### defaultTntMinExposureBps + +```solidity +function defaultTntMinExposureBps() external view returns (uint16) +``` + +Get default TNT min exposure bps + +#### setDefaultTntMinExposureBps + +```solidity +function setDefaultTntMinExposureBps(uint16 minExposureBps) external +``` + +Set default TNT min exposure bps + +#### tntPaymentDiscountBps + +```solidity +function tntPaymentDiscountBps() external view returns (uint16) +``` + +Get TNT payment discount bps + +#### setTntPaymentDiscountBps + +```solidity +function setTntPaymentDiscountBps(uint16 discountBps) external +``` + +Set TNT payment discount bps diff --git a/pages/developers/api/reference/ITangleBlueprints.mdx b/pages/developers/api/reference/ITangleBlueprints.mdx new file mode 100644 index 00000000..d83a4b9f --- /dev/null +++ b/pages/developers/api/reference/ITangleBlueprints.mdx @@ -0,0 +1,156 @@ +--- +title: ITangleBlueprints +description: Auto-generated Solidity API reference. +--- + +# ITangleBlueprints + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleBlueprints.sol + +### ITangleBlueprints + +Blueprint management interface + +#### Functions + +#### createBlueprint + +```solidity +function createBlueprint(struct Types.BlueprintDefinition definition) external returns (uint64 blueprintId) +``` + +Create a blueprint from an encoded definition that includes schemas and job metadata + +##### Parameters + +| Name | Type | Description | +| ---------- | -------------------------------- | ------------------------------------------- | +| definition | struct Types.BlueprintDefinition | Fully populated blueprint definition struct | + +##### Return Values + +| Name | Type | Description | +| ----------- | ------ | -------------------- | +| blueprintId | uint64 | The new blueprint ID | + +#### updateBlueprint + +```solidity +function updateBlueprint(uint64 blueprintId, string metadataUri) external +``` + +Update blueprint metadata + +#### transferBlueprint + +```solidity +function transferBlueprint(uint64 blueprintId, address newOwner) external +``` + +Transfer blueprint ownership + +#### deactivateBlueprint + +```solidity +function deactivateBlueprint(uint64 blueprintId) external +``` + +Deactivate a blueprint + +#### getBlueprint + +```solidity +function getBlueprint(uint64 blueprintId) external view returns (struct Types.Blueprint) +``` + +Get blueprint info + +#### getBlueprintConfig + +```solidity +function getBlueprintConfig(uint64 blueprintId) external view returns (struct Types.BlueprintConfig) +``` + +Get blueprint configuration + +#### blueprintOperatorCount + +```solidity +function blueprintOperatorCount(uint64 blueprintId) external view returns (uint256) +``` + +Get number of 
operators for a blueprint + +#### blueprintCount + +```solidity +function blueprintCount() external view returns (uint64) +``` + +Get current blueprint count + +#### getBlueprintDefinition + +```solidity +function getBlueprintDefinition(uint64 blueprintId) external view returns (struct Types.BlueprintDefinition definition) +``` + +Get the original blueprint definition + +#### blueprintMetadata + +```solidity +function blueprintMetadata(uint64 blueprintId) external view returns (struct Types.BlueprintMetadata metadata, string metadataUri) +``` + +Get blueprint metadata and URI + +#### blueprintSources + +```solidity +function blueprintSources(uint64 blueprintId) external view returns (struct Types.BlueprintSource[] sources) +``` + +Get blueprint sources + +#### blueprintSupportedMemberships + +```solidity +function blueprintSupportedMemberships(uint64 blueprintId) external view returns (enum Types.MembershipModel[] memberships) +``` + +Get blueprint supported membership models + +#### blueprintMasterRevision + +```solidity +function blueprintMasterRevision(uint64 blueprintId) external view returns (uint32) +``` + +Get master blueprint revision + +#### Events + +#### BlueprintCreated + +```solidity +event BlueprintCreated(uint64 blueprintId, address owner, address manager, string metadataUri) +``` + +#### BlueprintUpdated + +```solidity +event BlueprintUpdated(uint64 blueprintId, string metadataUri) +``` + +#### BlueprintTransferred + +```solidity +event BlueprintTransferred(uint64 blueprintId, address from, address to) +``` + +#### BlueprintDeactivated + +```solidity +event BlueprintDeactivated(uint64 blueprintId) +``` diff --git a/pages/developers/api/reference/ITangleFull.mdx b/pages/developers/api/reference/ITangleFull.mdx new file mode 100644 index 00000000..568156a4 --- /dev/null +++ b/pages/developers/api/reference/ITangleFull.mdx @@ -0,0 +1,12 @@ +--- +title: ITangleFull +description: Auto-generated Solidity API reference. +--- + +# ITangleFull + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangle.sol + +### ITangleFull + +Complete Tangle interface including admin and slashing diff --git a/pages/developers/api/reference/ITangleGovernance.mdx b/pages/developers/api/reference/ITangleGovernance.mdx new file mode 100644 index 00000000..3fd6d7ad --- /dev/null +++ b/pages/developers/api/reference/ITangleGovernance.mdx @@ -0,0 +1,271 @@ +--- +title: ITangleGovernance +description: Auto-generated Solidity API reference. 
+--- + +# ITangleGovernance + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleGovernance.sol + +### ITangleGovernance + +Interface for Tangle governance components + +#### Types + +#### ProposalState + +Proposal states + +```solidity +enum ProposalState { + Pending, + Active, + Canceled, + Defeated, + Succeeded, + Queued, + Expired, + Executed +} +``` + +#### Functions + +#### propose + +```solidity +function propose(address[] targets, uint256[] values, bytes[] calldatas, string description) external returns (uint256 proposalId) +``` + +Create a new proposal + +##### Parameters + +| Name | Type | Description | +| ----------- | --------- | -------------------------- | +| targets | address[] | Contract addresses to call | +| values | uint256[] | ETH values to send | +| calldatas | bytes[] | Encoded function calls | +| description | string | Human-readable description | + +##### Return Values + +| Name | Type | Description | +| ---------- | ------- | ------------------------------ | +| proposalId | uint256 | The unique proposal identifier | + +#### queue + +```solidity +function queue(address[] targets, uint256[] values, bytes[] calldatas, bytes32 descriptionHash) external returns (uint256 proposalId) +``` + +Queue a successful proposal for execution + +##### Parameters + +| Name | Type | Description | +| --------------- | --------- | -------------------------------- | +| targets | address[] | Contract addresses to call | +| values | uint256[] | ETH values to send | +| calldatas | bytes[] | Encoded function calls | +| descriptionHash | bytes32 | Hash of the proposal description | + +##### Return Values + +| Name | Type | Description | +| ---------- | ------- | ----------------------- | +| proposalId | uint256 | The proposal identifier | + +#### execute + +```solidity +function execute(address[] targets, uint256[] values, bytes[] calldatas, bytes32 descriptionHash) external payable returns (uint256 proposalId) +``` + +Execute a queued proposal + +##### Parameters + +| Name | Type | Description | +| --------------- | --------- | -------------------------------- | +| targets | address[] | Contract addresses to call | +| values | uint256[] | ETH values to send | +| calldatas | bytes[] | Encoded function calls | +| descriptionHash | bytes32 | Hash of the proposal description | + +##### Return Values + +| Name | Type | Description | +| ---------- | ------- | ----------------------- | +| proposalId | uint256 | The proposal identifier | + +#### cancel + +```solidity +function cancel(address[] targets, uint256[] values, bytes[] calldatas, bytes32 descriptionHash) external returns (uint256 proposalId) +``` + +Cancel a proposal + +##### Parameters + +| Name | Type | Description | +| --------------- | --------- | -------------------------------- | +| targets | address[] | Contract addresses to call | +| values | uint256[] | ETH values to send | +| calldatas | bytes[] | Encoded function calls | +| descriptionHash | bytes32 | Hash of the proposal description | + +##### Return Values + +| Name | Type | Description | +| ---------- | ------- | ----------------------- | +| proposalId | uint256 | The proposal identifier | + +#### castVote + +```solidity +function castVote(uint256 proposalId, uint8 support) external returns (uint256 weight) +``` + +Cast a vote on a proposal + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | --------------------------- | +| proposalId | uint256 | The proposal to vote on | +| support | uint8 | 0=Against, 1=For, 
2=Abstain | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | ---------------------- | +| weight | uint256 | The voting weight used | + +#### castVoteWithReason + +```solidity +function castVoteWithReason(uint256 proposalId, uint8 support, string reason) external returns (uint256 weight) +``` + +Cast a vote with reason + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | --------------------------- | +| proposalId | uint256 | The proposal to vote on | +| support | uint8 | 0=Against, 1=For, 2=Abstain | +| reason | string | Explanation for the vote | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | ---------------------- | +| weight | uint256 | The voting weight used | + +#### castVoteBySig + +```solidity +function castVoteBySig(uint256 proposalId, uint8 support, address voter, bytes signature) external returns (uint256 weight) +``` + +Cast a vote using EIP-712 signature + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | --------------------------- | +| proposalId | uint256 | The proposal to vote on | +| support | uint8 | 0=Against, 1=For, 2=Abstain | +| voter | address | The voter address | +| signature | bytes | The EIP-712 signature | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | ---------------------- | +| weight | uint256 | The voting weight used | + +#### state + +```solidity +function state(uint256 proposalId) external view returns (enum ITangleGovernance.ProposalState) +``` + +Get the current state of a proposal + +#### proposalSnapshot + +```solidity +function proposalSnapshot(uint256 proposalId) external view returns (uint256) +``` + +Get the block number when voting starts + +#### proposalDeadline + +```solidity +function proposalDeadline(uint256 proposalId) external view returns (uint256) +``` + +Get the block number when voting ends + +#### proposalProposer + +```solidity +function proposalProposer(uint256 proposalId) external view returns (address) +``` + +Get the proposer of a proposal + +#### hasVoted + +```solidity +function hasVoted(uint256 proposalId, address account) external view returns (bool) +``` + +Check if an account has voted on a proposal + +#### getVotes + +```solidity +function getVotes(address account, uint256 blockNumber) external view returns (uint256) +``` + +Get voting power of an account at a specific block + +#### quorum + +```solidity +function quorum(uint256 blockNumber) external view returns (uint256) +``` + +Get the required quorum at a specific block + +#### votingDelay + +```solidity +function votingDelay() external view returns (uint256) +``` + +Get the voting delay (blocks before voting starts) + +#### votingPeriod + +```solidity +function votingPeriod() external view returns (uint256) +``` + +Get the voting period (blocks for voting) + +#### proposalThreshold + +```solidity +function proposalThreshold() external view returns (uint256) +``` + +Get the proposal threshold (tokens needed to propose) diff --git a/pages/developers/api/reference/ITangleJobs.mdx b/pages/developers/api/reference/ITangleJobs.mdx new file mode 100644 index 00000000..96151878 --- /dev/null +++ b/pages/developers/api/reference/ITangleJobs.mdx @@ -0,0 +1,91 @@ +--- +title: ITangleJobs +description: Auto-generated Solidity API reference. 
+--- + +# ITangleJobs + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleJobs.sol + +### ITangleJobs + +Job submission and result management interface + +#### Functions + +#### submitJob + +```solidity +function submitJob(uint64 serviceId, uint8 jobIndex, bytes inputs) external payable returns (uint64 callId) +``` + +Submit a job to a service + +#### submitResult + +```solidity +function submitResult(uint64 serviceId, uint64 callId, bytes result) external +``` + +Submit a job result (as operator) + +#### submitResults + +```solidity +function submitResults(uint64 serviceId, uint64[] callIds, bytes[] results) external +``` + +Submit multiple results in one transaction + +#### submitAggregatedResult + +```solidity +function submitAggregatedResult(uint64 serviceId, uint64 callId, bytes output, uint256 signerBitmap, uint256[2] aggregatedSignature, uint256[4] aggregatedPubkey) external +``` + +Submit an aggregated BLS result for a job + +_Only valid for jobs where requiresAggregation returns true_ + +##### Parameters + +| Name | Type | Description | +| ------------------- | ---------- | ------------------------------------------------------------------------ | +| serviceId | uint64 | The service ID | +| callId | uint64 | The job call ID | +| output | bytes | The aggregated output data | +| signerBitmap | uint256 | Bitmap indicating which operators signed (bit i = operator i in service) | +| aggregatedSignature | uint256[2] | The aggregated BLS signature [x, y] | +| aggregatedPubkey | uint256[4] | The aggregated public key [x0, x1, y0, y1] | + +#### getJobCall + +```solidity +function getJobCall(uint64 serviceId, uint64 callId) external view returns (struct Types.JobCall) +``` + +Get job call info + +#### Events + +#### JobSubmitted + +```solidity +event JobSubmitted(uint64 serviceId, uint64 callId, uint8 jobIndex, address caller, bytes inputs) +``` + +#### JobResultSubmitted + +```solidity +event JobResultSubmitted(uint64 serviceId, uint64 callId, address operator, bytes result) +``` + +#### JobCompleted + +```solidity +event JobCompleted(uint64 serviceId, uint64 callId) +``` + +Emitted when a job reaches its required result threshold + +_Derive resultCount from getJobCall(serviceId, callId).resultCount_
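+
+#### Example: submitting a job and a result
+
+A minimal sketch of the call flow, not taken from the source: a permitted caller submits a job and an operator later posts its result. The contract name, payload encodings, and import path are assumptions; in practice the caller and operator are different accounts and are combined in one contract here only to keep the sketch short.
+
+```solidity
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.20;
+
+// Import path is an assumption; adjust to your remappings.
+import {ITangleJobs} from "tnt-core/src/interfaces/ITangleJobs.sol";
+
+contract JobFlowExample {
+    ITangleJobs public immutable tangle;
+
+    constructor(ITangleJobs _tangle) {
+        tangle = _tangle;
+    }
+
+    /// Caller side: this address must be a permitted caller for `serviceId`.
+    function requestWork(uint64 serviceId, uint8 jobIndex, uint256 input) external returns (uint64 callId) {
+        callId = tangle.submitJob(serviceId, jobIndex, abi.encode(input));
+    }
+
+    /// Operator side: post a result for a previously submitted call.
+    function postResult(uint64 serviceId, uint64 callId, uint256 output) external {
+        tangle.submitResult(serviceId, callId, abi.encode(output));
+    }
+}
+```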
diff --git a/pages/developers/api/reference/ITangleOperators.mdx new file mode 100644 index 00000000..8a5b6fa1 --- /dev/null +++ b/pages/developers/api/reference/ITangleOperators.mdx @@ -0,0 +1,159 @@ +--- +title: ITangleOperators +description: Auto-generated Solidity API reference. +--- + +# ITangleOperators + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleOperators.sol + +### ITangleOperators + +Operator registration and management interface + +_Operator liveness is tracked via OperatorStatusRegistry heartbeats, +not a setOperatorOnline call. Use submitHeartbeat/isOnline/getOperatorStatus +on the registry for liveness signals._ + +#### Functions + +#### preRegister + +```solidity +function preRegister(uint64 blueprintId) external +``` + +Signal intent to register for a blueprint + +#### registerOperator + +```solidity +function registerOperator(uint64 blueprintId, bytes ecdsaPublicKey, string rpcAddress) external +``` + +Register as operator for a blueprint + +##### Parameters + +| Name | Type | Description | +| -------------- | ------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| blueprintId | uint64 | The blueprint to register for | +| ecdsaPublicKey | bytes | The ECDSA public key for gossip network identity. This key is used for signing/verifying messages in the P2P gossip network and may differ from the wallet key (msg.sender) | +| rpcAddress | string | The operator's RPC endpoint URL | + +#### registerOperator + +```solidity +function registerOperator(uint64 blueprintId, bytes ecdsaPublicKey, string rpcAddress, bytes registrationInputs) external +``` + +Register as operator providing blueprint-specific registration inputs + +##### Parameters + +| Name | Type | Description | +| ------------------ | ------ | ----------------------------------------------- | +| blueprintId | uint64 | | +| ecdsaPublicKey | bytes | | +| rpcAddress | string | | +| registrationInputs | bytes | Encoded payload validated by blueprint's schema | + +#### unregisterOperator + +```solidity +function unregisterOperator(uint64 blueprintId) external +``` + +Unregister from a blueprint + +#### updateOperatorPreferences + +```solidity +function updateOperatorPreferences(uint64 blueprintId, bytes ecdsaPublicKey, string rpcAddress) external +``` + +Update operator preferences for a blueprint + +##### Parameters + +| Name | Type | Description | +| -------------- | ------ | --------------------------------------------------------- | +| blueprintId | uint64 | The blueprint to update preferences for | +| ecdsaPublicKey | bytes | New ECDSA public key (pass empty bytes to keep unchanged) | +| rpcAddress | string | New RPC endpoint (pass empty string to keep unchanged) | + +#### getOperatorRegistration + +```solidity +function getOperatorRegistration(uint64 blueprintId, address operator) external view returns (struct Types.OperatorRegistration) +``` + +Get operator registration for a blueprint + +#### getOperatorPreferences + +```solidity +function getOperatorPreferences(uint64 blueprintId, address operator) external view returns (struct Types.OperatorPreferences) +``` + +Get operator preferences for a blueprint (includes ECDSA public key) + +#### getOperatorPublicKey + +```solidity +function getOperatorPublicKey(uint64 blueprintId, address operator) external view returns (bytes) +``` + +Get operator's ECDSA public key for gossip network identity + +_Returns the key used for signing/verifying gossip messages_ + +#### isOperatorRegistered + +```solidity +function isOperatorRegistered(uint64 blueprintId, address operator) external view returns (bool) +``` + +Check if operator is registered for a blueprint
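+
+#### Example: registering for a blueprint
+
+A minimal sketch, not taken from the source, of registering an operator for a blueprint and checking the registration. The gossip key, RPC URL, and import path are placeholder assumptions; in practice operators usually register from their own operator account (for example via the Tangle CLI) rather than through a helper contract.
+
+```solidity
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.20;
+
+// Import path is an assumption; adjust to your remappings.
+import {ITangleOperators} from "tnt-core/src/interfaces/ITangleOperators.sol";
+
+contract OperatorRegistrationExample {
+    ITangleOperators public immutable tangle;
+
+    constructor(ITangleOperators _tangle) {
+        tangle = _tangle;
+    }
+
+    /// Register for `blueprintId`. Note that the operator recorded on-chain is
+    /// msg.sender as seen by Tangle, which here is this contract.
+    function register(uint64 blueprintId, bytes calldata gossipKey, string calldata rpcUrl) external {
+        tangle.registerOperator(blueprintId, gossipKey, rpcUrl);
+        require(tangle.isOperatorRegistered(blueprintId, address(this)), "registration failed");
+    }
+}
+```
+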
+#### Events + +#### OperatorRegistered + +```solidity +event OperatorRegistered(uint64 blueprintId, address operator, bytes ecdsaPublicKey, string rpcAddress) +``` + +Emitted when an operator registers for a blueprint + +##### Parameters + +| Name | Type | Description | +| -------------- | ------- | ------------------------------------------------ | +| blueprintId | uint64 | The blueprint ID | +| operator | address | The operator address (wallet) | +| ecdsaPublicKey | bytes | The ECDSA public key for gossip network identity | +| rpcAddress | string | The operator's RPC endpoint | + +#### OperatorUnregistered + +```solidity +event OperatorUnregistered(uint64 blueprintId, address operator) +``` + +#### OperatorPreferencesUpdated + +```solidity +event OperatorPreferencesUpdated(uint64 blueprintId, address operator, bytes ecdsaPublicKey, string rpcAddress) +``` + +Emitted when an operator updates their preferences + +##### Parameters + +| Name | Type | Description | +| -------------- | ------- | -------------------------------------------------------- | +| blueprintId | uint64 | The blueprint ID | +| operator | address | The operator address | +| ecdsaPublicKey | bytes | The updated ECDSA public key (may be empty if unchanged) | +| rpcAddress | string | The updated RPC endpoint (may be empty if unchanged) | diff --git a/pages/developers/api/reference/ITanglePaymentsInternal.mdx new file mode 100644 index 00000000..093608b5 --- /dev/null +++ b/pages/developers/api/reference/ITanglePaymentsInternal.mdx @@ -0,0 +1,24 @@ +--- +title: ITanglePaymentsInternal +description: Auto-generated Solidity API reference. +--- + +# ITanglePaymentsInternal + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITanglePaymentsInternal.sol + +### ITanglePaymentsInternal + +#### Functions + +#### distributePayment + +```solidity +function distributePayment(uint64 serviceId, uint64 blueprintId, address token, uint256 amount, address[] operators, uint16[] exposures, uint256 totalExposure) external +``` + +#### depositToEscrow + +```solidity +function depositToEscrow(uint64 serviceId, address token, uint256 amount) external +``` diff --git a/pages/developers/api/reference/ITangleRewards.mdx new file mode 100644 index 00000000..2c7cb27a --- /dev/null +++ b/pages/developers/api/reference/ITangleRewards.mdx @@ -0,0 +1,80 @@ +--- +title: ITangleRewards +description: Auto-generated Solidity API reference.
+--- + +# ITangleRewards + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleRewards.sol + +### ITangleRewards + +Reward distribution and claiming interface + +#### Functions + +#### claimRewards + +```solidity +function claimRewards() external +``` + +Claim accumulated rewards (native token) + +#### claimRewards + +```solidity +function claimRewards(address token) external +``` + +Claim accumulated rewards for a specific token + +#### claimRewardsBatch + +```solidity +function claimRewardsBatch(address[] tokens) external +``` + +Claim accumulated rewards for multiple tokens + +#### claimRewardsAll + +```solidity +function claimRewardsAll() external +``` + +Claim accumulated rewards for all pending tokens + +#### pendingRewards + +```solidity +function pendingRewards(address account) external view returns (uint256) +``` + +Get pending rewards for an account (native token) + +#### pendingRewards + +```solidity +function pendingRewards(address account, address token) external view returns (uint256) +``` + +Get pending rewards for an account and token + +#### rewardTokens + +```solidity +function rewardTokens(address account) external view returns (address[]) +``` + +List tokens with non-zero pending rewards for an account + +_Convenience view; mappings are not enumerable._ + +#### Events + +#### RewardsClaimed + +```solidity +event RewardsClaimed(address account, address token, uint256 amount) +``` diff --git a/pages/developers/api/reference/ITangleSecurityView.mdx b/pages/developers/api/reference/ITangleSecurityView.mdx new file mode 100644 index 00000000..30c2f1fb --- /dev/null +++ b/pages/developers/api/reference/ITangleSecurityView.mdx @@ -0,0 +1,44 @@ +--- +title: ITangleSecurityView +description: Auto-generated Solidity API reference. +--- + +# ITangleSecurityView + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleSecurityView.sol + +### ITangleSecurityView + +Minimal view interface for reading service security requirements + operator commitments. + +#### Functions + +#### getServiceSecurityRequirements + +```solidity +function getServiceSecurityRequirements(uint64 serviceId) external view returns (struct Types.AssetSecurityRequirement[]) +``` + +#### getServiceSecurityCommitmentBps + +```solidity +function getServiceSecurityCommitmentBps(uint64 serviceId, address operator, enum Types.AssetKind kind, address token) external view returns (uint16) +``` + +#### treasury + +```solidity +function treasury() external view returns (address payable) +``` + +#### getService + +```solidity +function getService(uint64 serviceId) external view returns (struct Types.Service) +``` + +#### getServiceOperators + +```solidity +function getServiceOperators(uint64 serviceId) external view returns (address[]) +``` diff --git a/pages/developers/api/reference/ITangleServices.mdx b/pages/developers/api/reference/ITangleServices.mdx new file mode 100644 index 00000000..7e62887d --- /dev/null +++ b/pages/developers/api/reference/ITangleServices.mdx @@ -0,0 +1,413 @@ +--- +title: ITangleServices +description: Auto-generated Solidity API reference. 
+--- + +# ITangleServices + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleServices.sol + +### ITangleServices + +Service lifecycle management interface + +#### Functions + +#### requestService + +```solidity +function requestService(uint64 blueprintId, address[] operators, bytes config, address[] permittedCallers, uint64 ttl, address paymentToken, uint256 paymentAmount) external payable returns (uint64 requestId) +``` + +Request a new service + +#### requestServiceWithExposure + +```solidity +function requestServiceWithExposure(uint64 blueprintId, address[] operators, uint16[] exposureBps, bytes config, address[] permittedCallers, uint64 ttl, address paymentToken, uint256 paymentAmount) external payable returns (uint64 requestId) +``` + +Request a service with explicit exposure commitments + +#### requestServiceWithSecurity + +```solidity +function requestServiceWithSecurity(uint64 blueprintId, address[] operators, struct Types.AssetSecurityRequirement[] securityRequirements, bytes config, address[] permittedCallers, uint64 ttl, address paymentToken, uint256 paymentAmount) external payable returns (uint64 requestId) +``` + +Request a service with multi-asset security requirements + +_Each operator must provide security commitments matching these requirements when approving_ + +#### approveService + +```solidity +function approveService(uint64 requestId, uint8 stakingPercent) external +``` + +Approve a service request (as operator) - simple version + +#### approveServiceWithCommitments + +```solidity +function approveServiceWithCommitments(uint64 requestId, struct Types.AssetSecurityCommitment[] commitments) external +``` + +Approve a service request with multi-asset security commitments + +_Commitments must match the security requirements specified in the request_ + +#### rejectService + +```solidity +function rejectService(uint64 requestId) external +``` + +Reject a service request (as operator) + +#### createServiceFromQuotes + +```solidity +function createServiceFromQuotes(uint64 blueprintId, struct Types.SignedQuote[] quotes, bytes config, address[] permittedCallers, uint64 ttl) external payable returns (uint64 serviceId) +``` + +Create a service instantly using pre-signed operator quotes + +_No approval flow needed - operators have pre-committed via signatures_ + +##### Parameters + +| Name | Type | Description | +| ---------------- | -------------------------- | ---------------------------------------- | +| blueprintId | uint64 | The blueprint to use | +| quotes | struct Types.SignedQuote[] | Array of signed quotes from operators | +| config | bytes | Service configuration | +| permittedCallers | address[] | Addresses allowed to call jobs | +| ttl | uint64 | Service time-to-live (must match quotes) | + +#### extendServiceFromQuotes + +```solidity +function extendServiceFromQuotes(uint64 serviceId, struct Types.SignedQuote[] quotes, uint64 extensionDuration) external payable +``` + +Extend a service using pre-signed operator quotes + +#### terminateService + +```solidity +function terminateService(uint64 serviceId) external +``` + +Terminate a service (as owner) + +#### addPermittedCaller + +```solidity +function addPermittedCaller(uint64 serviceId, address caller) external +``` + +Add a permitted caller to a service + +#### removePermittedCaller + +```solidity +function removePermittedCaller(uint64 serviceId, address caller) external +``` + +Remove a permitted caller from a service
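+
+#### Example: requesting and approving a service
+
+A minimal sketch, not taken from the source, of the request/approve flow. The blueprint ID, operator set, TTL unit, payment values, and import path are placeholder assumptions; in practice the requester and each operator call Tangle from their own accounts rather than through one helper contract.
+
+```solidity
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.20;
+
+// Import path is an assumption; adjust to your remappings.
+import {ITangleServices} from "tnt-core/src/interfaces/ITangleServices.sol";
+
+contract ServiceLifecycleExample {
+    ITangleServices public immutable tangle;
+
+    constructor(ITangleServices _tangle) {
+        tangle = _tangle;
+    }
+
+    /// Requester side: ask two operators to run a blueprint, paying with native funds.
+    function request(uint64 blueprintId, address opA, address opB) external payable returns (uint64 requestId) {
+        address[] memory operators = new address[](2);
+        operators[0] = opA;
+        operators[1] = opB;
+
+        address[] memory callers = new address[](1);
+        callers[0] = msg.sender;
+
+        requestId = tangle.requestService{value: msg.value}(
+            blueprintId,
+            operators,
+            "",          // blueprint-specific config (placeholder)
+            callers,
+            30 days,     // ttl, assumed to be expressed in seconds
+            address(0),  // native payment token
+            msg.value
+        );
+    }
+
+    /// Operator side: approve the request, committing 50 percent of restaked security
+    /// (stakingPercent is assumed to be a whole-number percentage).
+    function approve(uint64 requestId) external {
+        tangle.approveService(requestId, 50);
+    }
+}
+```
+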
serviceId, uint16 exposureBps) external +``` + +Join an active service (Dynamic membership only) + +#### joinServiceWithCommitments + +```solidity +function joinServiceWithCommitments(uint64 serviceId, uint16 exposureBps, struct Types.AssetSecurityCommitment[] commitments) external +``` + +Join an active service with per-asset security commitments (Dynamic membership only) + +#### leaveService + +```solidity +function leaveService(uint64 serviceId) external +``` + +Leave an active service (Dynamic membership only) + +#### scheduleExit + +```solidity +function scheduleExit(uint64 serviceId) external +``` + +Schedule exit from an active service when exit queues are enabled + +#### executeExit + +```solidity +function executeExit(uint64 serviceId) external +``` + +Execute a scheduled exit after the queue delay + +#### cancelExit + +```solidity +function cancelExit(uint64 serviceId) external +``` + +Cancel a scheduled exit before execution + +#### forceExit + +```solidity +function forceExit(uint64 serviceId, address operator) external +``` + +Force exit an operator from a service (if permitted by config) + +#### forceRemoveOperator + +```solidity +function forceRemoveOperator(uint64 serviceId, address operator) external +``` + +Force remove an operator from a service (blueprint manager only) + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ---------------------- | +| serviceId | uint64 | The service ID | +| operator | address | The operator to remove | + +#### billSubscription + +```solidity +function billSubscription(uint64 serviceId) external +``` + +Bill a subscription service for the current period + +#### billSubscriptionBatch + +```solidity +function billSubscriptionBatch(uint64[] serviceIds) external returns (uint256 totalBilled, uint256 billedCount) +``` + +Bill multiple subscription services in one call + +#### getBillableServices + +```solidity +function getBillableServices(uint64[] serviceIds) external view returns (uint64[] billable) +``` + +Get billable services from a list of candidates + +#### fundService + +```solidity +function fundService(uint64 serviceId, uint256 amount) external payable +``` + +Fund a service escrow balance + +#### getServiceRequest + +```solidity +function getServiceRequest(uint64 requestId) external view returns (struct Types.ServiceRequest) +``` + +Get service request + +#### getServiceRequestSecurityRequirements + +```solidity +function getServiceRequestSecurityRequirements(uint64 requestId) external view returns (struct Types.AssetSecurityRequirement[]) +``` + +Get security requirements for a service request + +#### getServiceRequestSecurityCommitments + +```solidity +function getServiceRequestSecurityCommitments(uint64 requestId, address operator) external view returns (struct Types.AssetSecurityCommitment[]) +``` + +Get security commitments for a service request by operator + +#### getService + +```solidity +function getService(uint64 serviceId) external view returns (struct Types.Service) +``` + +Get service info + +#### isServiceActive + +```solidity +function isServiceActive(uint64 serviceId) external view returns (bool) +``` + +Check if service is active + +#### isServiceOperator + +```solidity +function isServiceOperator(uint64 serviceId, address operator) external view returns (bool) +``` + +Check if address is operator in service + +#### getServiceOperator + +```solidity +function getServiceOperator(uint64 serviceId, address operator) external view returns (struct Types.ServiceOperator) +``` + +Get operator info 
for a service + +#### getServiceOperators + +```solidity +function getServiceOperators(uint64 serviceId) external view returns (address[]) +``` + +Get the list of operators for a service + +#### getServiceSecurityRequirements + +```solidity +function getServiceSecurityRequirements(uint64 serviceId) external view returns (struct Types.AssetSecurityRequirement[]) +``` + +Get persisted security requirements for an active service + +#### getServiceEscrow + +```solidity +function getServiceEscrow(uint64 serviceId) external view returns (struct PaymentLib.ServiceEscrow) +``` + +Get service escrow details + +#### getExitRequest + +```solidity +function getExitRequest(uint64 serviceId, address operator) external view returns (struct Types.ExitRequest) +``` + +Get exit request for an operator + +#### getExitStatus + +```solidity +function getExitStatus(uint64 serviceId, address operator) external view returns (enum Types.ExitStatus) +``` + +Get exit status for an operator + +#### getExitConfig + +```solidity +function getExitConfig(uint64 serviceId) external view returns (struct Types.ExitConfig) +``` + +Get exit configuration for a service + +#### canScheduleExit + +```solidity +function canScheduleExit(uint64 serviceId, address operator) external view returns (bool canExit, string reason) +``` + +Check if operator can schedule exit now + +#### getServiceSecurityCommitments + +```solidity +function getServiceSecurityCommitments(uint64 serviceId, address operator) external view returns (struct Types.AssetSecurityCommitment[]) +``` + +Get persisted security commitments for an active service by operator + +#### isPermittedCaller + +```solidity +function isPermittedCaller(uint64 serviceId, address caller) external view returns (bool) +``` + +Check if address can call jobs on service + +#### serviceCount + +```solidity +function serviceCount() external view returns (uint64) +``` + +Get current service count + +#### Events + +#### ServiceRequested + +```solidity +event ServiceRequested(uint64 requestId, uint64 blueprintId, address requester) +``` + +#### ServiceRequestedWithSecurity + +```solidity +event ServiceRequestedWithSecurity(uint64 requestId, uint64 blueprintId, address requester) +``` + +#### ServiceApproved + +```solidity +event ServiceApproved(uint64 requestId, address operator) +``` + +#### ServiceRejected + +```solidity +event ServiceRejected(uint64 requestId, address operator) +``` + +#### ServiceActivated + +```solidity +event ServiceActivated(uint64 serviceId, uint64 requestId, uint64 blueprintId) +``` + +#### ServiceTerminated + +```solidity +event ServiceTerminated(uint64 serviceId) +``` + +#### OperatorJoinedService + +```solidity +event OperatorJoinedService(uint64 serviceId, address operator, uint16 exposureBps) +``` + +#### OperatorLeftService + +```solidity +event OperatorLeftService(uint64 serviceId, address operator) +``` + +#### SubscriptionBilled + +```solidity +event SubscriptionBilled(uint64 serviceId, uint256 amount, uint64 period) +``` diff --git a/pages/developers/api/reference/ITangleSlashing.mdx b/pages/developers/api/reference/ITangleSlashing.mdx new file mode 100644 index 00000000..2b935915 --- /dev/null +++ b/pages/developers/api/reference/ITangleSlashing.mdx @@ -0,0 +1,107 @@ +--- +title: ITangleSlashing +description: Auto-generated Solidity API reference. 
+--- + +# ITangleSlashing + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleSlashing.sol + +### ITangleSlashing + +Slashing interface for Tangle protocol + +#### Functions + +#### proposeSlash + +```solidity +function proposeSlash(uint64 serviceId, address operator, uint256 amount, bytes32 evidence) external returns (uint64 slashId) +``` + +Propose a slash against an operator + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ------------------------------------ | +| serviceId | uint64 | The service where violation occurred | +| operator | address | The operator to slash | +| amount | uint256 | Amount to slash | +| evidence | bytes32 | Evidence hash | + +##### Return Values + +| Name | Type | Description | +| ------- | ------ | ------------------------------------ | +| slashId | uint64 | The ID of the created slash proposal | + +#### disputeSlash + +```solidity +function disputeSlash(uint64 slashId, string reason) external +``` + +Dispute a slash proposal + +#### executeSlash + +```solidity +function executeSlash(uint64 slashId) external returns (uint256 actualSlashed) +``` + +Execute a slash proposal + +#### executeSlashBatch + +```solidity +function executeSlashBatch(uint64[] slashIds) external returns (uint256 totalSlashed, uint256 executedCount) +``` + +Execute a batch of slashes + +#### getExecutableSlashes + +```solidity +function getExecutableSlashes(uint64 fromId, uint64 toId) external view returns (uint64[] ids) +``` + +Get list of executable slash IDs in a range + +#### cancelSlash + +```solidity +function cancelSlash(uint64 slashId, string reason) external +``` + +Cancel a slash proposal + +#### setSlashConfig + +```solidity +function setSlashConfig(uint64 disputeWindow, bool instantSlashEnabled, uint16 maxSlashBps) external +``` + +Update slashing configuration + +#### getSlashProposal + +```solidity +function getSlashProposal(uint64 slashId) external view returns (struct SlashingLib.SlashProposal) +``` + +Get slash proposal details + +#### Events + +#### SlashProposed + +```solidity +event SlashProposed(uint64 serviceId, address operator, uint256 amount, bytes32 evidence) +``` + +#### SlashExecuted + +```solidity +event SlashExecuted(uint64 serviceId, address operator, uint256 amount) +``` diff --git a/pages/developers/api/reference/ITangleToken.mdx b/pages/developers/api/reference/ITangleToken.mdx new file mode 100644 index 00000000..5c92135a --- /dev/null +++ b/pages/developers/api/reference/ITangleToken.mdx @@ -0,0 +1,94 @@ +--- +title: ITangleToken +description: Auto-generated Solidity API reference. 
+--- + +# ITangleToken + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleGovernance.sol + +### ITangleToken + +Interface for the TNT governance token + +#### Functions + +#### getVotes + +```solidity +function getVotes(address account) external view returns (uint256) +``` + +Get the current voting power of an account + +#### getPastVotes + +```solidity +function getPastVotes(address account, uint256 blockNumber) external view returns (uint256) +``` + +Get historical voting power at a past block + +#### getPastTotalSupply + +```solidity +function getPastTotalSupply(uint256 blockNumber) external view returns (uint256) +``` + +Get the total supply at a past block + +#### delegates + +```solidity +function delegates(address account) external view returns (address) +``` + +Get the delegate of an account + +#### delegate + +```solidity +function delegate(address delegatee) external +``` + +Delegate voting power to another address + +#### delegateBySig + +```solidity +function delegateBySig(address delegatee, uint256 nonce, uint256 expiry, uint8 v, bytes32 r, bytes32 s) external +``` + +Delegate using EIP-712 signature + +#### totalSupply + +```solidity +function totalSupply() external view returns (uint256) +``` + +Standard ERC20 functions + +#### balanceOf + +```solidity +function balanceOf(address account) external view returns (uint256) +``` + +#### transfer + +```solidity +function transfer(address to, uint256 amount) external returns (bool) +``` + +#### approve + +```solidity +function approve(address spender, uint256 amount) external returns (bool) +``` + +#### transferFrom + +```solidity +function transferFrom(address from, address to, uint256 amount) external returns (bool) +``` diff --git a/pages/developers/api/reference/generated/BlueprintHookBase.mdx b/pages/developers/api/reference/generated/BlueprintHookBase.mdx new file mode 100644 index 00000000..2ae6ecbf --- /dev/null +++ b/pages/developers/api/reference/generated/BlueprintHookBase.mdx @@ -0,0 +1,142 @@ +--- +title: BlueprintHookBase +description: Auto-generated Solidity API reference. 
+--- + +# BlueprintHookBase + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IBlueprintHook.sol + +### BlueprintHookBase + +Base implementation with sensible defaults + +_For full features, extend BlueprintServiceManagerBase instead_ + +#### Functions + +#### onBlueprintCreated + +```solidity +function onBlueprintCreated(uint64, address) external virtual +``` + +#### onOperatorRegister + +```solidity +function onOperatorRegister(uint64, address, bytes) external virtual returns (bool) +``` + +#### onOperatorUnregister + +```solidity +function onOperatorUnregister(uint64, address) external virtual +``` + +#### onServiceRequest + +```solidity +function onServiceRequest(uint64, uint64, address, address[], bytes) external payable virtual returns (bool) +``` + +#### onServiceApprove + +```solidity +function onServiceApprove(uint64, address, uint8) external virtual +``` + +#### onServiceReject + +```solidity +function onServiceReject(uint64, address) external virtual +``` + +#### onServiceActivated + +```solidity +function onServiceActivated(uint64, uint64, address, address[]) external virtual +``` + +#### onServiceTerminated + +```solidity +function onServiceTerminated(uint64, address) external virtual +``` + +#### canJoin + +```solidity +function canJoin(uint64, address, uint16) external view virtual returns (bool) +``` + +#### canLeave + +```solidity +function canLeave(uint64, address) external view virtual returns (bool) +``` + +#### onJobSubmitted + +```solidity +function onJobSubmitted(uint64, uint64, uint8, address, bytes) external payable virtual returns (bool) +``` + +#### onJobResult + +```solidity +function onJobResult(uint64, uint64, address, bytes) external virtual returns (bool) +``` + +#### onJobCompleted + +```solidity +function onJobCompleted(uint64, uint64, uint32) external virtual +``` + +#### onSlashProposed + +```solidity +function onSlashProposed(uint64, address, uint256, bytes32) external virtual returns (bool) +``` + +#### onSlashApplied + +```solidity +function onSlashApplied(uint64, address, uint256) external virtual +``` + +#### getDeveloperPaymentAddress + +```solidity +function getDeveloperPaymentAddress(uint64) external view virtual returns (address payable) +``` + +#### isPaymentTokenAllowed + +```solidity +function isPaymentTokenAllowed(address) external view virtual returns (bool) +``` + +#### getRequiredResultCount + +```solidity +function getRequiredResultCount(uint64, uint8) external view virtual returns (uint32) +``` + +#### requiresAggregation + +```solidity +function requiresAggregation(uint64, uint8) external view virtual returns (bool) +``` + +#### getAggregationThreshold + +```solidity +function getAggregationThreshold(uint64, uint8) external view virtual returns (uint16, uint8) +``` + +#### onAggregatedResult + +```solidity +function onAggregatedResult(uint64, uint64, uint256, bytes) external virtual +``` diff --git a/pages/developers/api/reference/generated/IBlueprintHook.mdx b/pages/developers/api/reference/generated/IBlueprintHook.mdx new file mode 100644 index 00000000..82d48ae6 --- /dev/null +++ b/pages/developers/api/reference/generated/IBlueprintHook.mdx @@ -0,0 +1,227 @@ +--- +title: IBlueprintHook +description: Auto-generated Solidity API reference. 
+--- + +# IBlueprintHook + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IBlueprintHook.sol + +### IBlueprintHook + +Simplified hook interface for basic blueprint customization + +\_For full control, implement IBlueprintServiceManager directly. +This interface provides a simpler subset for common use cases. + +Migration path: + +- Simple blueprints: Use IBlueprintHook / BlueprintHookBase +- Full-featured blueprints: Use IBlueprintServiceManager / BlueprintServiceManagerBase\_ + +#### Functions + +#### onBlueprintCreated + +```solidity +function onBlueprintCreated(uint64 blueprintId, address owner) external +``` + +Called when blueprint is created + +#### onOperatorRegister + +```solidity +function onOperatorRegister(uint64 blueprintId, address operator, bytes data) external returns (bool accept) +``` + +Called when an operator registers + +##### Return Values + +| Name | Type | Description | +| ------ | ---- | --------------------------- | +| accept | bool | True to accept registration | + +#### onOperatorUnregister + +```solidity +function onOperatorUnregister(uint64 blueprintId, address operator) external +``` + +Called when an operator unregisters + +#### onServiceRequest + +```solidity +function onServiceRequest(uint64 requestId, uint64 blueprintId, address requester, address[] operators, bytes config) external payable returns (bool accept) +``` + +Called when a service is requested + +##### Return Values + +| Name | Type | Description | +| ------ | ---- | ---------------------- | +| accept | bool | True to accept request | + +#### onServiceApprove + +```solidity +function onServiceApprove(uint64 requestId, address operator, uint8 stakingPercent) external +``` + +Called when an operator approves a service request + +#### onServiceReject + +```solidity +function onServiceReject(uint64 requestId, address operator) external +``` + +Called when an operator rejects a service request + +#### onServiceActivated + +```solidity +function onServiceActivated(uint64 serviceId, uint64 requestId, address owner, address[] operators) external +``` + +Called when service becomes active + +#### onServiceTerminated + +```solidity +function onServiceTerminated(uint64 serviceId, address owner) external +``` + +Called when service is terminated + +#### canJoin + +```solidity +function canJoin(uint64 serviceId, address operator, uint16 exposureBps) external view returns (bool) +``` + +Check if operator can join a dynamic service + +#### canLeave + +```solidity +function canLeave(uint64 serviceId, address operator) external view returns (bool) +``` + +Check if operator can leave a dynamic service + +#### onJobSubmitted + +```solidity +function onJobSubmitted(uint64 serviceId, uint64 callId, uint8 jobIndex, address caller, bytes inputs) external payable returns (bool accept) +``` + +Called when a job is submitted + +##### Return Values + +| Name | Type | Description | +| ------ | ---- | ------------------ | +| accept | bool | True to accept job | + +#### onJobResult + +```solidity +function onJobResult(uint64 serviceId, uint64 callId, address operator, bytes result) external returns (bool accept) +``` + +Called when an operator submits a result + +##### Return Values + +| Name | Type | Description | +| ------ | ---- | --------------------- | +| accept | bool | True to accept result | + +#### onJobCompleted + +```solidity +function onJobCompleted(uint64 serviceId, uint64 callId, uint32 resultCount) external +``` + +Called when a job is marked complete + +#### onSlashProposed + 
+```solidity +function onSlashProposed(uint64 serviceId, address operator, uint256 amount, bytes32 evidence) external returns (bool approve) +``` + +Called before a slash is applied + +##### Return Values + +| Name | Type | Description | +| ------- | ---- | --------------------- | +| approve | bool | True to approve slash | + +#### onSlashApplied + +```solidity +function onSlashApplied(uint64 serviceId, address operator, uint256 amount) external +``` + +Called after a slash is applied + +#### getDeveloperPaymentAddress + +```solidity +function getDeveloperPaymentAddress(uint64 serviceId) external view returns (address payable) +``` + +Get the developer payment address + +#### isPaymentTokenAllowed + +```solidity +function isPaymentTokenAllowed(address token) external view returns (bool) +``` + +Check if a payment token is allowed + +#### getRequiredResultCount + +```solidity +function getRequiredResultCount(uint64 serviceId, uint8 jobIndex) external view returns (uint32) +``` + +Get the number of results required for job completion + +#### requiresAggregation + +```solidity +function requiresAggregation(uint64 serviceId, uint8 jobIndex) external view returns (bool) +``` + +Check if a job requires BLS aggregated results + +#### getAggregationThreshold + +```solidity +function getAggregationThreshold(uint64 serviceId, uint8 jobIndex) external view returns (uint16 thresholdBps, uint8 thresholdType) +``` + +Get the aggregation threshold configuration for a job + +##### Return Values + +| Name | Type | Description | +| ------------- | ------ | --------------------------------------------------------------------- | +| thresholdBps | uint16 | Threshold in basis points (6700 = 67%) | +| thresholdType | uint8 | 0 = CountBased (% of operators), 1 = StakeWeighted (% of total stake) | + +#### onAggregatedResult + +```solidity +function onAggregatedResult(uint64 serviceId, uint64 callId, uint256 signerBitmap, bytes output) external +``` + +Called when an aggregated result is submitted diff --git a/pages/developers/api/reference/generated/IBlueprintServiceManager.mdx b/pages/developers/api/reference/generated/IBlueprintServiceManager.mdx new file mode 100644 index 00000000..0ff78a57 --- /dev/null +++ b/pages/developers/api/reference/generated/IBlueprintServiceManager.mdx @@ -0,0 +1,656 @@ +--- +title: IBlueprintServiceManager +description: Auto-generated Solidity API reference. +--- + +# IBlueprintServiceManager + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IBlueprintServiceManager.sol + +### IBlueprintServiceManager + +Full interface for blueprint-specific service managers + +\_Blueprint developers implement this to customize all aspects of their blueprint. +This is the primary integration point for blueprint developers - implement the hooks +you need and leave others as default (via BlueprintServiceManagerBase). + +The lifecycle flow: + +1. Blueprint created → onBlueprintCreated +2. Operators register → onRegister +3. Service requested → onRequest +4. Operators approve → onApprove +5. Service activated → onServiceInitialized +6. Jobs submitted → onJobCall +7. Results submitted → onJobResult +8. 
Service terminated → onServiceTermination\_ + +#### Functions + +#### onBlueprintCreated + +```solidity +function onBlueprintCreated(uint64 blueprintId, address owner, address tangleCore) external +``` + +Called when blueprint is created + +_Store the blueprintId and tangleCore address for future reference_ + +##### Parameters + +| Name | Type | Description | +| ----------- | ------- | --------------------------------------- | +| blueprintId | uint64 | The new blueprint ID | +| owner | address | The blueprint owner | +| tangleCore | address | The address of the Tangle core contract | + +#### onRegister + +```solidity +function onRegister(address operator, bytes registrationInputs) external payable +``` + +Called when an operator registers to this blueprint + +_Validate operator requirements here (stake, reputation, etc.)_ + +##### Parameters + +| Name | Type | Description | +| ------------------ | ------- | ------------------------------------------------------ | +| operator | address | The operator's address | +| registrationInputs | bytes | Custom registration data (blueprint-specific encoding) | + +#### onUnregister + +```solidity +function onUnregister(address operator) external +``` + +Called when an operator unregisters from this blueprint + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ---------------------- | +| operator | address | The operator's address | + +#### onUpdatePreferences + +```solidity +function onUpdatePreferences(address operator, bytes newPreferences) external payable +``` + +Called when an operator updates their preferences (RPC address, etc.) + +##### Parameters + +| Name | Type | Description | +| -------------- | ------- | ------------------------ | +| operator | address | The operator's address | +| newPreferences | bytes | Updated preferences data | + +#### getHeartbeatInterval + +```solidity +function getHeartbeatInterval(uint64 serviceId) external view returns (bool useDefault, uint64 interval) +``` + +Get the heartbeat interval for a service + +_Operators must submit heartbeats within this interval_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | + +##### Return Values + +| Name | Type | Description | +| ---------- | ------ | ------------------------------------------------------- | +| useDefault | bool | True to use protocol default, false to use custom value | +| interval | uint64 | Heartbeat interval in blocks (0 = disabled) | + +#### getHeartbeatThreshold + +```solidity +function getHeartbeatThreshold(uint64 serviceId) external view returns (bool useDefault, uint8 threshold) +``` + +Get the heartbeat threshold for a service + +_Percentage of operators that must respond within interval_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | + +##### Return Values + +| Name | Type | Description | +| ---------- | ----- | ---------------------------- | +| useDefault | bool | True to use protocol default | +| threshold | uint8 | Threshold percentage (0-100) | + +#### getSlashingWindow + +```solidity +function getSlashingWindow(uint64 serviceId) external view returns (bool useDefault, uint64 window) +``` + +Get the slashing window for a service + +_Time window for disputes before slash is finalized_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | + +##### Return Values + +| Name | 
Type | Description | +| ---------- | ------ | ---------------------------- | +| useDefault | bool | True to use protocol default | +| window | uint64 | Slashing window in blocks | + +#### getExitConfig + +```solidity +function getExitConfig(uint64 serviceId) external view returns (bool useDefault, uint64 minCommitmentDuration, uint64 exitQueueDuration, bool forceExitAllowed) +``` + +Get the exit configuration for operator departures + +_Defines minimum commitment and exit queue timing_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | + +##### Return Values + +| Name | Type | Description | +| --------------------- | ------ | -------------------------------------------------------- | +| useDefault | bool | True to use protocol default | +| minCommitmentDuration | uint64 | Minimum time operator must stay after joining (seconds) | +| exitQueueDuration | uint64 | Time between scheduling exit and completing it (seconds) | +| forceExitAllowed | bool | Whether service owner can force-exit operators | + +#### onRequest + +```solidity +function onRequest(uint64 requestId, address requester, address[] operators, bytes requestInputs, uint64 ttl, address paymentAsset, uint256 paymentAmount) external payable +``` + +Called when a service is requested + +_Validate service configuration, operator selection, payment amount_ + +##### Parameters + +| Name | Type | Description | +| ------------- | --------- | --------------------------------------------------- | +| requestId | uint64 | The request ID | +| requester | address | Who is requesting the service | +| operators | address[] | Requested operators | +| requestInputs | bytes | Service configuration (blueprint-specific encoding) | +| ttl | uint64 | Time-to-live for the service | +| paymentAsset | address | Payment token address (address(0) for native) | +| paymentAmount | uint256 | Payment amount | + +#### onApprove + +```solidity +function onApprove(address operator, uint64 requestId, uint8 stakingPercent) external payable +``` + +Called when an operator approves a service request + +##### Parameters + +| Name | Type | Description | +| -------------- | ------- | ----------------------------------------------------- | +| operator | address | The approving operator | +| requestId | uint64 | The request ID | +| stakingPercent | uint8 | Percentage of stake committed to this service (0-100) | + +#### onReject + +```solidity +function onReject(address operator, uint64 requestId) external +``` + +Called when an operator rejects a service request + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ---------------------- | +| operator | address | The rejecting operator | +| requestId | uint64 | The request ID | + +#### onServiceInitialized + +```solidity +function onServiceInitialized(uint64 blueprintId, uint64 requestId, uint64 serviceId, address owner, address[] permittedCallers, uint64 ttl) external +``` + +Called when service becomes active (all operators approved) + +##### Parameters + +| Name | Type | Description | +| ---------------- | --------- | -------------------------------- | +| blueprintId | uint64 | The blueprint ID | +| requestId | uint64 | The original request ID | +| serviceId | uint64 | The new service ID | +| owner | address | The service owner | +| permittedCallers | address[] | Addresses allowed to submit jobs | +| ttl | uint64 | Service time-to-live | + +#### onServiceTermination + +```solidity +function 
onServiceTermination(uint64 serviceId, address owner) external +``` + +Called when service is terminated + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ----------------- | +| serviceId | uint64 | The service ID | +| owner | address | The service owner | + +#### canJoin + +```solidity +function canJoin(uint64 serviceId, address operator) external view returns (bool allowed) +``` + +Check if an operator can join a dynamic service + +_Called before operator joins - return false to reject_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ---------------------------- | +| serviceId | uint64 | The service ID | +| operator | address | The operator wanting to join | + +##### Return Values + +| Name | Type | Description | +| ------- | ---- | ------------------------- | +| allowed | bool | True if operator can join | + +#### onOperatorJoined + +```solidity +function onOperatorJoined(uint64 serviceId, address operator, uint16 exposureBps) external +``` + +Called after an operator successfully joins a service + +##### Parameters + +| Name | Type | Description | +| ----------- | ------- | --------------------------------------------- | +| serviceId | uint64 | The service ID | +| operator | address | The operator that joined | +| exposureBps | uint16 | The operator's stake exposure in basis points | + +#### canLeave + +```solidity +function canLeave(uint64 serviceId, address operator) external view returns (bool allowed) +``` + +Check if an operator can leave a dynamic service + +_Called before operator leaves - return false to reject +Note: This is called AFTER the exit queue check. Use getExitConfig to customize timing._ + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ----------------------------- | +| serviceId | uint64 | The service ID | +| operator | address | The operator wanting to leave | + +##### Return Values + +| Name | Type | Description | +| ------- | ---- | -------------------------- | +| allowed | bool | True if operator can leave | + +#### onOperatorLeft + +```solidity +function onOperatorLeft(uint64 serviceId, address operator) external +``` + +Called after an operator successfully leaves a service + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ---------------------- | +| serviceId | uint64 | The service ID | +| operator | address | The operator that left | + +#### onExitScheduled + +```solidity +function onExitScheduled(uint64 serviceId, address operator, uint64 executeAfter) external +``` + +Called when an operator schedules their exit from a service + +_Allows manager to track pending exits, notify other parties, etc._ + +##### Parameters + +| Name | Type | Description | +| ------------ | ------- | ----------------------------------- | +| serviceId | uint64 | The service ID | +| operator | address | The operator scheduling exit | +| executeAfter | uint64 | Timestamp when exit can be executed | + +#### onExitCanceled + +```solidity +function onExitCanceled(uint64 serviceId, address operator) external +``` + +Called when an operator cancels their scheduled exit + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | --------------------------- | +| serviceId | uint64 | The service ID | +| operator | address | The operator canceling exit | + +#### onJobCall + +```solidity +function onJobCall(uint64 serviceId, uint8 job, uint64 jobCallId, bytes inputs) external payable +``` + +Called when a job is submitted + +_Validate job inputs, check caller 
permissions, etc._ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | ---------------------------------------- | +| serviceId | uint64 | The service ID | +| job | uint8 | The job index in the blueprint | +| jobCallId | uint64 | Unique ID for this job call | +| inputs | bytes | Job inputs (blueprint-specific encoding) | + +#### onJobResult + +```solidity +function onJobResult(uint64 serviceId, uint8 job, uint64 jobCallId, address operator, bytes inputs, bytes outputs) external payable +``` + +Called when an operator submits a job result + +_Validate result format, check operator eligibility, aggregate results_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | -------------------------------------------- | +| serviceId | uint64 | The service ID | +| job | uint8 | The job index | +| jobCallId | uint64 | The job call ID | +| operator | address | The operator submitting | +| inputs | bytes | Original job inputs | +| outputs | bytes | Result outputs (blueprint-specific encoding) | + +#### onUnappliedSlash + +```solidity +function onUnappliedSlash(uint64 serviceId, bytes offender, uint8 slashPercent) external +``` + +Called when a slash is queued but not yet applied + +_This is the dispute window - gather evidence, notify parties_ + +##### Parameters + +| Name | Type | Description | +| ------------ | ------ | ------------------------------------------------------------- | +| serviceId | uint64 | The service ID | +| offender | bytes | The operator being slashed (encoded as bytes for flexibility) | +| slashPercent | uint8 | Percentage of stake to slash | + +#### onSlash + +```solidity +function onSlash(uint64 serviceId, bytes offender, uint8 slashPercent) external +``` + +Called when a slash is finalized and applied + +##### Parameters + +| Name | Type | Description | +| ------------ | ------ | -------------------- | +| serviceId | uint64 | The service ID | +| offender | bytes | The slashed operator | +| slashPercent | uint8 | Percentage slashed | + +#### querySlashingOrigin + +```solidity +function querySlashingOrigin(uint64 serviceId) external view returns (address slashingOrigin) +``` + +Query the account authorized to propose slashes for a service + +_Override to allow custom slashing authorities (dispute contracts, etc.)_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | + +##### Return Values + +| Name | Type | Description | +| -------------- | ------- | ----------------------------------------------- | +| slashingOrigin | address | Address that can slash (default: this contract) | + +#### queryDisputeOrigin + +```solidity +function queryDisputeOrigin(uint64 serviceId) external view returns (address disputeOrigin) +``` + +Query the account authorized to dispute slashes + +_Override to allow custom dispute resolution_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | + +##### Return Values + +| Name | Type | Description | +| ------------- | ------- | ------------------------------------------------- | +| disputeOrigin | address | Address that can dispute (default: this contract) | + +#### queryDeveloperPaymentAddress + +```solidity +function queryDeveloperPaymentAddress(uint64 serviceId) external view returns (address payable developerPaymentAddress) +``` + +Get the developer payment address for a service + +_Override to route payments to different addresses per service_ + 
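+As an illustration, a blueprint manager might route the developer share to a per-service payout address and fall back to a single treasury when no override is set. The sketch below is not part of tnt-core: `setPayoutAddress`, `payoutOverride`, and `defaultTreasury` are hypothetical helpers, and owner-only access control is omitted for brevity. The hook's parameters and return value are documented below.
+
+```solidity
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.20;
+
+/// Illustrative sketch only. A production manager would extend
+/// BlueprintServiceManagerBase and restrict setPayoutAddress to the
+/// blueprint owner; both details are omitted here for brevity.
+contract ExamplePayoutRouting {
+    address payable public defaultTreasury;
+    mapping(uint64 => address payable) public payoutOverride;
+
+    constructor(address payable treasury) {
+        defaultTreasury = treasury;
+    }
+
+    /// Hypothetical helper: pin a payout address for one service.
+    function setPayoutAddress(uint64 serviceId, address payable payout) external {
+        payoutOverride[serviceId] = payout;
+    }
+
+    /// Same signature as the hook above: prefer the per-service override,
+    /// otherwise fall back to the default treasury.
+    function queryDeveloperPaymentAddress(uint64 serviceId)
+        external
+        view
+        returns (address payable developerPaymentAddress)
+    {
+        address payable custom = payoutOverride[serviceId];
+        return custom != address(0) ? custom : defaultTreasury;
+    }
+}
+```
+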
+##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | + +##### Return Values + +| Name | Type | Description | +| ----------------------- | --------------- | ---------------------------------- | +| developerPaymentAddress | address payable | Address to receive developer share | + +#### queryIsPaymentAssetAllowed + +```solidity +function queryIsPaymentAssetAllowed(uint64 serviceId, address asset) external view returns (bool isAllowed) +``` + +Check if a payment asset is allowed for this blueprint + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ------------------------------------------------- | +| serviceId | uint64 | The service ID | +| asset | address | The payment asset address (address(0) for native) | + +##### Return Values + +| Name | Type | Description | +| --------- | ---- | ----------------------------------------- | +| isAllowed | bool | True if the asset can be used for payment | + +#### getRequiredResultCount + +```solidity +function getRequiredResultCount(uint64 serviceId, uint8 jobIndex) external view returns (uint32 required) +``` + +Get the number of results required to complete a job + +_Override for consensus requirements (e.g., 2/3 majority)_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | +| jobIndex | uint8 | The job index | + +##### Return Values + +| Name | Type | Description | +| -------- | ------ | ----------------------------------------------------- | +| required | uint32 | Number of results needed (0 = service operator count) | + +#### requiresAggregation + +```solidity +function requiresAggregation(uint64 serviceId, uint8 jobIndex) external view returns (bool required) +``` + +Check if a job requires BLS aggregated results + +_When true, operators must submit individual signatures that are aggregated +off-chain, then submitted via submitAggregatedResult instead of submitResult_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | +| jobIndex | uint8 | The job index | + +##### Return Values + +| Name | Type | Description | +| -------- | ---- | ------------------------------------------------ | +| required | bool | True if BLS aggregation is required for this job | + +#### getAggregationThreshold + +```solidity +function getAggregationThreshold(uint64 serviceId, uint8 jobIndex) external view returns (uint16 thresholdBps, uint8 thresholdType) +``` + +Get the aggregation threshold configuration for a job + +_Only relevant if requiresAggregation returns true_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | -------------- | +| serviceId | uint64 | The service ID | +| jobIndex | uint8 | The job index | + +##### Return Values + +| Name | Type | Description | +| ------------- | ------ | --------------------------------------------------------------------- | +| thresholdBps | uint16 | Threshold in basis points (6700 = 67%) | +| thresholdType | uint8 | 0 = CountBased (% of operators), 1 = StakeWeighted (% of total stake) | + +#### onAggregatedResult + +```solidity +function onAggregatedResult(uint64 serviceId, uint8 job, uint64 jobCallId, bytes output, uint256 signerBitmap, uint256[2] aggregatedSignature, uint256[4] aggregatedPubkey) external +``` + +Called when an aggregated job result is submitted + +_Validate the aggregated result, verify BLS signature, check threshold_ 
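+
+For orientation, the sketch below shows the shape of these aggregation hooks in a standalone contract: job index 0 requires aggregated results at a 67% count-based threshold, and the aggregated-result handler simply decodes the signer bitmap. It is not part of tnt-core, skips BLS signature verification entirely, and `lastSignerCount` is a hypothetical bookkeeping field; the hook's parameters are documented below.
+
+```solidity
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.20;
+
+/// Illustrative sketch only (not part of tnt-core). BLS verification and
+/// threshold enforcement are assumed to happen elsewhere; this contract only
+/// shows the hook shapes and how to read the signer bitmap.
+contract ExampleAggregationPolicy {
+    uint8 internal constant AGGREGATED_JOB = 0;
+
+    /// serviceId => number of signers reported by the last aggregated result.
+    mapping(uint64 => uint256) public lastSignerCount;
+
+    function requiresAggregation(uint64, uint8 jobIndex) external pure returns (bool) {
+        return jobIndex == AGGREGATED_JOB;
+    }
+
+    function getAggregationThreshold(uint64, uint8)
+        external
+        pure
+        returns (uint16 thresholdBps, uint8 thresholdType)
+    {
+        // 67%, CountBased, matching the thresholdType encoding documented above.
+        return (6700, 0);
+    }
+
+    function onAggregatedResult(
+        uint64 serviceId,
+        uint8 /* job */,
+        uint64 /* jobCallId */,
+        bytes calldata /* output */,
+        uint256 signerBitmap,
+        uint256[2] calldata /* aggregatedSignature */,
+        uint256[4] calldata /* aggregatedPubkey */
+    ) external {
+        // Count set bits: each bit in the bitmap marks one signing operator.
+        uint256 signers;
+        uint256 bits = signerBitmap;
+        while (bits != 0) {
+            bits &= bits - 1;
+            signers++;
+        }
+        lastSignerCount[serviceId] = signers;
+    }
+}
+```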
+ +##### Parameters + +| Name | Type | Description | +| ------------------- | ---------- | ----------------------------------------------- | +| serviceId | uint64 | The service ID | +| job | uint8 | The job index | +| jobCallId | uint64 | The job call ID | +| output | bytes | The aggregated output | +| signerBitmap | uint256 | Bitmap of which operators signed | +| aggregatedSignature | uint256[2] | The aggregated BLS signature (G1 point x, y) | +| aggregatedPubkey | uint256[4] | The aggregated public key of signers (G2 point) | + +#### getMinOperatorStake + +```solidity +function getMinOperatorStake() external view returns (bool useDefault, uint256 minStake) +``` + +Get the minimum stake required for operators to register for this blueprint + +_Called during operator registration to validate stake requirements_ + +##### Return Values + +| Name | Type | Description | +| ---------- | ------- | ----------------------------------------------------------- | +| useDefault | bool | True to use protocol default from staking module | +| minStake | uint256 | Custom minimum stake amount (only used if useDefault=false) | diff --git a/pages/developers/api/reference/generated/IERC7540.mdx b/pages/developers/api/reference/generated/IERC7540.mdx new file mode 100644 index 00000000..f4ba2d5f --- /dev/null +++ b/pages/developers/api/reference/generated/IERC7540.mdx @@ -0,0 +1,14 @@ +--- +title: IERC7540 +description: Auto-generated Solidity API reference. +--- + +# IERC7540 + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IERC7540.sol + +### IERC7540 + +Full ERC7540 interface combining deposit, redeem, and operator management + +_Extends ERC4626 with asynchronous request patterns_ diff --git a/pages/developers/api/reference/generated/IERC7540Deposit.mdx b/pages/developers/api/reference/generated/IERC7540Deposit.mdx new file mode 100644 index 00000000..b9cf7f1a --- /dev/null +++ b/pages/developers/api/reference/generated/IERC7540Deposit.mdx @@ -0,0 +1,90 @@ +--- +title: IERC7540Deposit +description: Auto-generated Solidity API reference. 
+--- + +# IERC7540Deposit + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IERC7540.sol + +### IERC7540Deposit + +Interface for asynchronous deposit requests + +_See https://eips.ethereum.org/EIPS/eip-7540_ + +#### Functions + +#### requestDeposit + +```solidity +function requestDeposit(uint256 assets, address controller, address owner) external returns (uint256 requestId) +``` + +Request an asynchronous deposit + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | --------------------------------- | +| assets | uint256 | Amount of assets to deposit | +| controller | address | Address that controls the request | +| owner | address | Address that owns the assets | + +##### Return Values + +| Name | Type | Description | +| --------- | ------- | ---------------------------------- | +| requestId | uint256 | Unique identifier for this request | + +#### pendingDepositRequest + +```solidity +function pendingDepositRequest(uint256 requestId, address controller) external view returns (uint256 assets) +``` + +Get pending deposit request amount + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | ---------------------- | +| requestId | uint256 | The request identifier | +| controller | address | The controller address | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | ------------------------ | +| assets | uint256 | Amount of assets pending | + +#### claimableDepositRequest + +```solidity +function claimableDepositRequest(uint256 requestId, address controller) external view returns (uint256 assets) +``` + +Get claimable deposit request amount + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | ---------------------- | +| requestId | uint256 | The request identifier | +| controller | address | The controller address | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | -------------------------- | +| assets | uint256 | Amount of assets claimable | + +#### Events + +#### DepositRequest + +```solidity +event DepositRequest(address controller, address owner, uint256 requestId, address sender, uint256 assets) +``` + +Emitted when a deposit request is created diff --git a/pages/developers/api/reference/generated/IERC7540Operator.mdx b/pages/developers/api/reference/generated/IERC7540Operator.mdx new file mode 100644 index 00000000..9924218c --- /dev/null +++ b/pages/developers/api/reference/generated/IERC7540Operator.mdx @@ -0,0 +1,66 @@ +--- +title: IERC7540Operator +description: Auto-generated Solidity API reference. 
+--- + +# IERC7540Operator + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IERC7540.sol + +### IERC7540Operator + +Interface for operator management in ERC7540 + +#### Functions + +#### isOperator + +```solidity +function isOperator(address controller, address operator) external view returns (bool status) +``` + +Check if operator is approved for controller + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | ---------------------- | +| controller | address | The controller address | +| operator | address | The operator address | + +##### Return Values + +| Name | Type | Description | +| ------ | ---- | ---------------- | +| status | bool | True if approved | + +#### setOperator + +```solidity +function setOperator(address operator, bool approved) external returns (bool success) +``` + +Grant or revoke operator permissions + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------------------- | +| operator | address | The operator address | +| approved | bool | True to approve, false to revoke | + +##### Return Values + +| Name | Type | Description | +| ------- | ---- | ------------------ | +| success | bool | True if successful | + +#### Events + +#### OperatorSet + +```solidity +event OperatorSet(address controller, address operator, bool approved) +``` + +Emitted when operator approval changes diff --git a/pages/developers/api/reference/generated/IERC7540Redeem.mdx b/pages/developers/api/reference/generated/IERC7540Redeem.mdx new file mode 100644 index 00000000..b8114962 --- /dev/null +++ b/pages/developers/api/reference/generated/IERC7540Redeem.mdx @@ -0,0 +1,90 @@ +--- +title: IERC7540Redeem +description: Auto-generated Solidity API reference. +--- + +# IERC7540Redeem + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IERC7540.sol + +### IERC7540Redeem + +Interface for asynchronous redemption requests + +_See https://eips.ethereum.org/EIPS/eip-7540_ + +#### Functions + +#### requestRedeem + +```solidity +function requestRedeem(uint256 shares, address controller, address owner) external returns (uint256 requestId) +``` + +Request an asynchronous redemption + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | --------------------------------- | +| shares | uint256 | Amount of shares to redeem | +| controller | address | Address that controls the request | +| owner | address | Address that owns the shares | + +##### Return Values + +| Name | Type | Description | +| --------- | ------- | ---------------------------------- | +| requestId | uint256 | Unique identifier for this request | + +#### pendingRedeemRequest + +```solidity +function pendingRedeemRequest(uint256 requestId, address controller) external view returns (uint256 shares) +``` + +Get pending redeem request amount + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | ---------------------- | +| requestId | uint256 | The request identifier | +| controller | address | The controller address | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | ------------------------ | +| shares | uint256 | Amount of shares pending | + +#### claimableRedeemRequest + +```solidity +function claimableRedeemRequest(uint256 requestId, address controller) external view returns (uint256 shares) +``` + +Get claimable redeem request amount + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | ---------------------- | +| requestId | 
uint256 | The request identifier | +| controller | address | The controller address | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | -------------------------- | +| shares | uint256 | Amount of shares claimable | + +#### Events + +#### RedeemRequest + +```solidity +event RedeemRequest(address controller, address owner, uint256 requestId, address sender, uint256 shares) +``` + +Emitted when a redeem request is created diff --git a/pages/developers/api/reference/generated/IFacetSelectors.mdx b/pages/developers/api/reference/generated/IFacetSelectors.mdx new file mode 100644 index 00000000..08992cdf --- /dev/null +++ b/pages/developers/api/reference/generated/IFacetSelectors.mdx @@ -0,0 +1,22 @@ +--- +title: IFacetSelectors +description: Auto-generated Solidity API reference. +--- + +# IFacetSelectors + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IFacetSelectors.sol + +### IFacetSelectors + +Standard interface for facet selector discovery + +#### Functions + +#### selectors + +```solidity +function selectors() external pure returns (bytes4[]) +``` + +Return the selectors this facet wants registered diff --git a/pages/developers/api/reference/generated/IMBSMRegistry.mdx b/pages/developers/api/reference/generated/IMBSMRegistry.mdx new file mode 100644 index 00000000..4d8e110f --- /dev/null +++ b/pages/developers/api/reference/generated/IMBSMRegistry.mdx @@ -0,0 +1,100 @@ +--- +title: IMBSMRegistry +description: Auto-generated Solidity API reference. +--- + +# IMBSMRegistry + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IMBSMRegistry.sol + +### IMBSMRegistry + +Minimal interface for the Master Blueprint Service Manager registry + +#### Functions + +#### getMBSM + +```solidity +function getMBSM(uint64 blueprintId) external view returns (address mbsmAddress) +``` + +Get the MBSM address currently pinned for a blueprint + +##### Parameters + +| Name | Type | Description | +| ----------- | ------ | ------------------------ | +| blueprintId | uint64 | The blueprint identifier | + +##### Return Values + +| Name | Type | Description | +| ----------- | ------- | ----------------------------------------- | +| mbsmAddress | address | The pinned MBSM (or latest if not pinned) | + +#### getPinnedRevision + +```solidity +function getPinnedRevision(uint64 blueprintId) external view returns (uint32 revision) +``` + +Get the revision pinned for a blueprint (0 = latest) + +#### getLatestMBSM + +```solidity +function getLatestMBSM() external view returns (address mbsmAddress) +``` + +Get the latest registered MBSM address + +##### Return Values + +| Name | Type | Description | +| ----------- | ------- | --------------- | +| mbsmAddress | address | The latest MBSM | + +#### getMBSMByRevision + +```solidity +function getMBSMByRevision(uint32 revision) external view returns (address mbsmAddress) +``` + +Get an MBSM by explicit revision + +##### Parameters + +| Name | Type | Description | +| -------- | ------ | --------------------------------- | +| revision | uint32 | The registry revision (1-indexed) | + +##### Return Values + +| Name | Type | Description | +| ----------- | ------- | --------------------------------------- | +| mbsmAddress | address | The registered address for the revision | + +#### getLatestRevision + +```solidity +function getLatestRevision() external view returns (uint32) +``` + +Get the latest revision number registered in the registry + +#### pinBlueprint + +```solidity +function 
pinBlueprint(uint64 blueprintId, uint32 revision) external +``` + +Pin a blueprint to a specific revision (0 disallowed) + +#### unpinBlueprint + +```solidity +function unpinBlueprint(uint64 blueprintId) external +``` + +Unpin a blueprint (reverting to latest) diff --git a/pages/developers/api/reference/generated/IMasterBlueprintServiceManager.mdx b/pages/developers/api/reference/generated/IMasterBlueprintServiceManager.mdx new file mode 100644 index 00000000..9da62957 --- /dev/null +++ b/pages/developers/api/reference/generated/IMasterBlueprintServiceManager.mdx @@ -0,0 +1,30 @@ +--- +title: IMasterBlueprintServiceManager +description: Auto-generated Solidity API reference. +--- + +# IMasterBlueprintServiceManager + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IMasterBlueprintServiceManager.sol + +### IMasterBlueprintServiceManager + +Interface for the protocol-wide master blueprint service manager + +#### Functions + +#### onBlueprintCreated + +```solidity +function onBlueprintCreated(uint64 blueprintId, address owner, bytes encodedDefinition) external +``` + +Called when a new blueprint is created + +##### Parameters + +| Name | Type | Description | +| ----------------- | ------- | ------------------------------------- | +| blueprintId | uint64 | The newly assigned blueprint ID | +| owner | address | The blueprint owner | +| encodedDefinition | bytes | ABI-encoded blueprint definition data | diff --git a/pages/developers/api/reference/generated/IMetricsRecorder.mdx b/pages/developers/api/reference/generated/IMetricsRecorder.mdx new file mode 100644 index 00000000..fc4ed35a --- /dev/null +++ b/pages/developers/api/reference/generated/IMetricsRecorder.mdx @@ -0,0 +1,210 @@ +--- +title: IMetricsRecorder +description: Auto-generated Solidity API reference. 
+--- + +# IMetricsRecorder + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IMetricsRecorder.sol + +### IMetricsRecorder + +Minimal interface for recording protocol activity metrics + +_Implemented by TangleMetrics, called by core contracts_ + +#### Functions + +#### recordStake + +```solidity +function recordStake(address delegator, address operator, address asset, uint256 amount) external +``` + +Record a stake/delegation event + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ---------------------------------------------- | +| delegator | address | The delegator address | +| operator | address | The operator receiving delegation | +| asset | address | The asset being staked (address(0) for native) | +| amount | uint256 | The amount staked | + +#### recordUnstake + +```solidity +function recordUnstake(address delegator, address operator, address asset, uint256 amount) external +``` + +Record an unstake event + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ------------------------------ | +| delegator | address | The delegator address | +| operator | address | The operator losing delegation | +| asset | address | The asset being unstaked | +| amount | uint256 | The amount unstaked | + +#### recordOperatorRegistered + +```solidity +function recordOperatorRegistered(address operator, address asset, uint256 amount) external +``` + +Record operator registration + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------- | +| operator | address | The operator address | +| asset | address | The asset staked | +| amount | uint256 | Initial stake amount | + +#### recordHeartbeat + +```solidity +function recordHeartbeat(address operator, uint64 serviceId, uint64 timestamp) external +``` + +Record operator heartbeat (liveness proof) + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ---------------------------- | +| operator | address | The operator address | +| serviceId | uint64 | The service ID | +| timestamp | uint64 | Block timestamp of heartbeat | + +#### recordJobCompletion + +```solidity +function recordJobCompletion(address operator, uint64 serviceId, uint64 jobCallId, bool success) external +``` + +Record job completion by operator + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ------------------------- | +| operator | address | The operator address | +| serviceId | uint64 | The service ID | +| jobCallId | uint64 | The job call ID | +| success | bool | Whether the job succeeded | + +#### recordSlash + +```solidity +function recordSlash(address operator, uint64 serviceId, uint256 amount) external +``` + +Record operator slashing (negative metric) + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | -------------------- | +| operator | address | The operator address | +| serviceId | uint64 | The service ID | +| amount | uint256 | Amount slashed | + +#### recordServiceCreated + +```solidity +function recordServiceCreated(uint64 serviceId, uint64 blueprintId, address owner, uint256 operatorCount) external +``` + +Record service creation/activation + +##### Parameters + +| Name | Type | Description | +| ------------- | ------- | ------------------- | +| serviceId | uint64 | The service ID | +| blueprintId | uint64 | The blueprint ID | +| owner | address | The service owner | +| operatorCount | uint256 | Number of operators | + +#### recordServiceTerminated + +```solidity +function 
recordServiceTerminated(uint64 serviceId, uint256 duration) external +``` + +Record service termination + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ---------------------------------- | +| serviceId | uint64 | The service ID | +| duration | uint256 | How long the service ran (seconds) | + +#### recordJobCall + +```solidity +function recordJobCall(uint64 serviceId, address caller, uint64 jobCallId) external +``` + +Record a job call on a service + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | --------------------- | +| serviceId | uint64 | The service ID | +| caller | address | Who initiated the job | +| jobCallId | uint64 | The job call ID | + +#### recordPayment + +```solidity +function recordPayment(address payer, uint64 serviceId, address token, uint256 amount) external +``` + +Record fee payment for a service + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ----------------------------------------- | +| payer | address | Who paid the fee | +| serviceId | uint64 | The service ID | +| token | address | The payment token (address(0) for native) | +| amount | uint256 | The amount paid | + +#### recordBlueprintCreated + +```solidity +function recordBlueprintCreated(uint64 blueprintId, address developer) external +``` + +Record blueprint creation + +##### Parameters + +| Name | Type | Description | +| ----------- | ------- | --------------------- | +| blueprintId | uint64 | The blueprint ID | +| developer | address | The developer address | + +#### recordBlueprintRegistration + +```solidity +function recordBlueprintRegistration(uint64 blueprintId, address operator) external +``` + +Record operator registration to a blueprint + +##### Parameters + +| Name | Type | Description | +| ----------- | ------- | -------------------- | +| blueprintId | uint64 | The blueprint ID | +| operator | address | The operator address | diff --git a/pages/developers/api/reference/generated/IMultiAssetDelegation.mdx b/pages/developers/api/reference/generated/IMultiAssetDelegation.mdx new file mode 100644 index 00000000..7a755261 --- /dev/null +++ b/pages/developers/api/reference/generated/IMultiAssetDelegation.mdx @@ -0,0 +1,739 @@ +--- +title: IMultiAssetDelegation +description: Auto-generated Solidity API reference. 
+--- + +# IMultiAssetDelegation + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IMultiAssetDelegation.sol + +### IMultiAssetDelegation + +Full interface for the multi-asset staking contract + +#### Functions + +#### registerOperator + +```solidity +function registerOperator() external payable +``` + +#### registerOperatorWithAsset + +```solidity +function registerOperatorWithAsset(address token, uint256 amount) external +``` + +#### increaseStake + +```solidity +function increaseStake() external payable +``` + +#### scheduleOperatorUnstake + +```solidity +function scheduleOperatorUnstake(uint256 amount) external +``` + +#### executeOperatorUnstake + +```solidity +function executeOperatorUnstake() external +``` + +#### addBlueprint + +```solidity +function addBlueprint(uint64 blueprintId) external +``` + +#### removeBlueprint + +```solidity +function removeBlueprint(uint64 blueprintId) external +``` + +#### startLeaving + +```solidity +function startLeaving() external +``` + +#### completeLeaving + +```solidity +function completeLeaving() external +``` + +#### deposit + +```solidity +function deposit() external payable +``` + +#### depositWithLock + +```solidity +function depositWithLock(enum Types.LockMultiplier lockMultiplier) external payable +``` + +#### depositERC20 + +```solidity +function depositERC20(address token, uint256 amount) external +``` + +#### depositERC20WithLock + +```solidity +function depositERC20WithLock(address token, uint256 amount, enum Types.LockMultiplier lockMultiplier) external +``` + +#### scheduleWithdraw + +```solidity +function scheduleWithdraw(address token, uint256 amount) external +``` + +#### executeWithdraw + +```solidity +function executeWithdraw() external +``` + +#### depositAndDelegate + +```solidity +function depositAndDelegate(address operator) external payable +``` + +#### depositAndDelegateWithOptions + +```solidity +function depositAndDelegateWithOptions(address operator, address token, uint256 amount, enum Types.BlueprintSelectionMode selectionMode, uint64[] blueprintIds) external payable +``` + +#### delegate + +```solidity +function delegate(address operator, uint256 amount) external +``` + +#### delegateWithOptions + +```solidity +function delegateWithOptions(address operator, address token, uint256 amount, enum Types.BlueprintSelectionMode selectionMode, uint64[] blueprintIds) external +``` + +#### scheduleDelegatorUnstake + +```solidity +function scheduleDelegatorUnstake(address operator, address token, uint256 amount) external +``` + +#### undelegate + +```solidity +function undelegate(address operator, uint256 amount) external +``` + +#### executeDelegatorUnstake + +```solidity +function executeDelegatorUnstake() external +``` + +#### executeDelegatorUnstakeAndWithdraw + +```solidity +function executeDelegatorUnstakeAndWithdraw(address operator, address token, uint256 shares, uint64 requestedRound, address receiver) external returns (uint256 amount) +``` + +Execute a specific matured unstake request and withdraw the resulting assets to `receiver`. + +_Convenience helper for integrations (e.g. 
ERC7540 liquid delegation vaults) to avoid a separate +scheduleWithdraw/executeWithdraw flow after bond-less delay has already elapsed._ + +##### Parameters + +| Name | Type | Description | +| -------------- | ------- | ----------------------------------------------------------------- | +| operator | address | Operator to unstake from | +| token | address | Token address (address(0) for native) | +| shares | uint256 | Shares to unstake (as stored in the underlying bond-less request) | +| requestedRound | uint64 | Round in which the unstake was scheduled | +| receiver | address | Recipient of the withdrawn assets | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | --------------------------------------------------------------------- | +| amount | uint256 | Actual amount returned (after exchange-rate + lazy-slash adjustments) | + +#### addBlueprintToDelegation + +```solidity +function addBlueprintToDelegation(uint256 delegationIndex, uint64 blueprintId) external +``` + +#### removeBlueprintFromDelegation + +```solidity +function removeBlueprintFromDelegation(uint256 delegationIndex, uint64 blueprintId) external +``` + +#### slashForBlueprint + +```solidity +function slashForBlueprint(address operator, uint64 blueprintId, uint64 serviceId, uint256 amount, bytes32 evidence) external returns (uint256 actualSlashed) +``` + +#### slashForService + +```solidity +function slashForService(address operator, uint64 blueprintId, uint64 serviceId, struct Types.AssetSecurityCommitment[] commitments, uint256 amount, bytes32 evidence) external returns (uint256 actualSlashed) +``` + +#### slash + +```solidity +function slash(address operator, uint64 serviceId, uint256 amount, bytes32 evidence) external returns (uint256 actualSlashed) +``` + +#### advanceRound + +```solidity +function advanceRound() external +``` + +#### snapshotOperator + +```solidity +function snapshotOperator(address operator) external +``` + +#### enableAsset + +```solidity +function enableAsset(address token, uint256 minOperatorStake, uint256 minDelegation, uint256 depositCap, uint16 rewardMultiplierBps) external +``` + +#### disableAsset + +```solidity +function disableAsset(address token) external +``` + +#### getAssetConfig + +```solidity +function getAssetConfig(address token) external view returns (struct Types.AssetConfig) +``` + +#### registerAdapter + +```solidity +function registerAdapter(address token, address adapter) external +``` + +#### removeAdapter + +```solidity +function removeAdapter(address token) external +``` + +#### setRequireAdapters + +```solidity +function setRequireAdapters(bool required) external +``` + +#### enableAssetWithAdapter + +```solidity +function enableAssetWithAdapter(address token, address adapter, uint256 minOperatorStake, uint256 minDelegation, uint256 depositCap, uint16 rewardMultiplierBps) external +``` + +#### isOperator + +```solidity +function isOperator(address operator) external view returns (bool) +``` + +#### isOperatorActive + +```solidity +function isOperatorActive(address operator) external view returns (bool) +``` + +#### getOperatorStake + +```solidity +function getOperatorStake(address operator) external view returns (uint256) +``` + +#### getOperatorSelfStake + +```solidity +function getOperatorSelfStake(address operator) external view returns (uint256) +``` + +#### getOperatorDelegatedStake + +```solidity +function getOperatorDelegatedStake(address operator) external view returns (uint256) +``` + +#### getDelegation + +```solidity +function 
getDelegation(address delegator, address operator) external view returns (uint256) +``` + +#### getTotalDelegation + +```solidity +function getTotalDelegation(address delegator) external view returns (uint256 total) +``` + +#### minOperatorStake + +```solidity +function minOperatorStake() external view returns (uint256) +``` + +#### meetsStakeRequirement + +```solidity +function meetsStakeRequirement(address operator, uint256 required) external view returns (bool) +``` + +#### isSlasher + +```solidity +function isSlasher(address account) external view returns (bool) +``` + +#### getOperatorMetadata + +```solidity +function getOperatorMetadata(address operator) external view returns (struct Types.OperatorMetadata) +``` + +#### getOperatorBlueprints + +```solidity +function getOperatorBlueprints(address operator) external view returns (uint256[]) +``` + +#### operatorCount + +```solidity +function operatorCount() external view returns (uint256) +``` + +#### operatorAt + +```solidity +function operatorAt(uint256 index) external view returns (address) +``` + +#### getDeposit + +```solidity +function getDeposit(address delegator, address token) external view returns (struct Types.Deposit) +``` + +#### getPendingWithdrawals + +```solidity +function getPendingWithdrawals(address delegator) external view returns (struct Types.WithdrawRequest[]) +``` + +#### getLocks + +```solidity +function getLocks(address delegator, address token) external view returns (struct Types.LockInfo[]) +``` + +#### getDelegations + +```solidity +function getDelegations(address delegator) external view returns (struct Types.BondInfoDelegator[]) +``` + +#### getDelegationBlueprints + +```solidity +function getDelegationBlueprints(address delegator, uint256 idx) external view returns (uint64[]) +``` + +#### getPendingUnstakes + +```solidity +function getPendingUnstakes(address delegator) external view returns (struct Types.BondLessRequest[]) +``` + +#### getOperatorRewardPool + +```solidity +function getOperatorRewardPool(address operator) external view returns (struct Types.OperatorRewardPool) +``` + +#### getOperatorDelegators + +```solidity +function getOperatorDelegators(address operator) external view returns (address[]) +``` + +#### getOperatorDelegatorCount + +```solidity +function getOperatorDelegatorCount(address operator) external view returns (uint256) +``` + +#### rewardsManager + +```solidity +function rewardsManager() external view returns (address) +``` + +#### serviceFeeDistributor + +```solidity +function serviceFeeDistributor() external view returns (address) +``` + +#### getSlashImpact + +```solidity +function getSlashImpact(address operator, uint64 slashIndex, address delegator) external view returns (uint256) +``` + +#### getSlashCount + +```solidity +function getSlashCount(address operator) external view returns (uint64) +``` + +#### getSlashRecord + +```solidity +function getSlashRecord(address operator, uint64 slashIndex) external view returns (struct SlashingManager.SlashRecord) +``` + +#### getSlashCountForService + +```solidity +function getSlashCountForService(uint64 serviceId, address operator) external view returns (uint64) +``` + +#### getSlashCountForBlueprint + +```solidity +function getSlashCountForBlueprint(uint64 blueprintId, address operator) external view returns (uint64) +``` + +#### currentRound + +```solidity +function currentRound() external view returns (uint64) +``` + +#### roundDuration + +```solidity +function roundDuration() external view returns (uint64) +``` + +#### 
delegationBondLessDelay + +```solidity +function delegationBondLessDelay() external view returns (uint64) +``` + +#### leaveDelegatorsDelay + +```solidity +function leaveDelegatorsDelay() external view returns (uint64) +``` + +#### leaveOperatorsDelay + +```solidity +function leaveOperatorsDelay() external view returns (uint64) +``` + +#### operatorCommissionBps + +```solidity +function operatorCommissionBps() external view returns (uint16) +``` + +#### LOCK_ONE_MONTH + +```solidity +function LOCK_ONE_MONTH() external view returns (uint64) +``` + +#### LOCK_TWO_MONTHS + +```solidity +function LOCK_TWO_MONTHS() external view returns (uint64) +``` + +#### LOCK_THREE_MONTHS + +```solidity +function LOCK_THREE_MONTHS() external view returns (uint64) +``` + +#### LOCK_SIX_MONTHS + +```solidity +function LOCK_SIX_MONTHS() external view returns (uint64) +``` + +#### MULTIPLIER_NONE + +```solidity +function MULTIPLIER_NONE() external view returns (uint16) +``` + +#### MULTIPLIER_ONE_MONTH + +```solidity +function MULTIPLIER_ONE_MONTH() external view returns (uint16) +``` + +#### MULTIPLIER_TWO_MONTHS + +```solidity +function MULTIPLIER_TWO_MONTHS() external view returns (uint16) +``` + +#### MULTIPLIER_THREE_MONTHS + +```solidity +function MULTIPLIER_THREE_MONTHS() external view returns (uint16) +``` + +#### MULTIPLIER_SIX_MONTHS + +```solidity +function MULTIPLIER_SIX_MONTHS() external view returns (uint16) +``` + +#### addSlasher + +```solidity +function addSlasher(address slasher) external +``` + +#### removeSlasher + +```solidity +function removeSlasher(address slasher) external +``` + +#### setOperatorCommission + +```solidity +function setOperatorCommission(uint16 bps) external +``` + +#### setDelays + +```solidity +function setDelays(uint64 delegationBondLessDelay, uint64 leaveDelegatorsDelay, uint64 leaveOperatorsDelay) external +``` + +#### setRewardsManager + +```solidity +function setRewardsManager(address manager) external +``` + +#### setServiceFeeDistributor + +```solidity +function setServiceFeeDistributor(address distributor) external +``` + +#### pause + +```solidity +function pause() external +``` + +#### unpause + +```solidity +function unpause() external +``` + +#### rescueTokens + +```solidity +function rescueTokens(address token, address to, uint256 amount) external +``` + +#### Events + +#### AssetEnabled + +```solidity +event AssetEnabled(address token, uint256 minOperatorStake, uint256 minDelegation) +``` + +#### AssetDisabled + +```solidity +event AssetDisabled(address token) +``` + +#### RoundAdvanced + +```solidity +event RoundAdvanced(uint64 round) +``` + +#### OperatorRegistered + +```solidity +event OperatorRegistered(address operator, uint256 stake) +``` + +#### OperatorStakeIncreased + +```solidity +event OperatorStakeIncreased(address operator, uint256 amount) +``` + +#### OperatorUnstakeScheduled + +```solidity +event OperatorUnstakeScheduled(address operator, uint256 amount, uint64 readyRound) +``` + +#### OperatorUnstakeExecuted + +```solidity +event OperatorUnstakeExecuted(address operator, uint256 amount) +``` + +#### OperatorLeavingScheduled + +```solidity +event OperatorLeavingScheduled(address operator, uint64 readyRound) +``` + +#### OperatorLeft + +```solidity +event OperatorLeft(address operator) +``` + +#### OperatorBlueprintAdded + +```solidity +event OperatorBlueprintAdded(address operator, uint64 blueprintId) +``` + +#### OperatorBlueprintRemoved + +```solidity +event OperatorBlueprintRemoved(address operator, uint64 blueprintId) +``` + +#### 
Deposited + +```solidity +event Deposited(address delegator, address token, uint256 amount, enum Types.LockMultiplier lock) +``` + +#### WithdrawScheduled + +```solidity +event WithdrawScheduled(address delegator, address token, uint256 amount, uint64 readyRound) +``` + +#### Withdrawn + +```solidity +event Withdrawn(address delegator, address token, uint256 amount) +``` + +#### ExpiredLocksHarvested + +```solidity +event ExpiredLocksHarvested(address delegator, address token, uint256 count, uint256 totalAmount) +``` + +#### Delegated + +```solidity +event Delegated(address delegator, address operator, address token, uint256 amount, uint256 shares, enum Types.BlueprintSelectionMode selectionMode) +``` + +#### DelegatorUnstakeScheduled + +```solidity +event DelegatorUnstakeScheduled(address delegator, address operator, address token, uint256 shares, uint256 estimatedAmount, uint64 readyRound) +``` + +#### DelegatorUnstakeExecuted + +```solidity +event DelegatorUnstakeExecuted(address delegator, address operator, address token, uint256 shares, uint256 amount) +``` + +#### BlueprintAddedToDelegation + +```solidity +event BlueprintAddedToDelegation(address delegator, uint256 delegationIndex, uint64 blueprintId) +``` + +#### BlueprintRemovedFromDelegation + +```solidity +event BlueprintRemovedFromDelegation(address delegator, uint256 delegationIndex, uint64 blueprintId) +``` + +#### Slashed + +```solidity +event Slashed(address operator, uint64 serviceId, uint256 operatorSlashed, uint256 delegatorsSlashed, uint256 newExchangeRate) +``` + +#### SlashedForService + +```solidity +event SlashedForService(address operator, uint64 serviceId, uint64 blueprintId, uint256 totalSlashed, uint256 commitmentCount) +``` + +#### SlashRecorded + +```solidity +event SlashRecorded(address operator, uint64 slashId, uint256 totalSlashed, uint256 exchangeRateBefore, uint256 exchangeRateAfter) +``` + +#### AdapterRegistered + +```solidity +event AdapterRegistered(address token, address adapter) +``` + +#### AdapterRemoved + +```solidity +event AdapterRemoved(address token) +``` + +#### RequireAdaptersUpdated + +```solidity +event RequireAdaptersUpdated(bool required) +``` diff --git a/pages/developers/api/reference/generated/IPaymentAdapterRegistry.mdx b/pages/developers/api/reference/generated/IPaymentAdapterRegistry.mdx new file mode 100644 index 00000000..681b6952 --- /dev/null +++ b/pages/developers/api/reference/generated/IPaymentAdapterRegistry.mdx @@ -0,0 +1,125 @@ +--- +title: IPaymentAdapterRegistry +description: Auto-generated Solidity API reference. 
+--- + +# IPaymentAdapterRegistry + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IStreamingPaymentAdapter.sol + +### IPaymentAdapterRegistry + +Registry for managing multiple payment adapters + +#### Functions + +#### registerAdapter + +```solidity +function registerAdapter(string name, address adapter) external +``` + +Register a new payment adapter + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | --------------- | +| name | string | Adapter name | +| adapter | address | Adapter address | + +#### removeAdapter + +```solidity +function removeAdapter(string name) external +``` + +Remove a payment adapter + +##### Parameters + +| Name | Type | Description | +| ---- | ------ | ---------------------- | +| name | string | Adapter name to remove | + +#### getAdapter + +```solidity +function getAdapter(string name) external view returns (address adapter) +``` + +Get an adapter by name + +##### Parameters + +| Name | Type | Description | +| ---- | ------ | ------------ | +| name | string | Adapter name | + +##### Return Values + +| Name | Type | Description | +| ------- | ------- | --------------- | +| adapter | address | Adapter address | + +#### getDefaultAdapter + +```solidity +function getDefaultAdapter() external view returns (address adapter) +``` + +Get the default adapter + +##### Return Values + +| Name | Type | Description | +| ------- | ------- | ----------------------- | +| adapter | address | Default adapter address | + +#### setDefaultAdapter + +```solidity +function setDefaultAdapter(string name) external +``` + +Set the default adapter + +##### Parameters + +| Name | Type | Description | +| ---- | ------ | --------------------------------- | +| name | string | Name of adapter to set as default | + +#### isRegistered + +```solidity +function isRegistered(string name) external view returns (bool registered) +``` + +Check if an adapter is registered + +##### Parameters + +| Name | Type | Description | +| ---- | ------ | ------------ | +| name | string | Adapter name | + +##### Return Values + +| Name | Type | Description | +| ---------- | ---- | ---------------------- | +| registered | bool | True if adapter exists | + +#### getRegisteredAdapters + +```solidity +function getRegisteredAdapters() external view returns (string[] names) +``` + +Get all registered adapter names + +##### Return Values + +| Name | Type | Description | +| ----- | -------- | ---------------------- | +| names | string[] | Array of adapter names | diff --git a/pages/developers/api/reference/generated/IRestaking.mdx b/pages/developers/api/reference/generated/IRestaking.mdx new file mode 100644 index 00000000..757eaf35 --- /dev/null +++ b/pages/developers/api/reference/generated/IRestaking.mdx @@ -0,0 +1,307 @@ +--- +title: IStaking +description: Auto-generated Solidity API reference. +--- + +# IStaking + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IStaking.sol + +### IStaking + +Abstract interface for staking/shared security protocols + +\_Implement this to integrate with native staking, Symbiotic, or other staking systems. 
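+
+As loose orientation only, a minimal consumer sketch follows: it gates logic on the read-only surface of this interface, and the design principles below explain why that surface stays small. The pragma, import path, and deployed staking address are assumptions, not part of the interface itself.
+
+```solidity
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.20;
+
+// Import path is an assumption about the consuming project's remappings.
+import {IStaking} from "tnt-core/src/interfaces/IStaking.sol";
+
+/// Illustrative sketch: refuse to proceed unless an operator is active and
+/// backs at least a caller-supplied minimum stake.
+contract StakeGateSketch {
+    IStaking public immutable staking;
+
+    constructor(address stakingAddress) {
+        // Deployment-specific address, supplied by the integrator.
+        staking = IStaking(stakingAddress);
+    }
+
+    function requireBackedOperator(address operator, uint256 requiredStake) external view {
+        require(staking.isOperatorActive(operator), "operator not active");
+        require(staking.meetsStakeRequirement(operator, requiredStake), "insufficient stake");
+    }
+}
+```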
+ +Design principles: + +- Minimal interface - only what Tangle core needs +- Read-heavy - most operations are queries +- Write-light - only slash() modifies state +- No assumptions about underlying implementation\_ + +#### Functions + +#### isOperator + +```solidity +function isOperator(address operator) external view returns (bool) +``` + +Check if an address is a registered operator + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------- | +| operator | address | The address to check | + +##### Return Values + +| Name | Type | Description | +| ---- | ---- | ------------------------------ | +| [0] | bool | True if registered as operator | + +#### isOperatorActive + +```solidity +function isOperatorActive(address operator) external view returns (bool) +``` + +Check if an operator is currently active (not leaving, not slashed out) + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------- | +| operator | address | The address to check | + +##### Return Values + +| Name | Type | Description | +| ---- | ---- | -------------- | +| [0] | bool | True if active | + +#### getOperatorStake + +```solidity +function getOperatorStake(address operator) external view returns (uint256) +``` + +Get an operator's total stake (self-stake + delegations) + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------- | +| operator | address | The operator address | + +##### Return Values + +| Name | Type | Description | +| ---- | ------- | ---------------------------------- | +| [0] | uint256 | Total stake amount in native units | + +#### getOperatorSelfStake + +```solidity +function getOperatorSelfStake(address operator) external view returns (uint256) +``` + +Get an operator's self-stake only + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------- | +| operator | address | The operator address | + +##### Return Values + +| Name | Type | Description | +| ---- | ------- | ----------------- | +| [0] | uint256 | Self-stake amount | + +#### getOperatorDelegatedStake + +```solidity +function getOperatorDelegatedStake(address operator) external view returns (uint256) +``` + +Get total amount delegated to an operator + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------- | +| operator | address | The operator address | + +##### Return Values + +| Name | Type | Description | +| ---- | ------- | ---------------------- | +| [0] | uint256 | Total delegated amount | + +#### getDelegation + +```solidity +function getDelegation(address delegator, address operator) external view returns (uint256) +``` + +Get a delegator's delegation to a specific operator + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | --------------------- | +| delegator | address | The delegator address | +| operator | address | The operator address | + +##### Return Values + +| Name | Type | Description | +| ---- | ------- | ----------------- | +| [0] | uint256 | Delegation amount | + +#### getTotalDelegation + +```solidity +function getTotalDelegation(address delegator) external view returns (uint256) +``` + +Get a delegator's total delegations across all operators + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | --------------------- | +| delegator | address | The delegator address | + +##### Return Values + +| Name | Type | Description | +| ---- | ------- | ---------------------- | +| [0] | 
uint256 | Total delegated amount | + +#### minOperatorStake + +```solidity +function minOperatorStake() external view returns (uint256) +``` + +Get minimum stake required to be an operator + +##### Return Values + +| Name | Type | Description | +| ---- | ------- | -------------------- | +| [0] | uint256 | Minimum stake amount | + +#### meetsStakeRequirement + +```solidity +function meetsStakeRequirement(address operator, uint256 required) external view returns (bool) +``` + +Check if operator meets a specific stake requirement + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------------------- | +| operator | address | The operator address | +| required | uint256 | The required stake amount | + +##### Return Values + +| Name | Type | Description | +| ---- | ---- | ------------------------------------- | +| [0] | bool | True if operator has sufficient stake | + +#### slashForBlueprint + +```solidity +function slashForBlueprint(address operator, uint64 blueprintId, uint64 serviceId, uint256 amount, bytes32 evidence) external returns (uint256 actualSlashed) +``` + +Slash an operator's stake for a specific blueprint + +_Only affects delegators exposed to this blueprint (All mode + Fixed mode who selected it)_ + +##### Parameters + +| Name | Type | Description | +| ----------- | ------- | --------------------------------------- | +| operator | address | The operator to slash | +| blueprintId | uint64 | The blueprint where violation occurred | +| serviceId | uint64 | The service where violation occurred | +| amount | uint256 | Amount to slash | +| evidence | bytes32 | Evidence hash (IPFS or other reference) | + +##### Return Values + +| Name | Type | Description | +| ------------- | ------- | ------------------------------------------------------------- | +| actualSlashed | uint256 | The actual amount slashed (may be less if insufficient stake) | + +#### slashForService + +```solidity +function slashForService(address operator, uint64 blueprintId, uint64 serviceId, struct Types.AssetSecurityCommitment[] commitments, uint256 amount, bytes32 evidence) external returns (uint256 actualSlashed) +``` + +Slash an operator for a specific service, only slashing committed assets + +_Only slashes assets the operator committed to this service, proportionally_ + +##### Parameters + +| Name | Type | Description | +| ----------- | -------------------------------------- | ---------------------------------------------------------- | +| operator | address | The operator to slash | +| blueprintId | uint64 | The blueprint where violation occurred | +| serviceId | uint64 | The service where violation occurred | +| commitments | struct Types.AssetSecurityCommitment[] | The operator's asset security commitments for this service | +| amount | uint256 | Amount to slash | +| evidence | bytes32 | Evidence hash (IPFS or other reference) | + +##### Return Values + +| Name | Type | Description | +| ------------- | ------- | ----------------------------------------------------------------------- | +| actualSlashed | uint256 | The actual amount slashed (may be less if insufficient committed stake) | + +#### slash + +```solidity +function slash(address operator, uint64 serviceId, uint256 amount, bytes32 evidence) external returns (uint256 actualSlashed) +``` + +Slash an operator's stake (legacy - slashes all delegators) + +_Only callable by authorized slashers (e.g., Tangle core contract)_ + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | 
--------------------------------------- | +| operator | address | The operator to slash | +| serviceId | uint64 | The service where violation occurred | +| amount | uint256 | Amount to slash | +| evidence | bytes32 | Evidence hash (IPFS or other reference) | + +##### Return Values + +| Name | Type | Description | +| ------------- | ------- | ------------------------------------------------------------- | +| actualSlashed | uint256 | The actual amount slashed (may be less if insufficient stake) | + +#### isSlasher + +```solidity +function isSlasher(address account) external view returns (bool) +``` + +Check if an address is authorized to call slash() + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | -------------------- | +| account | address | The address to check | + +##### Return Values + +| Name | Type | Description | +| ---- | ---- | ------------------ | +| [0] | bool | True if authorized | + +#### Events + +#### OperatorSlashed + +```solidity +event OperatorSlashed(address operator, uint64 serviceId, uint256 amount, bytes32 evidence) +``` + +Emitted when an operator is slashed diff --git a/pages/developers/api/reference/generated/IRestakingAdmin.mdx b/pages/developers/api/reference/generated/IRestakingAdmin.mdx new file mode 100644 index 00000000..6916f50c --- /dev/null +++ b/pages/developers/api/reference/generated/IRestakingAdmin.mdx @@ -0,0 +1,58 @@ +--- +title: IStakingAdmin +description: Auto-generated Solidity API reference. +--- + +# IStakingAdmin + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IStaking.sol + +### IStakingAdmin + +Admin functions for staking implementations + +_Separated to keep main interface clean_ + +#### Functions + +#### addSlasher + +```solidity +function addSlasher(address slasher) external +``` + +Add an authorized slasher + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | -------------------- | +| slasher | address | Address to authorize | + +#### removeSlasher + +```solidity +function removeSlasher(address slasher) external +``` + +Remove an authorized slasher + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | ----------------- | +| slasher | address | Address to remove | + +#### setMinOperatorStake + +```solidity +function setMinOperatorStake(uint256 amount) external +``` + +Update minimum operator stake + +##### Parameters + +| Name | Type | Description | +| ------ | ------- | ----------- | +| amount | uint256 | New minimum | diff --git a/pages/developers/api/reference/generated/IRewardsManager.mdx b/pages/developers/api/reference/generated/IRewardsManager.mdx new file mode 100644 index 00000000..a60b8134 --- /dev/null +++ b/pages/developers/api/reference/generated/IRewardsManager.mdx @@ -0,0 +1,85 @@ +--- +title: IRewardsManager +description: Auto-generated Solidity API reference. 
+--- + +# IRewardsManager + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IRewardsManager.sol + +### IRewardsManager + +Interface for reward vault management - called by MultiAssetDelegation + +#### Functions + +#### recordDelegate + +```solidity +function recordDelegate(address delegator, address operator, address asset, uint256 amount, uint16 lockMultiplierBps) external +``` + +Records a delegation for reward tracking + +##### Parameters + +| Name | Type | Description | +| ----------------- | ------- | --------------------------------------------------------- | +| delegator | address | The account making the delegation | +| operator | address | The operator being delegated to | +| asset | address | The asset being delegated (address(0) for native) | +| amount | uint256 | The amount being delegated | +| lockMultiplierBps | uint16 | Lock multiplier in basis points (10000 = 1x, 0 = no lock) | + +#### recordUndelegate + +```solidity +function recordUndelegate(address delegator, address operator, address asset, uint256 amount) external +``` + +Records an undelegation + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ----------------------------------- | +| delegator | address | The account making the undelegation | +| operator | address | The operator being undelegated from | +| asset | address | The asset being undelegated | +| amount | uint256 | The amount being undelegated | + +#### recordServiceReward + +```solidity +function recordServiceReward(address operator, address asset, uint256 amount) external +``` + +Records a service reward for an operator + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | --------------------------------- | +| operator | address | The operator receiving the reward | +| asset | address | The reward asset | +| amount | uint256 | The reward amount | + +#### getAssetDepositCapRemaining + +```solidity +function getAssetDepositCapRemaining(address asset) external view returns (uint256 remaining) +``` + +Get remaining deposit capacity for an asset vault + +##### Parameters + +| Name | Type | Description | +| ----- | ------- | ------------------ | +| asset | address | The asset to query | + +##### Return Values + +| Name | Type | Description | +| --------- | ------- | ------------------------------ | +| remaining | uint256 | The remaining deposit capacity | diff --git a/pages/developers/api/reference/generated/ISablierAdapter.mdx b/pages/developers/api/reference/generated/ISablierAdapter.mdx new file mode 100644 index 00000000..8df737c5 --- /dev/null +++ b/pages/developers/api/reference/generated/ISablierAdapter.mdx @@ -0,0 +1,162 @@ +--- +title: ISablierAdapter +description: Auto-generated Solidity API reference. 
+--- + +# ISablierAdapter + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IStreamingPaymentAdapter.sol + +### ISablierAdapter + +Extended interface for Sablier-specific features + +#### Types + +#### StreamType + +Stream type for Sablier + +```solidity +enum StreamType { + Linear, + Dynamic, + Tranched +} +``` + +#### Segment + +Segment for dynamic streams + +```solidity +struct Segment { + uint128 amount; + uint64 exponent; + uint40 timestamp; +} +``` + +#### Functions + +#### createLinearStream + +```solidity +function createLinearStream(uint64 serviceId, address token, uint128 totalAmount, uint40 durationSeconds, uint40 cliffSeconds) external returns (uint256 streamId) +``` + +Create a linear stream (constant rate) + +##### Parameters + +| Name | Type | Description | +| --------------- | ------- | ---------------------- | +| serviceId | uint64 | The Tangle service ID | +| token | address | The ERC-20 token | +| totalAmount | uint128 | Total amount to stream | +| durationSeconds | uint40 | Total duration | +| cliffSeconds | uint40 | Cliff period | + +##### Return Values + +| Name | Type | Description | +| -------- | ------- | --------------------- | +| streamId | uint256 | The created stream ID | + +#### createDynamicStream + +```solidity +function createDynamicStream(uint64 serviceId, address token, uint128 totalAmount, struct ISablierAdapter.Segment[] segments) external returns (uint256 streamId) +``` + +Create a dynamic stream with custom curve + +##### Parameters + +| Name | Type | Description | +| ----------- | -------------------------------- | ------------------------------------ | +| serviceId | uint64 | The Tangle service ID | +| token | address | The ERC-20 token | +| totalAmount | uint128 | Total amount to stream | +| segments | struct ISablierAdapter.Segment[] | Array of segments defining the curve | + +##### Return Values + +| Name | Type | Description | +| -------- | ------- | --------------------- | +| streamId | uint256 | The created stream ID | + +#### isCancelable + +```solidity +function isCancelable(uint256 streamId) external view returns (bool cancelable) +``` + +Check if a stream is cancelable + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| ---------- | ---- | ------------------------------- | +| cancelable | bool | True if stream can be cancelled | + +#### wasCancelled + +```solidity +function wasCancelled(uint256 streamId) external view returns (bool cancelled) +``` + +Check if a stream was cancelled + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| --------- | ---- | ---------------------------- | +| cancelled | bool | True if stream was cancelled | + +#### getStreamNFT + +```solidity +function getStreamNFT(uint256 streamId) external view returns (uint256 tokenId) +``` + +Get the NFT token ID for a stream (Sablier streams are NFTs) + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| ------- | ------- | -------------------- | +| tokenId | uint256 | The ERC-721 token ID | + +#### transferStream + +```solidity +function transferStream(uint256 streamId, address newRecipient) external +``` + +Transfer stream ownership 
(NFT transfer) + +##### Parameters + +| Name | Type | Description | +| ------------ | ------- | --------------------- | +| streamId | uint256 | The stream ID | +| newRecipient | address | New recipient address | diff --git a/pages/developers/api/reference/generated/IServiceFeeDistributor.mdx b/pages/developers/api/reference/generated/IServiceFeeDistributor.mdx new file mode 100644 index 00000000..427d0973 --- /dev/null +++ b/pages/developers/api/reference/generated/IServiceFeeDistributor.mdx @@ -0,0 +1,157 @@ +--- +title: IServiceFeeDistributor +description: Auto-generated Solidity API reference. +--- + +# IServiceFeeDistributor + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IServiceFeeDistributor.sol + +### IServiceFeeDistributor + +Tracks service-fee payouts to stakers across payment tokens + +_Receives delegation-change hooks from MultiAssetDelegation and fee-distribution calls from Tangle._ + +#### Functions + +#### distributeServiceFee + +```solidity +function distributeServiceFee(uint64 serviceId, uint64 blueprintId, address operator, address paymentToken, uint256 amount) external payable +``` + +#### distributeInflationReward + +```solidity +function distributeInflationReward(uint64 serviceId, uint64 blueprintId, address operator, address paymentToken, uint256 amount) external payable +``` + +Distribute inflation-funded staker rewards using service exposure weights + +_Intended for InflationPool; rewards are paid in the provided token (TNT)._ + +#### claimFor + +```solidity +function claimFor(address token, address operator, struct Types.Asset asset) external returns (uint256 amount) +``` + +Claim rewards for a specific delegator position and token + +#### claimAll + +```solidity +function claimAll(address token) external returns (uint256 totalAmount) +``` + +Claim all pending rewards across all positions for a token + +#### claimAllBatch + +```solidity +function claimAllBatch(address[] tokens) external returns (uint256[] amounts) +``` + +Claim all pending rewards for multiple tokens + +#### pendingRewards + +```solidity +function pendingRewards(address delegator, address token) external view returns (uint256 pending) +``` + +Preview pending rewards for a delegator across all positions for a token + +#### delegatorOperators + +```solidity +function delegatorOperators(address delegator) external view returns (address[] operators) +``` + +Return all operators a delegator has positions with + +#### delegatorAssets + +```solidity +function delegatorAssets(address delegator, address operator) external view returns (bytes32[] assetHashes) +``` + +Return all asset hashes a delegator has positions for with an operator + +#### getPosition + +```solidity +function getPosition(address delegator, address operator, bytes32 assetHash) external view returns (uint8 mode, uint256 principal, uint256 score) +``` + +Return a delegator's position details + +#### operatorRewardTokens + +```solidity +function operatorRewardTokens(address operator) external view returns (address[] tokens) +``` + +Return reward tokens ever distributed for an operator + +#### onDelegationChanged + +```solidity +function onDelegationChanged(address delegator, address operator, struct Types.Asset asset, uint256 amount, bool isIncrease, enum Types.BlueprintSelectionMode selectionMode, uint64[] blueprintIds, uint16 lockMultiplierBps) external +``` + +#### onBlueprintAdded + +```solidity +function onBlueprintAdded(address delegator, address operator, struct Types.Asset asset, uint64 blueprintId) 
external +``` + +#### onBlueprintRemoved + +```solidity +function onBlueprintRemoved(address delegator, address operator, struct Types.Asset asset, uint64 blueprintId) external +``` + +#### getPoolScore + +```solidity +function getPoolScore(address operator, uint64 blueprintId, struct Types.Asset asset) external view returns (uint256 allScore, uint256 fixedScore) +``` + +#### getOperatorServiceUsdExposure + +```solidity +function getOperatorServiceUsdExposure(uint64 serviceId, uint64 blueprintId, address operator) external view returns (uint256 totalUsdExposure) +``` + +Get USD-weighted exposure for an operator/service + +_Returns total USD exposure across All+Fixed pools for the service._ + +#### onOperatorLeaving + +```solidity +function onOperatorLeaving(uint64 serviceId, address operator) external +``` + +Called when an operator is about to leave a service + +_Drips all active streams for the operator BEFORE they're removed_ + +#### onServiceTerminated + +```solidity +function onServiceTerminated(uint64 serviceId, address refundRecipient) external +``` + +Called when a service is terminated early + +_Cancels streaming payments and refunds remaining amounts to the service owner_ + +##### Parameters + +| Name | Type | Description | +| --------------- | ------- | ------------------------------------------------------------- | +| serviceId | uint64 | The terminated service ID | +| refundRecipient | address | Where to send the remaining payment (typically service owner) | diff --git a/pages/developers/api/reference/generated/IStreamingPaymentAdapter.mdx b/pages/developers/api/reference/generated/IStreamingPaymentAdapter.mdx new file mode 100644 index 00000000..37a09f10 --- /dev/null +++ b/pages/developers/api/reference/generated/IStreamingPaymentAdapter.mdx @@ -0,0 +1,316 @@ +--- +title: IStreamingPaymentAdapter +description: Auto-generated Solidity API reference. +--- + +# IStreamingPaymentAdapter + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IStreamingPaymentAdapter.sol + +### IStreamingPaymentAdapter + +Common interface for streaming payment adapters (Superfluid, Sablier, etc.) 
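+
+As a payer-side illustration only, the sketch below opens a 30-day stream for a service and checks how much has become withdrawable; the adapter address, import path, token custody, and any required ERC-20 approval to the adapter are assumptions rather than guarantees of this interface.
+
+```solidity
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.20;
+
+// Import path is an assumption about the consuming project's remappings.
+import {IStreamingPaymentAdapter} from "tnt-core/src/interfaces/IStreamingPaymentAdapter.sol";
+
+/// Illustrative sketch: fund a Tangle service with an ERC-20 stream through
+/// whichever adapter the deployment has configured.
+contract StreamFunderSketch {
+    IStreamingPaymentAdapter public immutable adapter;
+
+    constructor(address adapterAddress) {
+        // Deployment-specific adapter address, supplied by the integrator.
+        adapter = IStreamingPaymentAdapter(adapterAddress);
+    }
+
+    function fundService(uint64 serviceId, address token, uint256 totalAmount)
+        external
+        returns (uint256 streamId)
+    {
+        // 30-day schedule with no cliff; the values are illustrative only.
+        // Token transfer/approval handling is assumed to happen out of band.
+        streamId = adapter.createStream(serviceId, token, totalAmount, 30 days, 0);
+    }
+
+    function withdrawable(uint256 streamId) external view returns (uint256) {
+        return adapter.getWithdrawableAmount(streamId);
+    }
+}
+```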
+ +_Adapters implement this interface to provide streaming payment capabilities +to Tangle services without tight coupling to specific protocols._ + +#### Functions + +#### createStream + +```solidity +function createStream(uint64 serviceId, address token, uint256 totalAmount, uint64 durationSeconds, uint64 cliffSeconds) external payable returns (uint256 streamId) +``` + +Create a streaming payment for a service + +##### Parameters + +| Name | Type | Description | +| --------------- | ------- | -------------------------------------------------- | +| serviceId | uint64 | The Tangle service ID | +| token | address | The ERC-20 token to stream (address(0) for native) | +| totalAmount | uint256 | Total amount to stream | +| durationSeconds | uint64 | Stream duration in seconds | +| cliffSeconds | uint64 | Optional cliff period (0 for no cliff) | + +##### Return Values + +| Name | Type | Description | +| -------- | ------- | --------------------- | +| streamId | uint256 | The created stream ID | + +#### updateStreamRate + +```solidity +function updateStreamRate(uint256 streamId, uint256 newRatePerSecond) external +``` + +Update the rate of an existing stream + +##### Parameters + +| Name | Type | Description | +| ---------------- | ------- | ----------------------- | +| streamId | uint256 | The stream ID to update | +| newRatePerSecond | uint256 | New streaming rate | + +#### cancelStream + +```solidity +function cancelStream(uint256 streamId) external returns (uint256 refundedAmount) +``` + +Cancel a stream and refund remaining balance + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ----------------------- | +| streamId | uint256 | The stream ID to cancel | + +##### Return Values + +| Name | Type | Description | +| -------------- | ------- | ---------------------------- | +| refundedAmount | uint256 | Amount refunded to the payer | + +#### withdrawFromStream + +```solidity +function withdrawFromStream(uint256 streamId) external returns (uint256 withdrawnAmount) +``` + +Withdraw available funds from a stream + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| --------------- | ------- | ---------------- | +| withdrawnAmount | uint256 | Amount withdrawn | + +#### settleAndDistribute + +```solidity +function settleAndDistribute(uint256 streamId) external +``` + +Settle a stream's accumulated funds and distribute to operators + +_This triggers distribution through Tangle's payment system_ + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ----------------------- | +| streamId | uint256 | The stream ID to settle | + +#### getWithdrawableAmount + +```solidity +function getWithdrawableAmount(uint256 streamId) external view returns (uint256 amount) +``` + +Get the current withdrawable amount for a stream + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | ---------------------------- | +| amount | uint256 | Amount available to withdraw | + +#### getStreamRate + +```solidity +function getStreamRate(uint256 streamId) external view returns (uint256 ratePerSecond) +``` + +Get the current streaming rate + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + 
+| Name | Type | Description | +| ------------- | ------- | -------------------------------- | +| ratePerSecond | uint256 | Tokens per second being streamed | + +#### getStreamInfo + +```solidity +function getStreamInfo(uint256 streamId) external view returns (uint64 serviceId, address payer, address token, uint256 totalAmount, uint256 withdrawnAmount, uint256 startTime, uint256 endTime, uint256 cliffTime, bool active) +``` + +Get full stream information + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| --------------- | ------- | ------------------------------- | +| serviceId | uint64 | Associated Tangle service | +| payer | address | Address funding the stream | +| token | address | Token being streamed | +| totalAmount | uint256 | Total stream amount | +| withdrawnAmount | uint256 | Amount already withdrawn | +| startTime | uint256 | Stream start timestamp | +| endTime | uint256 | Stream end timestamp | +| cliffTime | uint256 | Cliff timestamp (0 if no cliff) | +| active | bool | Whether stream is active | + +#### getStreamServiceId + +```solidity +function getStreamServiceId(uint256 streamId) external view returns (uint64 serviceId) +``` + +Get the service ID associated with a stream + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| --------- | ------ | --------------------- | +| serviceId | uint64 | The Tangle service ID | + +#### getServiceStreams + +```solidity +function getServiceStreams(uint64 serviceId) external view returns (uint256[] streamIds) +``` + +Get all active streams for a service + +##### Parameters + +| Name | Type | Description | +| --------- | ------ | --------------------- | +| serviceId | uint64 | The Tangle service ID | + +##### Return Values + +| Name | Type | Description | +| --------- | --------- | -------------------------- | +| streamIds | uint256[] | Array of active stream IDs | + +#### getAccruedAmount + +```solidity +function getAccruedAmount(uint256 streamId) external view returns (uint256 accruedAmount) +``` + +Calculate real-time accrued amount (not yet settled) + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ------------- | +| streamId | uint256 | The stream ID | + +##### Return Values + +| Name | Type | Description | +| ------------- | ------- | ------------------------------------ | +| accruedAmount | uint256 | Amount accrued since last settlement | + +#### protocolName + +```solidity +function protocolName() external view returns (string name) +``` + +Get the name of the underlying protocol + +##### Return Values + +| Name | Type | Description | +| ---- | ------ | --------------------------------------------- | +| name | string | Protocol name (e.g., "Superfluid", "Sablier") | + +#### isTokenSupported + +```solidity +function isTokenSupported(address token) external view returns (bool supported) +``` + +Check if a token is supported for streaming + +##### Parameters + +| Name | Type | Description | +| ----- | ------- | ----------------- | +| token | address | The token address | + +##### Return Values + +| Name | Type | Description | +| --------- | ---- | ----------------------------- | +| supported | bool | True if token can be streamed | + +#### Events + +#### StreamCreated + +```solidity +event StreamCreated(uint64 serviceId, uint256 
streamId, address payer, address token, uint256 ratePerSecond, uint256 totalAmount) +``` + +Emitted when a stream is created for a service + +#### StreamUpdated + +```solidity +event StreamUpdated(uint64 serviceId, uint256 streamId, uint256 newRatePerSecond) +``` + +Emitted when a stream is updated + +#### StreamCancelled + +```solidity +event StreamCancelled(uint64 serviceId, uint256 streamId, uint256 refundedAmount) +``` + +Emitted when a stream is cancelled + +#### StreamWithdrawn + +```solidity +event StreamWithdrawn(uint64 serviceId, uint256 streamId, uint256 amount, address recipient) +``` + +Emitted when funds are withdrawn from a stream + +#### StreamSettled + +```solidity +event StreamSettled(uint64 serviceId, uint256 streamId, uint256 amount) +``` + +Emitted when a stream is settled and distributed diff --git a/pages/developers/api/reference/generated/IStreamingPaymentManager.mdx b/pages/developers/api/reference/generated/IStreamingPaymentManager.mdx new file mode 100644 index 00000000..7f4e6a0f --- /dev/null +++ b/pages/developers/api/reference/generated/IStreamingPaymentManager.mdx @@ -0,0 +1,78 @@ +--- +title: IStreamingPaymentManager +description: Auto-generated Solidity API reference. +--- + +# IStreamingPaymentManager + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IStreamingPaymentManager.sol + +### IStreamingPaymentManager + +Interface for streaming payment management + +#### Functions + +#### createStream + +```solidity +function createStream(uint64 serviceId, uint64 blueprintId, address operator, address paymentToken, uint256 amount, uint64 startTime, uint64 endTime) external payable +``` + +Create a streaming payment for a service + +#### dripAndGetChunk + +```solidity +function dripAndGetChunk(uint64 serviceId, address operator) external returns (uint256 amount, uint256 durationSeconds, uint64 blueprintId, address paymentToken) +``` + +Drip a specific stream and return chunk info + +#### dripOperatorStreams + +```solidity +function dripOperatorStreams(address operator) external returns (uint64[] serviceIds, uint64[] blueprintIds, address[] paymentTokens, uint256[] amounts, uint256[] durations) +``` + +Drip all active streams for an operator + +#### onServiceTerminated + +```solidity +function onServiceTerminated(uint64 serviceId, address refundRecipient) external +``` + +Called when service is terminated + +#### onOperatorLeaving + +```solidity +function onOperatorLeaving(uint64 serviceId, address operator) external +``` + +Called when operator is leaving + +#### getOperatorActiveStreams + +```solidity +function getOperatorActiveStreams(address operator) external view returns (uint64[]) +``` + +Get active stream IDs for an operator + +#### getStreamingPayment + +```solidity +function getStreamingPayment(uint64 serviceId, address operator) external view returns (uint64 _serviceId, uint64 blueprintId, address _operator, address paymentToken, uint256 totalAmount, uint256 distributed, uint64 startTime, uint64 endTime, uint64 lastDripTime) +``` + +Get streaming payment details + +#### pendingDrip + +```solidity +function pendingDrip(uint64 serviceId, address operator) external view returns (uint256) +``` + +Calculate pending drip amount diff --git a/pages/developers/api/reference/generated/ISuperfluidAdapter.mdx b/pages/developers/api/reference/generated/ISuperfluidAdapter.mdx new file mode 100644 index 00000000..3f11783c --- /dev/null +++ b/pages/developers/api/reference/generated/ISuperfluidAdapter.mdx @@ -0,0 +1,129 @@ +--- +title: 
ISuperfluidAdapter +description: Auto-generated Solidity API reference. +--- + +# ISuperfluidAdapter + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IStreamingPaymentAdapter.sol + +### ISuperfluidAdapter + +Extended interface for Superfluid-specific features + +#### Functions + +#### getNetFlowRate + +```solidity +function getNetFlowRate(address account, address token) external view returns (int96 netFlowRate) +``` + +Get the net flow rate for an account (incoming - outgoing) + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | ------------------- | +| account | address | The account address | +| token | address | The super token | + +##### Return Values + +| Name | Type | Description | +| ----------- | ----- | ------------------------------- | +| netFlowRate | int96 | Net flow rate (can be negative) | + +#### getRealtimeBalance + +```solidity +function getRealtimeBalance(address account, address token) external view returns (int256 availableBalance, uint256 deposit) +``` + +Get the real-time balance of an account + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | ------------------- | +| account | address | The account address | +| token | address | The super token | + +##### Return Values + +| Name | Type | Description | +| ---------------- | ------- | ------------------------- | +| availableBalance | int256 | Current available balance | +| deposit | uint256 | Required deposit/buffer | + +#### isSolvent + +```solidity +function isSolvent(address account, address token) external view returns (bool solvent) +``` + +Check if an account is solvent (positive balance) + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | ------------------- | +| account | address | The account address | +| token | address | The super token | + +##### Return Values + +| Name | Type | Description | +| ------- | ---- | ------------------------------------ | +| solvent | bool | True if account has positive balance | + +#### getRequiredBuffer + +```solidity +function getRequiredBuffer(address token, int96 flowRate) external view returns (uint256 bufferAmount) +``` + +Get the required buffer/deposit for a flow rate + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | ----------------------- | +| token | address | The super token | +| flowRate | int96 | Flow rate in wei/second | + +##### Return Values + +| Name | Type | Description | +| ------------ | ------- | ----------------------- | +| bufferAmount | uint256 | Required buffer deposit | + +#### wrapTokens + +```solidity +function wrapTokens(address token, uint256 amount) external +``` + +Wrap underlying tokens to super tokens + +##### Parameters + +| Name | Type | Description | +| ------ | ------- | -------------------- | +| token | address | The underlying token | +| amount | uint256 | Amount to wrap | + +#### unwrapTokens + +```solidity +function unwrapTokens(address token, uint256 amount) external +``` + +Unwrap super tokens to underlying + +##### Parameters + +| Name | Type | Description | +| ------ | ------- | ---------------- | +| token | address | The super token | +| amount | uint256 | Amount to unwrap | diff --git a/pages/developers/api/reference/generated/ITangle.mdx b/pages/developers/api/reference/generated/ITangle.mdx new file mode 100644 index 00000000..2ec0bbd0 --- /dev/null +++ b/pages/developers/api/reference/generated/ITangle.mdx @@ -0,0 +1,15 @@ +--- +title: ITangle +description: Auto-generated Solidity API reference. 
+--- + +# ITangle + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangle.sol + +### ITangle + +Core interface for Tangle Protocol + +_Consolidates all sub-interfaces into a single entry point. +Inherits from focused sub-interfaces for modularity._ diff --git a/pages/developers/api/reference/generated/ITangleAdmin.mdx b/pages/developers/api/reference/generated/ITangleAdmin.mdx new file mode 100644 index 00000000..3613bd1f --- /dev/null +++ b/pages/developers/api/reference/generated/ITangleAdmin.mdx @@ -0,0 +1,248 @@ +--- +title: ITangleAdmin +description: Auto-generated Solidity API reference. +--- + +# ITangleAdmin + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangle.sol + +### ITangleAdmin + +Admin functions for Tangle protocol + +#### Functions + +#### setStaking + +```solidity +function setStaking(address staking) external +``` + +Set the staking module + +##### Parameters + +| Name | Type | Description | +| ------- | ------- | --------------------------- | +| staking | address | The IStaking implementation | + +#### setTreasury + +```solidity +function setTreasury(address treasury) external +``` + +Set the protocol treasury + +##### Parameters + +| Name | Type | Description | +| -------- | ------- | -------------------- | +| treasury | address | The treasury address | + +#### setPaymentSplit + +```solidity +function setPaymentSplit(struct Types.PaymentSplit split) external +``` + +Set the payment split configuration + +##### Parameters + +| Name | Type | Description | +| ----- | ------------------------- | --------------------------- | +| split | struct Types.PaymentSplit | The new split configuration | + +#### paymentSplit + +```solidity +function paymentSplit() external view returns (uint16 developerBps, uint16 protocolBps, uint16 operatorBps, uint16 stakerBps) +``` + +Get the current payment split + +#### pause + +```solidity +function pause() external +``` + +Pause the protocol + +#### unpause + +```solidity +function unpause() external +``` + +Unpause the protocol + +#### treasury + +```solidity +function treasury() external view returns (address payable) +``` + +Get the configured treasury + +#### setMetricsRecorder + +```solidity +function setMetricsRecorder(address recorder) external +``` + +Set the metrics recorder (optional) + +#### metricsRecorder + +```solidity +function metricsRecorder() external view returns (address) +``` + +Get the metrics recorder address + +#### setOperatorStatusRegistry + +```solidity +function setOperatorStatusRegistry(address registry) external +``` + +Set operator status registry + +#### operatorStatusRegistry + +```solidity +function operatorStatusRegistry() external view returns (address) +``` + +Get operator status registry + +#### setServiceFeeDistributor + +```solidity +function setServiceFeeDistributor(address distributor) external +``` + +Configure service fee distributor + +#### serviceFeeDistributor + +```solidity +function serviceFeeDistributor() external view returns (address) +``` + +Get service fee distributor + +#### setPriceOracle + +```solidity +function setPriceOracle(address oracle) external +``` + +Configure price oracle + +#### priceOracle + +```solidity +function priceOracle() external view returns (address) +``` + +Get price oracle + +#### setMBSMRegistry + +```solidity +function setMBSMRegistry(address registry) external +``` + +Configure Master Blueprint Service Manager registry + +#### mbsmRegistry + +```solidity +function mbsmRegistry() external view 
returns (address) +``` + +Get Master Blueprint Service Manager registry + +#### maxBlueprintsPerOperator + +```solidity +function maxBlueprintsPerOperator() external view returns (uint32) +``` + +Get max blueprints per operator + +#### setMaxBlueprintsPerOperator + +```solidity +function setMaxBlueprintsPerOperator(uint32 newMax) external +``` + +Set max blueprints per operator + +#### tntToken + +```solidity +function tntToken() external view returns (address) +``` + +Get TNT token address + +#### setTntToken + +```solidity +function setTntToken(address token) external +``` + +Set TNT token address + +#### rewardVaults + +```solidity +function rewardVaults() external view returns (address) +``` + +Get reward vaults address + +#### setRewardVaults + +```solidity +function setRewardVaults(address vaults) external +``` + +Set reward vaults address + +#### defaultTntMinExposureBps + +```solidity +function defaultTntMinExposureBps() external view returns (uint16) +``` + +Get default TNT min exposure bps + +#### setDefaultTntMinExposureBps + +```solidity +function setDefaultTntMinExposureBps(uint16 minExposureBps) external +``` + +Set default TNT min exposure bps + +#### tntPaymentDiscountBps + +```solidity +function tntPaymentDiscountBps() external view returns (uint16) +``` + +Get TNT payment discount bps + +#### setTntPaymentDiscountBps + +```solidity +function setTntPaymentDiscountBps(uint16 discountBps) external +``` + +Set TNT payment discount bps diff --git a/pages/developers/api/reference/generated/ITangleBlueprints.mdx b/pages/developers/api/reference/generated/ITangleBlueprints.mdx new file mode 100644 index 00000000..d83a4b9f --- /dev/null +++ b/pages/developers/api/reference/generated/ITangleBlueprints.mdx @@ -0,0 +1,156 @@ +--- +title: ITangleBlueprints +description: Auto-generated Solidity API reference. 
+--- + +# ITangleBlueprints + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleBlueprints.sol + +### ITangleBlueprints + +Blueprint management interface + +#### Functions + +#### createBlueprint + +```solidity +function createBlueprint(struct Types.BlueprintDefinition definition) external returns (uint64 blueprintId) +``` + +Create a blueprint from an encoded definition that includes schemas and job metadata + +##### Parameters + +| Name | Type | Description | +| ---------- | -------------------------------- | ------------------------------------------- | +| definition | struct Types.BlueprintDefinition | Fully populated blueprint definition struct | + +##### Return Values + +| Name | Type | Description | +| ----------- | ------ | -------------------- | +| blueprintId | uint64 | The new blueprint ID | + +#### updateBlueprint + +```solidity +function updateBlueprint(uint64 blueprintId, string metadataUri) external +``` + +Update blueprint metadata + +#### transferBlueprint + +```solidity +function transferBlueprint(uint64 blueprintId, address newOwner) external +``` + +Transfer blueprint ownership + +#### deactivateBlueprint + +```solidity +function deactivateBlueprint(uint64 blueprintId) external +``` + +Deactivate a blueprint + +#### getBlueprint + +```solidity +function getBlueprint(uint64 blueprintId) external view returns (struct Types.Blueprint) +``` + +Get blueprint info + +#### getBlueprintConfig + +```solidity +function getBlueprintConfig(uint64 blueprintId) external view returns (struct Types.BlueprintConfig) +``` + +Get blueprint configuration + +#### blueprintOperatorCount + +```solidity +function blueprintOperatorCount(uint64 blueprintId) external view returns (uint256) +``` + +Get number of operators for a blueprint + +#### blueprintCount + +```solidity +function blueprintCount() external view returns (uint64) +``` + +Get current blueprint count + +#### getBlueprintDefinition + +```solidity +function getBlueprintDefinition(uint64 blueprintId) external view returns (struct Types.BlueprintDefinition definition) +``` + +Get the original blueprint definition + +#### blueprintMetadata + +```solidity +function blueprintMetadata(uint64 blueprintId) external view returns (struct Types.BlueprintMetadata metadata, string metadataUri) +``` + +Get blueprint metadata and URI + +#### blueprintSources + +```solidity +function blueprintSources(uint64 blueprintId) external view returns (struct Types.BlueprintSource[] sources) +``` + +Get blueprint sources + +#### blueprintSupportedMemberships + +```solidity +function blueprintSupportedMemberships(uint64 blueprintId) external view returns (enum Types.MembershipModel[] memberships) +``` + +Get blueprint supported membership models + +#### blueprintMasterRevision + +```solidity +function blueprintMasterRevision(uint64 blueprintId) external view returns (uint32) +``` + +Get master blueprint revision + +#### Events + +#### BlueprintCreated + +```solidity +event BlueprintCreated(uint64 blueprintId, address owner, address manager, string metadataUri) +``` + +#### BlueprintUpdated + +```solidity +event BlueprintUpdated(uint64 blueprintId, string metadataUri) +``` + +#### BlueprintTransferred + +```solidity +event BlueprintTransferred(uint64 blueprintId, address from, address to) +``` + +#### BlueprintDeactivated + +```solidity +event BlueprintDeactivated(uint64 blueprintId) +``` diff --git a/pages/developers/api/reference/generated/ITangleFull.mdx b/pages/developers/api/reference/generated/ITangleFull.mdx new file 
mode 100644 index 00000000..568156a4 --- /dev/null +++ b/pages/developers/api/reference/generated/ITangleFull.mdx @@ -0,0 +1,12 @@ +--- +title: ITangleFull +description: Auto-generated Solidity API reference. +--- + +# ITangleFull + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangle.sol + +### ITangleFull + +Complete Tangle interface including admin and slashing diff --git a/pages/developers/api/reference/generated/ITangleGovernance.mdx b/pages/developers/api/reference/generated/ITangleGovernance.mdx new file mode 100644 index 00000000..3fd6d7ad --- /dev/null +++ b/pages/developers/api/reference/generated/ITangleGovernance.mdx @@ -0,0 +1,271 @@ +--- +title: ITangleGovernance +description: Auto-generated Solidity API reference. +--- + +# ITangleGovernance + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleGovernance.sol + +### ITangleGovernance + +Interface for Tangle governance components + +#### Types + +#### ProposalState + +Proposal states + +```solidity +enum ProposalState { + Pending, + Active, + Canceled, + Defeated, + Succeeded, + Queued, + Expired, + Executed +} +``` + +#### Functions + +#### propose + +```solidity +function propose(address[] targets, uint256[] values, bytes[] calldatas, string description) external returns (uint256 proposalId) +``` + +Create a new proposal + +##### Parameters + +| Name | Type | Description | +| ----------- | --------- | -------------------------- | +| targets | address[] | Contract addresses to call | +| values | uint256[] | ETH values to send | +| calldatas | bytes[] | Encoded function calls | +| description | string | Human-readable description | + +##### Return Values + +| Name | Type | Description | +| ---------- | ------- | ------------------------------ | +| proposalId | uint256 | The unique proposal identifier | + +#### queue + +```solidity +function queue(address[] targets, uint256[] values, bytes[] calldatas, bytes32 descriptionHash) external returns (uint256 proposalId) +``` + +Queue a successful proposal for execution + +##### Parameters + +| Name | Type | Description | +| --------------- | --------- | -------------------------------- | +| targets | address[] | Contract addresses to call | +| values | uint256[] | ETH values to send | +| calldatas | bytes[] | Encoded function calls | +| descriptionHash | bytes32 | Hash of the proposal description | + +##### Return Values + +| Name | Type | Description | +| ---------- | ------- | ----------------------- | +| proposalId | uint256 | The proposal identifier | + +#### execute + +```solidity +function execute(address[] targets, uint256[] values, bytes[] calldatas, bytes32 descriptionHash) external payable returns (uint256 proposalId) +``` + +Execute a queued proposal + +##### Parameters + +| Name | Type | Description | +| --------------- | --------- | -------------------------------- | +| targets | address[] | Contract addresses to call | +| values | uint256[] | ETH values to send | +| calldatas | bytes[] | Encoded function calls | +| descriptionHash | bytes32 | Hash of the proposal description | + +##### Return Values + +| Name | Type | Description | +| ---------- | ------- | ----------------------- | +| proposalId | uint256 | The proposal identifier | + +#### cancel + +```solidity +function cancel(address[] targets, uint256[] values, bytes[] calldatas, bytes32 descriptionHash) external returns (uint256 proposalId) +``` + +Cancel a proposal + +##### Parameters + +| Name | Type | Description | +| --------------- | --------- | 
-------------------------------- | +| targets | address[] | Contract addresses to call | +| values | uint256[] | ETH values to send | +| calldatas | bytes[] | Encoded function calls | +| descriptionHash | bytes32 | Hash of the proposal description | + +##### Return Values + +| Name | Type | Description | +| ---------- | ------- | ----------------------- | +| proposalId | uint256 | The proposal identifier | + +#### castVote + +```solidity +function castVote(uint256 proposalId, uint8 support) external returns (uint256 weight) +``` + +Cast a vote on a proposal + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | --------------------------- | +| proposalId | uint256 | The proposal to vote on | +| support | uint8 | 0=Against, 1=For, 2=Abstain | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | ---------------------- | +| weight | uint256 | The voting weight used | + +#### castVoteWithReason + +```solidity +function castVoteWithReason(uint256 proposalId, uint8 support, string reason) external returns (uint256 weight) +``` + +Cast a vote with reason + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | --------------------------- | +| proposalId | uint256 | The proposal to vote on | +| support | uint8 | 0=Against, 1=For, 2=Abstain | +| reason | string | Explanation for the vote | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | ---------------------- | +| weight | uint256 | The voting weight used | + +#### castVoteBySig + +```solidity +function castVoteBySig(uint256 proposalId, uint8 support, address voter, bytes signature) external returns (uint256 weight) +``` + +Cast a vote using EIP-712 signature + +##### Parameters + +| Name | Type | Description | +| ---------- | ------- | --------------------------- | +| proposalId | uint256 | The proposal to vote on | +| support | uint8 | 0=Against, 1=For, 2=Abstain | +| voter | address | The voter address | +| signature | bytes | The EIP-712 signature | + +##### Return Values + +| Name | Type | Description | +| ------ | ------- | ---------------------- | +| weight | uint256 | The voting weight used | + +#### state + +```solidity +function state(uint256 proposalId) external view returns (enum ITangleGovernance.ProposalState) +``` + +Get the current state of a proposal + +#### proposalSnapshot + +```solidity +function proposalSnapshot(uint256 proposalId) external view returns (uint256) +``` + +Get the block number when voting starts + +#### proposalDeadline + +```solidity +function proposalDeadline(uint256 proposalId) external view returns (uint256) +``` + +Get the block number when voting ends + +#### proposalProposer + +```solidity +function proposalProposer(uint256 proposalId) external view returns (address) +``` + +Get the proposer of a proposal + +#### hasVoted + +```solidity +function hasVoted(uint256 proposalId, address account) external view returns (bool) +``` + +Check if an account has voted on a proposal + +#### getVotes + +```solidity +function getVotes(address account, uint256 blockNumber) external view returns (uint256) +``` + +Get voting power of an account at a specific block + +#### quorum + +```solidity +function quorum(uint256 blockNumber) external view returns (uint256) +``` + +Get the required quorum at a specific block + +#### votingDelay + +```solidity +function votingDelay() external view returns (uint256) +``` + +Get the voting delay (blocks before voting starts) + +#### votingPeriod + +```solidity +function votingPeriod() 
external view returns (uint256) +``` + +Get the voting period (blocks for voting) + +#### proposalThreshold + +```solidity +function proposalThreshold() external view returns (uint256) +``` + +Get the proposal threshold (tokens needed to propose) diff --git a/pages/developers/api/reference/generated/ITangleJobs.mdx b/pages/developers/api/reference/generated/ITangleJobs.mdx new file mode 100644 index 00000000..96151878 --- /dev/null +++ b/pages/developers/api/reference/generated/ITangleJobs.mdx @@ -0,0 +1,91 @@ +--- +title: ITangleJobs +description: Auto-generated Solidity API reference. +--- + +# ITangleJobs + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleJobs.sol + +### ITangleJobs + +Job submission and result management interface + +#### Functions + +#### submitJob + +```solidity +function submitJob(uint64 serviceId, uint8 jobIndex, bytes inputs) external payable returns (uint64 callId) +``` + +Submit a job to a service + +#### submitResult + +```solidity +function submitResult(uint64 serviceId, uint64 callId, bytes result) external +``` + +Submit a job result (as operator) + +#### submitResults + +```solidity +function submitResults(uint64 serviceId, uint64[] callIds, bytes[] results) external +``` + +Submit multiple results in one transaction + +#### submitAggregatedResult + +```solidity +function submitAggregatedResult(uint64 serviceId, uint64 callId, bytes output, uint256 signerBitmap, uint256[2] aggregatedSignature, uint256[4] aggregatedPubkey) external +``` + +Submit an aggregated BLS result for a job + +_Only valid for jobs where requiresAggregation returns true_ + +##### Parameters + +| Name | Type | Description | +| ------------------- | ---------- | ------------------------------------------------------------------------ | +| serviceId | uint64 | The service ID | +| callId | uint64 | The job call ID | +| output | bytes | The aggregated output data | +| signerBitmap | uint256 | Bitmap indicating which operators signed (bit i = operator i in service) | +| aggregatedSignature | uint256[2] | The aggregated BLS signature [x, y] | +| aggregatedPubkey | uint256[4] | The aggregated public key [x0, x1, y0, y1] | + +#### getJobCall + +```solidity +function getJobCall(uint64 serviceId, uint64 callId) external view returns (struct Types.JobCall) +``` + +Get job call info + +#### Events + +#### JobSubmitted + +```solidity +event JobSubmitted(uint64 serviceId, uint64 callId, uint8 jobIndex, address caller, bytes inputs) +``` + +#### JobResultSubmitted + +```solidity +event JobResultSubmitted(uint64 serviceId, uint64 callId, address operator, bytes result) +``` + +#### JobCompleted + +```solidity +event JobCompleted(uint64 serviceId, uint64 callId) +``` + +Emitted when a job reaches its required result threshold + +_Derive resultCount from getJobCall(serviceId, callId).resultCount_ diff --git a/pages/developers/api/reference/generated/ITangleOperators.mdx b/pages/developers/api/reference/generated/ITangleOperators.mdx new file mode 100644 index 00000000..8a5b6fa1 --- /dev/null +++ b/pages/developers/api/reference/generated/ITangleOperators.mdx @@ -0,0 +1,159 @@ +--- +title: ITangleOperators +description: Auto-generated Solidity API reference. +--- + +# ITangleOperators + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleOperators.sol + +### ITangleOperators + +Operator registration and management interface + +_Operator liveness is tracked via OperatorStatusRegistry heartbeats, +not a setOperatorOnline call. 
Use submitHeartbeat/isOnline/getOperatorStatus +on the registry for liveness signals._ + +#### Functions + +#### preRegister + +```solidity +function preRegister(uint64 blueprintId) external +``` + +Signal intent to register for a blueprint + +#### registerOperator + +```solidity +function registerOperator(uint64 blueprintId, bytes ecdsaPublicKey, string rpcAddress) external +``` + +Register as operator for a blueprint + +##### Parameters + +| Name | Type | Description | +| -------------- | ------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| blueprintId | uint64 | The blueprint to register for | +| ecdsaPublicKey | bytes | The ECDSA public key for gossip network identity This key is used for signing/verifying messages in the P2P gossip network and may differ from the wallet key (msg.sender) | +| rpcAddress | string | The operator's RPC endpoint URL | + +#### registerOperator + +```solidity +function registerOperator(uint64 blueprintId, bytes ecdsaPublicKey, string rpcAddress, bytes registrationInputs) external +``` + +Register as operator providing blueprint-specific registration inputs + +##### Parameters + +| Name | Type | Description | +| ------------------ | ------ | ----------------------------------------------- | +| blueprintId | uint64 | | +| ecdsaPublicKey | bytes | | +| rpcAddress | string | | +| registrationInputs | bytes | Encoded payload validated by blueprint's schema | + +#### unregisterOperator + +```solidity +function unregisterOperator(uint64 blueprintId) external +``` + +Unregister from a blueprint + +#### updateOperatorPreferences + +```solidity +function updateOperatorPreferences(uint64 blueprintId, bytes ecdsaPublicKey, string rpcAddress) external +``` + +Update operator preferences for a blueprint + +##### Parameters + +| Name | Type | Description | +| -------------- | ------ | --------------------------------------------------------- | +| blueprintId | uint64 | The blueprint to update preferences for | +| ecdsaPublicKey | bytes | New ECDSA public key (pass empty bytes to keep unchanged) | +| rpcAddress | string | New RPC endpoint (pass empty string to keep unchanged) | + +#### getOperatorRegistration + +```solidity +function getOperatorRegistration(uint64 blueprintId, address operator) external view returns (struct Types.OperatorRegistration) +``` + +Get operator registration for a blueprint + +#### getOperatorPreferences + +```solidity +function getOperatorPreferences(uint64 blueprintId, address operator) external view returns (struct Types.OperatorPreferences) +``` + +Get operator preferences for a blueprint (includes ECDSA public key) + +#### getOperatorPublicKey + +```solidity +function getOperatorPublicKey(uint64 blueprintId, address operator) external view returns (bytes) +``` + +Get operator's ECDSA public key for gossip network identity + +_Returns the key used for signing/verifying gossip messages_ + +#### isOperatorRegistered + +```solidity +function isOperatorRegistered(uint64 blueprintId, address operator) external view returns (bool) +``` + +Check if operator is registered for a blueprint + +#### Events + +#### OperatorRegistered + +```solidity +event OperatorRegistered(uint64 blueprintId, address operator, bytes ecdsaPublicKey, string rpcAddress) +``` + +Emitted when an operator registers for a blueprint + +##### Parameters + +| Name | Type | Description | +| -------------- | ------- | 
------------------------------------------------ | +| blueprintId | uint64 | The blueprint ID | +| operator | address | The operator address (wallet) | +| ecdsaPublicKey | bytes | The ECDSA public key for gossip network identity | +| rpcAddress | string | The operator's RPC endpoint | + +#### OperatorUnregistered + +```solidity +event OperatorUnregistered(uint64 blueprintId, address operator) +``` + +#### OperatorPreferencesUpdated + +```solidity +event OperatorPreferencesUpdated(uint64 blueprintId, address operator, bytes ecdsaPublicKey, string rpcAddress) +``` + +Emitted when an operator updates their preferences + +##### Parameters + +| Name | Type | Description | +| -------------- | ------- | -------------------------------------------------------- | +| blueprintId | uint64 | The blueprint ID | +| operator | address | The operator address | +| ecdsaPublicKey | bytes | The updated ECDSA public key (may be empty if unchanged) | +| rpcAddress | string | The updated RPC endpoint (may be empty if unchanged) | diff --git a/pages/developers/api/reference/generated/ITanglePaymentsInternal.mdx b/pages/developers/api/reference/generated/ITanglePaymentsInternal.mdx new file mode 100644 index 00000000..093608b5 --- /dev/null +++ b/pages/developers/api/reference/generated/ITanglePaymentsInternal.mdx @@ -0,0 +1,24 @@ +--- +title: ITanglePaymentsInternal +description: Auto-generated Solidity API reference. +--- + +# ITanglePaymentsInternal + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITanglePaymentsInternal.sol + +### ITanglePaymentsInternal + +#### Functions + +#### distributePayment + +```solidity +function distributePayment(uint64 serviceId, uint64 blueprintId, address token, uint256 amount, address[] operators, uint16[] exposures, uint256 totalExposure) external +``` + +#### depositToEscrow + +```solidity +function depositToEscrow(uint64 serviceId, address token, uint256 amount) external +``` diff --git a/pages/developers/api/reference/generated/ITangleRewards.mdx b/pages/developers/api/reference/generated/ITangleRewards.mdx new file mode 100644 index 00000000..2c7cb27a --- /dev/null +++ b/pages/developers/api/reference/generated/ITangleRewards.mdx @@ -0,0 +1,80 @@ +--- +title: ITangleRewards +description: Auto-generated Solidity API reference. 
+--- + +# ITangleRewards + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleRewards.sol + +### ITangleRewards + +Reward distribution and claiming interface + +#### Functions + +#### claimRewards + +```solidity +function claimRewards() external +``` + +Claim accumulated rewards (native token) + +#### claimRewards + +```solidity +function claimRewards(address token) external +``` + +Claim accumulated rewards for a specific token + +#### claimRewardsBatch + +```solidity +function claimRewardsBatch(address[] tokens) external +``` + +Claim accumulated rewards for multiple tokens + +#### claimRewardsAll + +```solidity +function claimRewardsAll() external +``` + +Claim accumulated rewards for all pending tokens + +#### pendingRewards + +```solidity +function pendingRewards(address account) external view returns (uint256) +``` + +Get pending rewards for an account (native token) + +#### pendingRewards + +```solidity +function pendingRewards(address account, address token) external view returns (uint256) +``` + +Get pending rewards for an account and token + +#### rewardTokens + +```solidity +function rewardTokens(address account) external view returns (address[]) +``` + +List tokens with non-zero pending rewards for an account + +_Convenience view; mappings are not enumerable._ + +#### Events + +#### RewardsClaimed + +```solidity +event RewardsClaimed(address account, address token, uint256 amount) +``` diff --git a/pages/developers/api/reference/generated/ITangleSecurityView.mdx b/pages/developers/api/reference/generated/ITangleSecurityView.mdx new file mode 100644 index 00000000..30c2f1fb --- /dev/null +++ b/pages/developers/api/reference/generated/ITangleSecurityView.mdx @@ -0,0 +1,44 @@ +--- +title: ITangleSecurityView +description: Auto-generated Solidity API reference. +--- + +# ITangleSecurityView + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleSecurityView.sol + +### ITangleSecurityView + +Minimal view interface for reading service security requirements + operator commitments. + +#### Functions + +#### getServiceSecurityRequirements + +```solidity +function getServiceSecurityRequirements(uint64 serviceId) external view returns (struct Types.AssetSecurityRequirement[]) +``` + +#### getServiceSecurityCommitmentBps + +```solidity +function getServiceSecurityCommitmentBps(uint64 serviceId, address operator, enum Types.AssetKind kind, address token) external view returns (uint16) +``` + +#### treasury + +```solidity +function treasury() external view returns (address payable) +``` + +#### getService + +```solidity +function getService(uint64 serviceId) external view returns (struct Types.Service) +``` + +#### getServiceOperators + +```solidity +function getServiceOperators(uint64 serviceId) external view returns (address[]) +``` diff --git a/pages/developers/api/reference/generated/ITangleServices.mdx b/pages/developers/api/reference/generated/ITangleServices.mdx new file mode 100644 index 00000000..7e62887d --- /dev/null +++ b/pages/developers/api/reference/generated/ITangleServices.mdx @@ -0,0 +1,413 @@ +--- +title: ITangleServices +description: Auto-generated Solidity API reference. 
+--- + +# ITangleServices + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleServices.sol + +### ITangleServices + +Service lifecycle management interface + +#### Functions + +#### requestService + +```solidity +function requestService(uint64 blueprintId, address[] operators, bytes config, address[] permittedCallers, uint64 ttl, address paymentToken, uint256 paymentAmount) external payable returns (uint64 requestId) +``` + +Request a new service + +#### requestServiceWithExposure + +```solidity +function requestServiceWithExposure(uint64 blueprintId, address[] operators, uint16[] exposureBps, bytes config, address[] permittedCallers, uint64 ttl, address paymentToken, uint256 paymentAmount) external payable returns (uint64 requestId) +``` + +Request a service with explicit exposure commitments + +#### requestServiceWithSecurity + +```solidity +function requestServiceWithSecurity(uint64 blueprintId, address[] operators, struct Types.AssetSecurityRequirement[] securityRequirements, bytes config, address[] permittedCallers, uint64 ttl, address paymentToken, uint256 paymentAmount) external payable returns (uint64 requestId) +``` + +Request a service with multi-asset security requirements + +_Each operator must provide security commitments matching these requirements when approving_ + +#### approveService + +```solidity +function approveService(uint64 requestId, uint8 stakingPercent) external +``` + +Approve a service request (as operator) - simple version + +#### approveServiceWithCommitments + +```solidity +function approveServiceWithCommitments(uint64 requestId, struct Types.AssetSecurityCommitment[] commitments) external +``` + +Approve a service request with multi-asset security commitments + +_Commitments must match the security requirements specified in the request_ + +#### rejectService + +```solidity +function rejectService(uint64 requestId) external +``` + +Reject a service request (as operator) + +#### createServiceFromQuotes + +```solidity +function createServiceFromQuotes(uint64 blueprintId, struct Types.SignedQuote[] quotes, bytes config, address[] permittedCallers, uint64 ttl) external payable returns (uint64 serviceId) +``` + +Create a service instantly using pre-signed operator quotes + +_No approval flow needed - operators have pre-committed via signatures_ + +##### Parameters + +| Name | Type | Description | +| ---------------- | -------------------------- | ---------------------------------------- | +| blueprintId | uint64 | The blueprint to use | +| quotes | struct Types.SignedQuote[] | Array of signed quotes from operators | +| config | bytes | Service configuration | +| permittedCallers | address[] | Addresses allowed to call jobs | +| ttl | uint64 | Service time-to-live (must match quotes) | + +#### extendServiceFromQuotes + +```solidity +function extendServiceFromQuotes(uint64 serviceId, struct Types.SignedQuote[] quotes, uint64 extensionDuration) external payable +``` + +Extend a service using pre-signed operator quotes + +#### terminateService + +```solidity +function terminateService(uint64 serviceId) external +``` + +Terminate a service (as owner) + +#### addPermittedCaller + +```solidity +function addPermittedCaller(uint64 serviceId, address caller) external +``` + +Add a permitted caller to a service + +#### removePermittedCaller + +```solidity +function removePermittedCaller(uint64 serviceId, address caller) external +``` + +Remove a permitted caller from a service + +#### joinService + +```solidity +function joinService(uint64 
serviceId, uint16 exposureBps) external +``` + +Join an active service (Dynamic membership only) + +#### joinServiceWithCommitments + +```solidity +function joinServiceWithCommitments(uint64 serviceId, uint16 exposureBps, struct Types.AssetSecurityCommitment[] commitments) external +``` + +Join an active service with per-asset security commitments (Dynamic membership only) + +#### leaveService + +```solidity +function leaveService(uint64 serviceId) external +``` + +Leave an active service (Dynamic membership only) + +#### scheduleExit + +```solidity +function scheduleExit(uint64 serviceId) external +``` + +Schedule exit from an active service when exit queues are enabled + +#### executeExit + +```solidity +function executeExit(uint64 serviceId) external +``` + +Execute a scheduled exit after the queue delay + +#### cancelExit + +```solidity +function cancelExit(uint64 serviceId) external +``` + +Cancel a scheduled exit before execution + +#### forceExit + +```solidity +function forceExit(uint64 serviceId, address operator) external +``` + +Force exit an operator from a service (if permitted by config) + +#### forceRemoveOperator + +```solidity +function forceRemoveOperator(uint64 serviceId, address operator) external +``` + +Force remove an operator from a service (blueprint manager only) + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ---------------------- | +| serviceId | uint64 | The service ID | +| operator | address | The operator to remove | + +#### billSubscription + +```solidity +function billSubscription(uint64 serviceId) external +``` + +Bill a subscription service for the current period + +#### billSubscriptionBatch + +```solidity +function billSubscriptionBatch(uint64[] serviceIds) external returns (uint256 totalBilled, uint256 billedCount) +``` + +Bill multiple subscription services in one call + +#### getBillableServices + +```solidity +function getBillableServices(uint64[] serviceIds) external view returns (uint64[] billable) +``` + +Get billable services from a list of candidates + +#### fundService + +```solidity +function fundService(uint64 serviceId, uint256 amount) external payable +``` + +Fund a service escrow balance + +#### getServiceRequest + +```solidity +function getServiceRequest(uint64 requestId) external view returns (struct Types.ServiceRequest) +``` + +Get service request + +#### getServiceRequestSecurityRequirements + +```solidity +function getServiceRequestSecurityRequirements(uint64 requestId) external view returns (struct Types.AssetSecurityRequirement[]) +``` + +Get security requirements for a service request + +#### getServiceRequestSecurityCommitments + +```solidity +function getServiceRequestSecurityCommitments(uint64 requestId, address operator) external view returns (struct Types.AssetSecurityCommitment[]) +``` + +Get security commitments for a service request by operator + +#### getService + +```solidity +function getService(uint64 serviceId) external view returns (struct Types.Service) +``` + +Get service info + +#### isServiceActive + +```solidity +function isServiceActive(uint64 serviceId) external view returns (bool) +``` + +Check if service is active + +#### isServiceOperator + +```solidity +function isServiceOperator(uint64 serviceId, address operator) external view returns (bool) +``` + +Check if address is operator in service + +#### getServiceOperator + +```solidity +function getServiceOperator(uint64 serviceId, address operator) external view returns (struct Types.ServiceOperator) +``` + +Get operator info 
for a service + +#### getServiceOperators + +```solidity +function getServiceOperators(uint64 serviceId) external view returns (address[]) +``` + +Get the list of operators for a service + +#### getServiceSecurityRequirements + +```solidity +function getServiceSecurityRequirements(uint64 serviceId) external view returns (struct Types.AssetSecurityRequirement[]) +``` + +Get persisted security requirements for an active service + +#### getServiceEscrow + +```solidity +function getServiceEscrow(uint64 serviceId) external view returns (struct PaymentLib.ServiceEscrow) +``` + +Get service escrow details + +#### getExitRequest + +```solidity +function getExitRequest(uint64 serviceId, address operator) external view returns (struct Types.ExitRequest) +``` + +Get exit request for an operator + +#### getExitStatus + +```solidity +function getExitStatus(uint64 serviceId, address operator) external view returns (enum Types.ExitStatus) +``` + +Get exit status for an operator + +#### getExitConfig + +```solidity +function getExitConfig(uint64 serviceId) external view returns (struct Types.ExitConfig) +``` + +Get exit configuration for a service + +#### canScheduleExit + +```solidity +function canScheduleExit(uint64 serviceId, address operator) external view returns (bool canExit, string reason) +``` + +Check if operator can schedule exit now + +#### getServiceSecurityCommitments + +```solidity +function getServiceSecurityCommitments(uint64 serviceId, address operator) external view returns (struct Types.AssetSecurityCommitment[]) +``` + +Get persisted security commitments for an active service by operator + +#### isPermittedCaller + +```solidity +function isPermittedCaller(uint64 serviceId, address caller) external view returns (bool) +``` + +Check if address can call jobs on service + +#### serviceCount + +```solidity +function serviceCount() external view returns (uint64) +``` + +Get current service count + +#### Events + +#### ServiceRequested + +```solidity +event ServiceRequested(uint64 requestId, uint64 blueprintId, address requester) +``` + +#### ServiceRequestedWithSecurity + +```solidity +event ServiceRequestedWithSecurity(uint64 requestId, uint64 blueprintId, address requester) +``` + +#### ServiceApproved + +```solidity +event ServiceApproved(uint64 requestId, address operator) +``` + +#### ServiceRejected + +```solidity +event ServiceRejected(uint64 requestId, address operator) +``` + +#### ServiceActivated + +```solidity +event ServiceActivated(uint64 serviceId, uint64 requestId, uint64 blueprintId) +``` + +#### ServiceTerminated + +```solidity +event ServiceTerminated(uint64 serviceId) +``` + +#### OperatorJoinedService + +```solidity +event OperatorJoinedService(uint64 serviceId, address operator, uint16 exposureBps) +``` + +#### OperatorLeftService + +```solidity +event OperatorLeftService(uint64 serviceId, address operator) +``` + +#### SubscriptionBilled + +```solidity +event SubscriptionBilled(uint64 serviceId, uint256 amount, uint64 period) +``` diff --git a/pages/developers/api/reference/generated/ITangleSlashing.mdx b/pages/developers/api/reference/generated/ITangleSlashing.mdx new file mode 100644 index 00000000..2b935915 --- /dev/null +++ b/pages/developers/api/reference/generated/ITangleSlashing.mdx @@ -0,0 +1,107 @@ +--- +title: ITangleSlashing +description: Auto-generated Solidity API reference. 
+--- + +# ITangleSlashing + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleSlashing.sol + +### ITangleSlashing + +Slashing interface for Tangle protocol + +#### Functions + +#### proposeSlash + +```solidity +function proposeSlash(uint64 serviceId, address operator, uint256 amount, bytes32 evidence) external returns (uint64 slashId) +``` + +Propose a slash against an operator + +##### Parameters + +| Name | Type | Description | +| --------- | ------- | ------------------------------------ | +| serviceId | uint64 | The service where violation occurred | +| operator | address | The operator to slash | +| amount | uint256 | Amount to slash | +| evidence | bytes32 | Evidence hash | + +##### Return Values + +| Name | Type | Description | +| ------- | ------ | ------------------------------------ | +| slashId | uint64 | The ID of the created slash proposal | + +#### disputeSlash + +```solidity +function disputeSlash(uint64 slashId, string reason) external +``` + +Dispute a slash proposal + +#### executeSlash + +```solidity +function executeSlash(uint64 slashId) external returns (uint256 actualSlashed) +``` + +Execute a slash proposal + +#### executeSlashBatch + +```solidity +function executeSlashBatch(uint64[] slashIds) external returns (uint256 totalSlashed, uint256 executedCount) +``` + +Execute a batch of slashes + +#### getExecutableSlashes + +```solidity +function getExecutableSlashes(uint64 fromId, uint64 toId) external view returns (uint64[] ids) +``` + +Get list of executable slash IDs in a range + +#### cancelSlash + +```solidity +function cancelSlash(uint64 slashId, string reason) external +``` + +Cancel a slash proposal + +#### setSlashConfig + +```solidity +function setSlashConfig(uint64 disputeWindow, bool instantSlashEnabled, uint16 maxSlashBps) external +``` + +Update slashing configuration + +#### getSlashProposal + +```solidity +function getSlashProposal(uint64 slashId) external view returns (struct SlashingLib.SlashProposal) +``` + +Get slash proposal details + +#### Events + +#### SlashProposed + +```solidity +event SlashProposed(uint64 serviceId, address operator, uint256 amount, bytes32 evidence) +``` + +#### SlashExecuted + +```solidity +event SlashExecuted(uint64 serviceId, address operator, uint256 amount) +``` diff --git a/pages/developers/api/reference/generated/ITangleToken.mdx b/pages/developers/api/reference/generated/ITangleToken.mdx new file mode 100644 index 00000000..5c92135a --- /dev/null +++ b/pages/developers/api/reference/generated/ITangleToken.mdx @@ -0,0 +1,94 @@ +--- +title: ITangleToken +description: Auto-generated Solidity API reference. 
+--- + +# ITangleToken + +Source: https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleGovernance.sol + +### ITangleToken + +Interface for the TNT governance token + +#### Functions + +#### getVotes + +```solidity +function getVotes(address account) external view returns (uint256) +``` + +Get the current voting power of an account + +#### getPastVotes + +```solidity +function getPastVotes(address account, uint256 blockNumber) external view returns (uint256) +``` + +Get historical voting power at a past block + +#### getPastTotalSupply + +```solidity +function getPastTotalSupply(uint256 blockNumber) external view returns (uint256) +``` + +Get the total supply at a past block + +#### delegates + +```solidity +function delegates(address account) external view returns (address) +``` + +Get the delegate of an account + +#### delegate + +```solidity +function delegate(address delegatee) external +``` + +Delegate voting power to another address + +#### delegateBySig + +```solidity +function delegateBySig(address delegatee, uint256 nonce, uint256 expiry, uint8 v, bytes32 r, bytes32 s) external +``` + +Delegate using EIP-712 signature + +#### totalSupply + +```solidity +function totalSupply() external view returns (uint256) +``` + +Standard ERC20 functions + +#### balanceOf + +```solidity +function balanceOf(address account) external view returns (uint256) +``` + +#### transfer + +```solidity +function transfer(address to, uint256 amount) external returns (bool) +``` + +#### approve + +```solidity +function approve(address spender, uint256 amount) external returns (bool) +``` + +#### transferFrom + +```solidity +function transferFrom(address from, address to, uint256 amount) external returns (bool) +``` diff --git a/pages/developers/blueprint-contexts/_meta.ts b/pages/developers/blueprint-contexts/_meta.ts index e37dc8ff..519f3ec3 100644 --- a/pages/developers/blueprint-contexts/_meta.ts +++ b/pages/developers/blueprint-contexts/_meta.ts @@ -2,10 +2,8 @@ import { Meta } from "nextra"; const meta: Meta = { introduction: "Introduction to Contexts", - "eigenlayer-context": "Eigenlayer Context", "evm-provider-context": "EVM Provider Context", "keystore-context": "Keystore Context", - "services-context": "Services Context", "tangle-client-context": "Tangle Client Context", }; diff --git a/pages/developers/blueprint-contexts/eigenlayer-context.mdx b/pages/developers/blueprint-contexts/eigenlayer-context.mdx deleted file mode 100644 index 45cfe32c..00000000 --- a/pages/developers/blueprint-contexts/eigenlayer-context.mdx +++ /dev/null @@ -1,86 +0,0 @@ ---- -title: Eigenlayer Context ---- - -import GithubFileReaderDisplay from '/components/GithubFileReaderDisplay'; - -# Eigenlayer Context - -The `EigenlayerContext` trait provides a standardized interface for interacting with Eigenlayer services and utilities in your Blueprint. 
- -## Overview - -The `EigenlayerContext` trait provides access to core Eigenlayer services and functionality: - -### Core Services - -- AVS Registry Reader/Writer - For interacting with the AVS registry contracts -- Operator Info Service - For managing operator information and state -- BLS Aggregation Service - For handling BLS signature aggregation - -### Operator Management - -- Query operator stakes and details -- Track stake history and updates -- Retrieve operator public keys -- Get operator IDs and addresses - -### Stake Management - -- Get stake information for specific blocks -- Track total stake history -- Query stake updates and changes -- Monitor stake across different quorums - - - -## Using the Context - -### 1. Define Your Context - -First, define your context struct that implements the `EigenlayerContext` trait: - - - -### 2. Implement methods for the context - -You can implement methods for the context to provide custom functionality: - - - -### 3. Use the Context in Jobs - -You can then use this context in your jobs to access Eigenlayer functionality: - - - -### 4. Instantiate the Context - -Finally, instantiate your context in your main runner: - - diff --git a/pages/developers/blueprint-contexts/evm-provider-context.mdx b/pages/developers/blueprint-contexts/evm-provider-context.mdx index 26e12a95..37b2b1d8 100644 --- a/pages/developers/blueprint-contexts/evm-provider-context.mdx +++ b/pages/developers/blueprint-contexts/evm-provider-context.mdx @@ -6,14 +6,16 @@ import GithubFileReaderDisplay from '/components/GithubFileReaderDisplay'; # EVM Provider Context +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/contexts + The `EvmInstrumentedClientContext` trait provides a standardized [alloy-rs](https://github.com/alloy-rs) EVM provider for interacting with EVM-compatible blockchain networks in your Blueprint. -## Overview +## Context Summary The `EvmInstrumentedClientContext` trait provides access to an EVM provider: - -## Using the Context - -### 1. Define Your Context - -First, define your context struct that implements the `ServicesContext` trait: - - - -### 2. Access Services Client Functionality - -You can then use this context to access the services client and interact with Tangle services: - - diff --git a/pages/developers/blueprint-contexts/tangle-client-context.mdx b/pages/developers/blueprint-contexts/tangle-client-context.mdx index 1b36e26e..66048eb2 100644 --- a/pages/developers/blueprint-contexts/tangle-client-context.mdx +++ b/pages/developers/blueprint-contexts/tangle-client-context.mdx @@ -6,44 +6,46 @@ import GithubFileReaderDisplay from '/components/GithubFileReaderDisplay'; # Tangle Client Context -The `TangleClientContext` trait provides a standardized interface for interacting with the Tangle network in your Blueprint through a Subxt `OnlineClient`. +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/contexts -## Overview +The `TangleEvmClientContext` trait provides a standardized interface for interacting with the Tangle EVM protocol from a Blueprint. It exposes a typed `TangleEvmClient` configured from the blueprint environment. 
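+
+As a rough sketch of the pattern (field names and the trait implementation details here are illustrative assumptions, not the SDK's exact API — see the SDK source linked above and the snippets below for the canonical definitions), a context is a small `Clone`-able struct built from the `BlueprintEnvironment` and attached to the router so job handlers can reach the client:
+
+```rust
+// Sketch only: shows the general shape of a context struct.
+// Identifiers are assumptions; the real TangleEvmClientContext
+// implementation is shown in the snippets below.
+#[derive(Clone)]
+pub struct MyContext {
+    // Loaded blueprint environment (RPC endpoints, keystore URI, data dir).
+    pub env: BlueprintEnvironment,
+}
+
+// When building the runner, the context is attached to the router so each
+// job handler can extract it and obtain the typed Tangle EVM client:
+// Router::new().route(JOB_ID, handler.layer(TangleEvmLayer)).with_context(MyContext { env })
+```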
-The `TangleClientContext` trait provides access to a Subxt client that enables: +## Context Summary -- Transaction Submission - Send transactions to the Tangle network -- State Queries - Read storage and state from the chain -- Event Monitoring - Subscribe to and process chain events -- RPC Calls - Make direct RPC calls to Tangle nodes +The `TangleEvmClientContext` trait provides access to an EVM client that enables: + +- Transaction submission to core protocol contracts +- Service and operator state queries +- Job submission and result reporting +- Access to operator identity and keystore-backed signing ## Using the Context ### 1. Define Your Context -First, define your context struct that implements the `TangleClientContext` trait: +First, define your context struct that implements the `TangleEvmClientContext` trait: -### 2. Access Subxt Client Functionality +### 2. Access Tangle EVM Client Functionality -You can then use this context to access the Subxt client and interact with Tangle: +You can then use this context to access the client and interact with the Tangle EVM protocol: diff --git a/pages/developers/blueprint-qos.mdx b/pages/developers/blueprint-qos.mdx index bd0541da..14355958 100644 --- a/pages/developers/blueprint-qos.mdx +++ b/pages/developers/blueprint-qos.mdx @@ -4,339 +4,247 @@ title: Quality of Service (QoS) Integration # Quality of Service (QoS) Integration Guide -This guide explains how to integrate and use the Blueprint SDK's Quality of Service (QoS) system to add comprehensive observability, monitoring, and dashboard capabilities to any Blueprint. QoS provides unified metrics collection, log aggregation, heartbeat monitoring, and visualization through a cohesive interface. +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/qos -## Prerequisites +This guide explains how to integrate the Blueprint SDK Quality of Service (QoS) system for observability, monitoring, and dashboards. QoS combines heartbeats, metrics, logs, and Grafana dashboards into a single service that you can run alongside any Blueprint. -- Understanding of Blueprint concepts and execution model -- Familiarity with Tangle Network architecture -- Basic knowledge of observability concepts (metrics, logging, monitoring) - -## QoS Overview +## QoS Summary The Blueprint QoS system provides a complete observability stack: -- **Heartbeat Service**: Sends periodic heartbeats to Tangle to prevent slashing -- **Metrics Collection**: Captures system and application metrics -- **Logging**: Aggregates logs via Loki for centralized querying -- **Dashboards**: Creates Grafana visualizations automatically -- **Server Management**: Optionally runs containerized instances of Prometheus, Loki, and Grafana +- **Heartbeat Service**: submits periodic liveness signals to the status registry +- **Metrics Collection**: exports system and job metrics via a Prometheus-compatible endpoint +- **Logging**: streams logs to Loki (optional) +- **Dashboards**: builds Grafana dashboards (optional) +- **Server Management**: can run Grafana/Loki/Prometheus containers for you + +## What QoS Exposes -The QoS system is designed to be added to any Blueprint type (Tangle, Eigenlayer, P2P, or Cron) as a background service. +QoS always exposes a Prometheus-compatible metrics endpoint when metrics are enabled. Grafana and Loki are optional and can be managed by QoS or connected externally. 
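+
+Once the runner is up, a quick way to confirm the metrics endpoint is alive is to hit its health and metrics routes directly. This is a minimal sketch assuming the default port from the table below, a runner reachable on localhost, and the `tokio` and `reqwest` crates as dependencies:
+
+```rust
+// Minimal health check against the QoS metrics server (defaults assumed: localhost:9090).
+#[tokio::main]
+async fn main() -> Result<(), reqwest::Error> {
+    // Liveness probe exposed alongside the Prometheus routes.
+    let health = reqwest::get("http://localhost:9090/health").await?;
+    println!("health: {}", health.status());
+
+    // Raw Prometheus exposition text; the first few lines are enough to confirm scraping works.
+    let metrics = reqwest::get("http://localhost:9090/metrics").await?.text().await?;
+    for line in metrics.lines().take(10) {
+        println!("{line}");
+    }
+    Ok(())
+}
+```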
-## Integrating QoS into a Blueprint +| Component | Default Endpoint | Notes | +| ------------------ | ------------------------------------- | ---------------------------------------------------------------------- | +| Prometheus metrics | `http://:9090/metrics` | Includes `/health` plus Prometheus v1 API routes like `/api/v1/query`. | +| Grafana UI | `http://:3000` | Only when configured or managed by QoS. | +| Loki push API | `http://:3100/loki/api/v1/push` | Only when configured or managed by QoS. | -The integration process involves setting up the QoS configuration and implementing the HeartbeatConsumer trait. Here's a step-by-step guide. +## Integrating QoS with BlueprintRunner -### Main Blueprint Setup +If you use `BlueprintRunner`, it wires the HTTP RPC endpoint, keystore URI, and status registry address into QoS for you: ```rust -#[tokio::main] -async fn main() -> Result<(), blueprint_sdk::Error> { - let env = BlueprintEnvironment::load()?; - - // Create your Blueprint's primary context - let context = MyContext::new(env.clone()).await?; - - // Configure QoS system - let qos_config = blueprint_qos::default_qos_config(); - let heartbeat_consumer = Arc::new(MyHeartbeatConsumer::new()); - - // Standard Blueprint runner setup with QoS - BlueprintRunner::builder(TangleConfig::default(), env) - .router(Router::new() - .route(JOB_ID, handler.layer(TangleLayer)) - .with_context(context)) - .producer(producer) - .consumer(consumer) - .qos_service(qos_config, Some(heartbeat_consumer)) - .run() - .await -} +let qos_config = blueprint_qos::default_qos_config(); +let heartbeat_consumer = Arc::new(MyHeartbeatConsumer::new()); + +BlueprintRunner::builder(TangleEvmConfig::default(), env) + .router(router) + .qos_service(qos_config, Some(heartbeat_consumer)) + .run() + .await?; ``` -### Implementing HeartbeatConsumer +Note: `BlueprintRunner::qos_service` enables `manage_servers(true)` internally. If you want to avoid managed containers, pass a config with `grafana_server: None` and `loki_server: None`. + +## HeartbeatConsumer and Keystore Requirements -To enable the heartbeat service, you must implement the `HeartbeatConsumer` trait, which is responsible for sending heartbeat signals to the Tangle Network: +Heartbeats require a keystore with an ECDSA key. Use `BLUEPRINT_KEYSTORE_URI` or `--keystore-path` so QoS can sign heartbeats. 
+ +```bash +cargo tangle key --algo ecdsa --keystore ./keystore --name operator +export BLUEPRINT_KEYSTORE_URI="$(pwd)/keystore" +``` + +Implement the heartbeat consumer using the current trait signature: ```rust +use blueprint_qos::heartbeat::{HeartbeatConsumer, HeartbeatStatus}; +use blueprint_qos::error::Result as QoSResult; +use std::future::Future; +use std::pin::Pin; + #[derive(Clone)] -struct MyHeartbeatConsumer { - // Add any required fields for heartbeat submission -} +struct MyHeartbeatConsumer; impl HeartbeatConsumer for MyHeartbeatConsumer { - fn consume_heartbeat( + fn send_heartbeat( &self, - service_id: u64, - blueprint_id: u64, - metrics_data: String, - ) -> Result<(), Box> { - // Implement custom heartbeat logic here, specific to blueprint - Ok(()) + _status: &HeartbeatStatus, + ) -> Pin> + Send>> { + Box::pin(async move { Ok(()) }) } } ``` -## QoS Configuration Options +## Configuration Options -### Using Default Configuration - -The simplest way to get started is with the default configuration: +### Default Configuration ```rust let qos_config = blueprint_qos::default_qos_config(); ``` -This initializes a configuration with: - -- Heartbeat service (disabled until configured) -- Metrics collection -- Loki logging -- Grafana integration -- Automatic server management set to `false` +This enables metrics, Loki logging, and Grafana integration. Whether containers start depends on `manage_servers` (BlueprintRunner forces it on; see note above). -### Custom Configuration +### Bring Your Own Observability Stack -Customize the configuration for your specific needs: +Point QoS at your existing Grafana/Loki/Prometheus stack by overriding the configs and keeping `manage_servers` off: ```rust let qos_config = QoSConfig { - heartbeat: Some(HeartbeatConfig { - service_id: Some(42), - blueprint_id: Some(7), - interval_seconds: 60, - jitter_seconds: 5, + metrics: Some(MetricsConfig { + prometheus_server: Some(PrometheusServerConfig { + host: "0.0.0.0".into(), + port: 9090, + use_docker: false, + ..Default::default() + }), + ..Default::default() }), - metrics: Some(MetricsConfig::default()), - loki: Some(LokiConfig::default()), grafana: Some(GrafanaConfig { - endpoint: "http://localhost:3000".into(), - admin_user: Some("admin".into()), - admin_password: Some("admin".into()), - folder: None, + url: "http://grafana.internal:3000".into(), + api_key: Some(std::env::var("GRAFANA_API_KEY")?), + prometheus_datasource_url: Some("http://prometheus.internal:9090".into()), + ..Default::default() + }), + loki: Some(LokiConfig { + url: "http://loki.internal:3100/loki/api/v1/push".into(), + ..Default::default() }), - grafana_server: Some(GrafanaServerConfig::default()), - loki_server: Some(LokiServerConfig::default()), - prometheus_server: Some(PrometheusServerConfig::default()), - docker_network: Some("blueprint-network".into()), + manage_servers: false, + ..blueprint_qos::default_qos_config() +}; +``` + +### Managed Observability Stack + +QoS can spin up Grafana, Loki, and Prometheus containers for you. Make sure Docker is available. 
+ +```rust +let qos_config = QoSConfig { manage_servers: true, - service_id: Some(42), - blueprint_id: Some(7), + grafana_server: Some(GrafanaServerConfig { + admin_user: "admin".into(), + admin_password: "change-me".into(), + allow_anonymous: false, + data_dir: "/var/lib/grafana".into(), + ..Default::default() + }), + loki_server: Some(LokiServerConfig { + data_dir: "/var/lib/loki".into(), + config_path: Some("./loki-config.yaml".into()), + ..Default::default() + }), + prometheus_server: Some(PrometheusServerConfig { + host: "0.0.0.0".into(), + port: 9090, + use_docker: true, + config_path: Some("./prometheus.yml".into()), + data_path: Some("./prometheus-data".into()), + ..Default::default() + }), + docker_network: Some("blueprint-observability".into()), docker_bind_ip: Some("0.0.0.0".into()), + ..blueprint_qos::default_qos_config() }; ``` -### Using the Builder Pattern +### Builder Pattern -The builder pattern provides a fluent API for configuration: +Use the builder when you want explicit wiring for heartbeats or custom datasources: ```rust let qos_service = QoSServiceBuilder::new() .with_heartbeat_config(HeartbeatConfig { - service_id: Some(service_id), - blueprint_id: Some(blueprint_id), - interval_seconds: 60, - jitter_seconds: 5, + service_id, + blueprint_id, + interval_secs: 60, + jitter_percent: 10, + max_missed_heartbeats: 3, + status_registry_address, }) .with_heartbeat_consumer(Arc::new(consumer)) + .with_http_rpc_endpoint(env.http_rpc_endpoint.to_string()) + .with_keystore_uri(env.keystore_uri.clone()) + .with_status_registry_address(status_registry_address) .with_metrics_config(MetricsConfig::default()) - .with_loki_config(LokiConfig::default()) .with_grafana_config(GrafanaConfig::default()) - .with_prometheus_server_config(PrometheusServerConfig { - host: "0.0.0.0".into(), - port: 9090, - ..Default::default() - }) + .with_loki_config(LokiConfig::default()) + .with_prometheus_server_config(PrometheusServerConfig::default()) .manage_servers(true) - .with_ws_rpc_endpoint(ws_endpoint) - .with_keystore_uri(keystore_uri) - .build()?; + .build() + .await?; ``` -## Recording Blueprint Metrics and Events +## Recording Metrics and Events -### Job Performance Tracking - -Tracking job execution and performance in your job handlers is essential for monitoring and optimization: +Track job execution and errors in your handlers: ```rust -pub async fn process_job( - Context(ctx): Context, - TangleArg(data): TangleArg, -) -> Result> { - let start_time = std::time::Instant::now(); - - // Process the job - let result = perform_processing(&data)?; - - // Record job execution metrics - if let Some(qos) = &ctx.qos_service { - qos.record_job_execution( - JOB_ID, - start_time.elapsed().as_secs_f64(), - ctx.service_id, - ctx.blueprint_id - ); - } - - Ok(TangleResult::Success(result)) +if let Some(qos) = &ctx.qos_service { + qos.record_job_execution( + JOB_ID, + start_time.elapsed().as_secs_f64(), + ctx.service_id, + ctx.blueprint_id, + ); } ``` -### Error Tracking - -Tracking job errors is crucial for monitoring and alerts: - ```rust -match perform_complex_operation() { - Ok(value) => Ok(TangleResult::Success(value)), - Err(e) => { - if let Some(qos) = &ctx.qos_service { - qos.record_job_error(JOB_ID, "complex_operation_failure"); - } - Err(e.into()) - } +if let Some(qos) = &ctx.qos_service { + qos.record_job_error(JOB_ID, "complex_operation_failure"); } ``` -## Automatic Dashboard Creation - -QoS can automatically create Grafana dashboards that display your Blueprint's metrics: +## Creating Grafana 
Dashboards ```rust -// Create a custom dashboard for your Blueprint -if let Some(mut qos) = qos_service { - if let Err(e) = qos.create_dashboard("My Blueprint") { - error!("Failed to create dashboard: {}", e); - } else { - info!("Created Grafana dashboard for My Blueprint"); - } -} +let mut qos_service = qos_service; +qos_service.create_dashboard("My Blueprint").await?; ``` -The dashboard includes: +The default dashboard template lives at `crates/qos/config/grafana_dashboard.json` in the SDK. -- System resource usage (CPU, memory, disk, network) -- Job execution metrics (frequency, duration, error rates) -- Log visualization panels (when Loki is configured) -- Service status and uptime information +## Accessing Metrics in Code -## Accessing QoS in Context - -Typically, you'll want to store the QoS service in your Blueprint context: +You can query the metrics provider directly (for custom metrics or status checks): ```rust -#[derive(Clone)] -pub struct MyContext { - #[config] - pub env: BlueprintEnvironment, - pub data_dir: PathBuf, - pub qos_service: Option>>, - pub service_id: u64, - pub blueprint_id: u64, -} - -impl MyContext { - pub async fn new(env: BlueprintEnvironment) -> Result { - // Initialize QoS service - let qos_service = initialize_qos(&env)?; - - Ok(Self { - data_dir: env.data_dir.clone().unwrap_or_else(default_data_dir), - qos_service: Some(Arc::new(qos_service)), - service_id: 42, - blueprint_id: 7, - env, - }) - } -} -``` - -You can then access the QoS service in your job handlers: - -```rust -pub async fn my_job( - Context(ctx): Context, - TangleArg(data): TangleArg, -) -> Result> { - // Access QoS metrics provider - if let Some(qos) = &ctx.qos_service { - if let Some(provider) = qos.provider() { - let cpu_usage = provider.get_cpu_usage()?; - info!("Current CPU usage: {}%", cpu_usage); - } +use blueprint_qos::metrics::types::MetricsProvider; + +if let Some(qos) = &ctx.qos_service { + if let Some(provider) = qos.provider() { + let system_metrics = provider.get_system_metrics().await; + let _cpu = system_metrics.cpu_usage; + provider + .add_custom_metric("custom.label".into(), "value".into()) + .await; } - - // Job implementation - Ok(TangleResult::Success(())) } ``` -## Server Management - -QoS can automatically manage Grafana, Prometheus, and Loki servers: - -```rust -// Configure server management -let qos_config = QoSConfig { - grafana_server: Some(GrafanaServerConfig { - port: 3000, - container_name: "blueprint-grafana".into(), - image: "grafana/grafana:latest".into(), - ..Default::default() - }), - loki_server: Some(LokiServerConfig { - port: 3100, - container_name: "blueprint-loki".into(), - image: "grafana/loki:latest".into(), - ..Default::default() - }), - prometheus_server: Some(PrometheusServerConfig { - port: 9090, - container_name: "blueprint-prometheus".into(), - image: "prom/prometheus:latest".into(), - host: "0.0.0.0".into(), - ..Default::default() - }), - docker_network: Some("blueprint-network".into()), - manage_servers: true, - ..Default::default() -}; -``` - -For proper operation with Docker containers, ensure: - -1. Your application binds metrics endpoints to `0.0.0.0` (not `127.0.0.1`) -2. Prometheus configuration uses `host.docker.internal` to access host metrics -3. Docker is installed and the user has the necessary permissions -4. 
A common Docker network is used for all containers - ## Best Practices ✅ DO: -- Initialize QoS early in your Blueprint's startup sequence -- Add QoS as a background service using `BlueprintRunner::background_service()` -- Record job execution metrics for all important jobs -- Use `#[derive(Clone)]` for your `HeartbeatConsumer` implementation -- Access QoS APIs through your Blueprint's context +- Initialize QoS early in your Blueprint startup sequence. +- Use `BlueprintRunner::qos_service(...)` to auto-wire RPC + keystore + status registry. +- Keep Prometheus reachable (bind to `0.0.0.0` if scraped externally). +- Replace default Grafana credentials when using managed servers. ❌ DON'T: -- Don't create separate QoS instances for different components -- Avoid using hardcoded admin credentials in production code -- Don't pass the QoS service directly between jobs; use the context pattern -- Don't forget to bind Prometheus metrics server to `0.0.0.0` for Docker accessibility -- Don't ignore QoS shutdown or creation errors; they may indicate more serious issues +- Don't enable heartbeats without setting `BLUEPRINT_KEYSTORE_URI`. +- Don't expose managed Grafana publicly without auth. +- Don't ignore QoS startup errors; they usually indicate misconfigured ports or credentials. ## QoS Components Reference -| Component | Primary Struct | Config | Purpose | -| ----------------- | ------------------ | ---------------------- | ------------------------------------------------- | -| Unified Service | `QoSService` | `QoSConfig` | Main entry point for QoS integration | -| Heartbeat | `HeartbeatService` | `HeartbeatConfig` | Sends periodic liveness signals to chain | -| Metrics | `MetricsService` | `MetricsConfig` | Collects system and application metrics | -| Logging | N/A | `LokiConfig` | Configures log aggregation to Loki | -| Dashboards | `GrafanaClient` | `GrafanaConfig` | Creates and manages Grafana dashboards | -| Server Management | `ServerManager` | Various server configs | Manages Docker containers for observability stack | +| Component | Primary Struct | Config | Purpose | +| ----------------- | ------------------ | ----------------- | ------------------------------------------ | +| Unified Service | `QoSService` | `QoSConfig` | Main entry point for QoS integration | +| Heartbeat | `HeartbeatService` | `HeartbeatConfig` | Liveness signals to the status registry | +| Metrics | `MetricsService` | `MetricsConfig` | System + job metrics and Prometheus export | +| Logging | N/A | `LokiConfig` | Log aggregation via Loki | +| Dashboards | `GrafanaClient` | `GrafanaConfig` | Dashboards and datasources | +| Server Management | `ServerManager` | Server configs | Manages Docker containers for the stack | diff --git a/pages/developers/blueprint-runner/background-services.mdx b/pages/developers/blueprint-runner/background-services.mdx index 9ba37bab..ac840d1a 100644 --- a/pages/developers/blueprint-runner/background-services.mdx +++ b/pages/developers/blueprint-runner/background-services.mdx @@ -6,7 +6,9 @@ import GithubFileReaderDisplay from '/components/GithubFileReaderDisplay'; # Background Services -Background services are optional components in the [Blueprint Runner](/developers/blueprint-runner/introduction) architecture that run continuously to support the operation of your Actively Validated Service (AVS). This document explains how background services work, how to configure them, and best practices for implementation. 
+SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/runner + +Background services are optional components in the [Blueprint Runner](/developers/blueprint-runner/introduction) architecture that run continuously to support the operation of your blueprint. This document explains how background services work, how to configure them, and best practices for implementation. ## What are Background Services? @@ -19,9 +21,19 @@ Background services refer to any long-running processes that operate independent Unlike job handlers that execute in response to specific requests, background services operate autonomously to provide supporting functionality. +## Built-in Keepers + +The SDK provides built-in background keepers for common lifecycle automation: + +- **EpochKeeper**: Triggers at epoch boundaries for inflation distribution and reward calculations. +- **StreamKeeper**: Manages payment stream settlement and fee drip processing. +- **RoundKeeper**: Coordinates round-based protocols (threshold signatures, MPC ceremonies). + +These keepers handle the timing and coordination logic so your blueprint can focus on the actual work. + ## Common Use Cases -Background services could be used for various purposes in Blueprints, with the following being only a few examples: +Background services can be used for various purposes in Blueprints: ### Data Collection and Aggregation @@ -40,7 +52,7 @@ Background services are typically configured in the `main.rs` file of your Bluep The only requirement for a background service is that it implements the `BackgroundService` trait: @@ -48,7 +50,7 @@ This creates a workspace with two main packages: Before building the runner, define the jobs that your Blueprint will execute. Jobs are defined in the library package: ### Step 4: Configuring the Router -The [router](/developers/blueprint-runner/routers) directs job calls to the appropriate handlers. In the example below, we configure the router with a single route for our defined job and specify that it is on Tangle with the `TangleLayer`: +The [router](/developers/blueprint-runner/routers) directs job calls to the appropriate handlers. In the example below, we configure the router with routes for our jobs and specify that they are on Tangle EVM with the `TangleEvmLayer`: This configuration: 1. Creates a new router -2. Adds a route for the `square` job with ID `XSQUARE_JOB_ID` -3. Applies the `TangleLayer` to add metadata to job results -4. Adds a filter layer to only process jobs that match the service ID +2. Adds routes for the `square`, `verified_square`, and `consensus_square` jobs +3. Applies the `TangleEvmLayer` to add Tangle EVM metadata to job results For more details on routers, see the [Routers documentation](/developers/blueprint-runner/routers). @@ -92,48 +93,48 @@ For more details on routers, see the [Routers documentation](/developers/bluepri Some blueprints may require one or more services to run in the background. Any number of background services can be set to run for a Blueprint Runner. With a background service defined, it just needs to be added to the Blueprint Runner: ### Step 6: Configuring a Producer with the Blueprint Runner's Builder -[Producers](/developers/blueprint-runner/producers) listen for events and prepare them for processing. The following example uses a `TangleProducer` as seen in [Step 3](#step-3-creating-a-producer-and-a-consumer): +[Producers](/developers/blueprint-runner/producers) listen for events and prepare them for processing. 
The following example uses a `TangleEvmProducer` as seen in [Step 3](#step-3-creating-a-producer-and-a-consumer): -This producer listens for finalized blocks on the Tangle network and converts them into job calls that can be processed by the router. +This producer polls `JobSubmitted` events on the Tangle Jobs contract and converts them into job calls that can be processed by the router. For more details on producers, see the [Producers documentation](/developers/blueprint-runner/producers). ### Step 7: Configuring a Consumer with the Blueprint Runner's Builder -[Consumers](/developers/blueprint-runner/consumers) handle the results of processed jobs. In the example above, we set up a Tangle consumer as seen in [Step 3](#step-3-creating-a-producer-and-a-consumer): +[Consumers](/developers/blueprint-runner/consumers) handle the results of processed jobs. In the example above, we set up a Tangle EVM consumer as seen in [Step 3](#step-3-creating-a-producer-and-a-consumer): -This consumer processes job results and can send transactions to the Tangle network based on those results. +This consumer processes job results and submits them back to the Tangle EVM contract. For more details on consumers, see the [Consumers documentation](/developers/blueprint-runner/consumers). @@ -142,9 +143,9 @@ For more details on consumers, see the [Consumers documentation](/developers/blu Implement customized shutdown logic to handle cleanup and resource release: @@ -176,4 +177,4 @@ After building your Blueprint Runner, you might want to explore: ## Conclusion -Building a Blueprint Runner involves setting up various components that work together to execute your Tangle Blueprint. By following this guide and adhering to best practices, you can create a robust and efficient Blueprint Runner for your Actively Validated Service on the Tangle Network. +Building a Blueprint Runner involves setting up various components that work together to execute your Tangle Blueprint. By following this guide and adhering to best practices, you can create a robust and efficient Blueprint Runner for your blueprint service on the Tangle Network. diff --git a/pages/developers/blueprint-runner/consumers.mdx b/pages/developers/blueprint-runner/consumers.mdx index 3d8d025d..75d94734 100644 --- a/pages/developers/blueprint-runner/consumers.mdx +++ b/pages/developers/blueprint-runner/consumers.mdx @@ -6,6 +6,8 @@ import GithubFileReaderDisplay from '/components/GithubFileReaderDisplay'; # Consumers +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/runner + Consumers are a vital component of the [Blueprint Runner](/developers/blueprint-runner/introduction) architecture that handle the results of processed jobs. This document explains how consumers work, how to configure them, and best practices for implementation. ## What are Consumers? @@ -33,17 +35,20 @@ Consumers are typically configured in the `main.rs` file of your Blueprint binar ### Basic Consumer Setup -First, you define the consumer you want to use. For example, a `TangleConsumer` that listens for finalized blocks on Tangle. After which, you pass the consumer to the Blueprint Runner's builder. +First, you define the consumer you want to use. For example, a `TangleEvmConsumer` that submits job results back to the Tangle EVM contracts. After which, you pass the consumer to the Blueprint Runner's builder. 
```rust let env = BlueprintEnvironment::load()?; -let sr25519_signer = env.keystore().first_local::()?; -let sr25519_pair = env.keystore().get_secret::(&sr25519_signer)?; -let sr25519_signer = TanglePairSigner::new(sr25519_pair.0); +let tangle_client = env.tangle_evm_client().await?; +let service_id = env + .protocol_settings + .tangle_evm()? + .service_id + .ok_or_else(|| blueprint_sdk::Error::Other("No service ID configured".to_string()))?; -let tangle_client = env.tangle_client().await?; -let tangle_producer = TangleProducer::finalized_blocks(tangle_client.rpc_client.clone()).await?; -let tangle_consumer = TangleConsumer::new(tangle_client.rpc_client.clone(), sr25519_signer); +let tangle_producer = TangleEvmProducer::new(tangle_client.clone(), service_id); +let tangle_consumer = TangleEvmConsumer::new(tangle_client.clone()); +let tangle_config = TangleEvmConfig::default(); BlueprintRunner::builder(tangle_config, env) .router(router) // Assuming your router is already defined @@ -57,25 +62,25 @@ BlueprintRunner::builder(tangle_config, env) Blueprint Runners can utilize various types of consumers depending on the requirements: -### Tangle Consumer +### Tangle EVM Consumer -A Tangle Consumer is a consumer that handles transactions on the Tangle, submitting job results for a Blueprint. +A Tangle EVM Consumer is a consumer that submits job results to the Tangle Jobs contract. -### EVM Consumer +### Generic EVM Consumer -An EVM Consumer is a consumer that handles transactions on the EVM, submitting job results for a Blueprint. +A generic EVM Consumer handles job results that encode EVM transaction requests. diff --git a/pages/developers/blueprint-runner/introduction.mdx b/pages/developers/blueprint-runner/introduction.mdx index 9ae8c9c7..01c7de48 100644 --- a/pages/developers/blueprint-runner/introduction.mdx +++ b/pages/developers/blueprint-runner/introduction.mdx @@ -6,7 +6,9 @@ import CardGrid from "../../../components/CardGrid.tsx" # Blueprint Runners -Blueprint Runners are the core orchestration components that execute Tangle Blueprints. They manage the lifecycle of jobs, coordinate between different components, and ensure the reliable execution of your Actively Validated Service (AVS). +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/runner + +Blueprint Runners are the core orchestration components that execute Tangle Blueprints. They manage the lifecycle of jobs, coordinate between different components, and ensure reliable execution of your blueprint service. ## Getting Started @@ -79,7 +81,7 @@ The architecture of a Blueprint Runner consists of the following components: ## Blueprint Runner Lifecycle -The Blueprint Runner follows a well-defined lifecycle to ensure reliable execution of your AVS: +The Blueprint Runner follows a well-defined lifecycle to ensure reliable execution of your blueprint: 1. **Initialization**: The Blueprint Runner loads configuration, sets up components, and establishes connections. 2. **Event Processing**: [Producers](/developers/blueprint-runner/producers) listen for events and convert them into job inputs. 
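+To make the lifecycle above concrete, here is a minimal end-to-end sketch assembled from the snippets on this and the
+surrounding pages. It reuses the `blueprint_sdk` types shown in those examples (`BlueprintEnvironment`, `Router`,
+`TangleEvmProducer`, `TangleEvmConsumer`, `TangleEvmConfig`, `BlueprintRunner`); the `square` handler and its job ID come
+from the Incredible Squaring example, and the exact builder methods for attaching the producer and consumer, as well as
+the final `.run()` call, are assumptions based on those examples rather than a verified API.
+
+```rust
+use blueprint_sdk::*; // assumption: adjust imports to the SDK modules your blueprint actually depends on
+
+// Job ID for the `square` handler defined in the blueprint's library package (Incredible Squaring example).
+const XSQUARE_JOB_ID: u64 = 0;
+
+#[tokio::main] // assumption: an async runtime such as tokio
+async fn main() -> Result<(), Box<dyn std::error::Error>> {
+    // 1. Initialization: load configuration and connect to Tangle EVM.
+    let env = BlueprintEnvironment::load()?;
+    let tangle_client = env.tangle_evm_client().await?;
+    let service_id = env
+        .protocol_settings
+        .tangle_evm()?
+        .service_id
+        .ok_or("No service ID configured")?;
+
+    // 2. Event processing: the producer polls JobSubmitted events for this service.
+    let producer = TangleEvmProducer::new(tangle_client.clone(), service_id);
+    // The consumer submits job results back to the Tangle Jobs contract.
+    let consumer = TangleEvmConsumer::new(tangle_client.clone());
+
+    // Job routing: map job IDs to handlers (handlers live in the library package).
+    let router = Router::new().route(XSQUARE_JOB_ID, square);
+
+    // Assemble and run until shutdown (builder method names are assumptions, see note above).
+    BlueprintRunner::builder(TangleEvmConfig::default(), env)
+        .router(router)
+        .producer(producer)
+        .consumer(consumer)
+        .run()
+        .await?;
+
+    Ok(())
+}
+```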
diff --git a/pages/developers/blueprint-runner/jobs.mdx b/pages/developers/blueprint-runner/jobs.mdx index eb3df71f..b2184bb6 100644 --- a/pages/developers/blueprint-runner/jobs.mdx +++ b/pages/developers/blueprint-runner/jobs.mdx @@ -6,6 +6,8 @@ import GithubFileReaderDisplay from '/components/GithubFileReaderDisplay'; # Jobs +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/runner + Jobs are the core building blocks of a [Blueprint Runner](/developers/blueprint-runner/introduction). They define the computational tasks that your Blueprint will execute in response to events. This document explains how jobs work, how to define them, and how to integrate them with other components of your Blueprint. ## What are Jobs? @@ -32,9 +34,9 @@ Jobs in a Blueprint are defined in the library package of your project. A job de Here's an example of a simple job definition from the Incredible Squaring example: @@ -42,8 +44,8 @@ In this example: - `XSQUARE_JOB_ID` is a constant that uniquely identifies the job - `square` is the function that implements the job's logic -- The job takes a single input parameter `x` of type `i32` -- The job returns a result of type `i32`, which is the square of the input +- The job takes a single ABI-encoded input parameter `x` extracted by `TangleEvmArg` +- The job returns a `TangleEvmResult`, which the runner ABI-encodes for submission ## Job Context @@ -57,7 +59,7 @@ Jobs can access context information provided by the Blueprint Runner. This conte Context is typically passed to jobs through the router configuration: ```rust -let router = Router::builder() +let router = Router::new() .route(MY_JOB_ID, my_job) .with_context(my_context); ``` @@ -67,9 +69,9 @@ let router = Router::builder() Jobs need to be registered to a route with the [router](/developers/blueprint-runner/routers) to be accessible. This is done when defining a Blueprint Runner: diff --git a/pages/developers/blueprint-runner/producers.mdx b/pages/developers/blueprint-runner/producers.mdx index 8bdf9bc9..34e5c8f4 100644 --- a/pages/developers/blueprint-runner/producers.mdx +++ b/pages/developers/blueprint-runner/producers.mdx @@ -6,6 +6,8 @@ import GithubFileReaderDisplay from '/components/GithubFileReaderDisplay'; # Producers +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/runner + Producers are a key component of the [Blueprint Runner](/developers/blueprint-runner/introduction) architecture that capture events and prepare them for processing. This document explains how producers work, how to configure them, and how to implement them. ## What are Producers? @@ -23,12 +25,18 @@ Producers are typically configured in the `main.rs` file of your Blueprint binar ### Basic Producer Setup -First, you define the producer you want to use. For example, a `TangleProducer` that listens for finalized blocks on Tangle. After which, you pass the producer to the Blueprint Runner's builder. +First, you define the producer you want to use. For example, a `TangleEvmProducer` that listens for `JobSubmitted` events on Tangle EVM. After which, you pass the producer to the Blueprint Runner's builder. ```rust let env = BlueprintEnvironment::load()?; -let tangle_client = env.tangle_client().await?; -let tangle_producer = TangleProducer::finalized_blocks(tangle_client.rpc_client.clone()).await?; +let tangle_client = env.tangle_evm_client().await?; +let service_id = env + .protocol_settings + .tangle_evm()? 
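+    // Service ID of the on-chain service whose JobSubmitted events this producer will poll;
+    // it is only set once the service has been created, so fail fast if it is missing.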
+ .service_id + .ok_or_else(|| blueprint_sdk::Error::Other("No service ID configured".to_string()))?; +let tangle_producer = TangleEvmProducer::new(tangle_client.clone(), service_id); +let tangle_config = TangleEvmConfig::default(); BlueprintRunner::builder(tangle_config, env) .router(router) // Assuming your router is already defined @@ -47,19 +55,19 @@ These producers listen for events from a blockchain, such as smart contract even There are currently two built-in blockchain producers: -#### Tangle Producer +#### Tangle EVM Producer #### EVM Polling Producer @@ -54,9 +55,9 @@ Below is a real example from our Incredible Squaring Blueprint Example: Layers are used to filter job calls based on certain criteria. There are two places layers can be used: 1. A specific Route: - - A job can be given a layer. This can be seen in the above code example, where the job is in the `TangleLayer`. This allows the consumer, which operates in that layer, to see the results from that job. This simplifies your Blueprint when you have many jobs, routes, and consumers. + - A job can be given a layer. This can be seen in the above code example, where the job is wrapped in the `TangleEvmLayer`. This adds Tangle EVM metadata to job results for the consumer. 2. A whole Router: - - A layer can be used to filter job calls based on some criteria. This can also be seen in the above code example, where the router is given a filter for the service ID. This allows the router to only process jobs with a matching service ID. + - A layer can be used to filter job calls based on criteria you control (for example, metadata from Tangle EVM extractors). ## Integration with Other Components @@ -75,4 +76,4 @@ Now that you understand routers, check out: ## Conclusion -Routers are a fundamental component of Blueprint Runners, enabling efficient job execution and management. By properly configuring and utilizing routers, you can build robust and performant Blueprints and AVSs. +Routers are a fundamental component of Blueprint Runners, enabling efficient job execution and management. By properly configuring and utilizing routers, you can build robust and performant Blueprints. diff --git a/pages/developers/blueprint-sdk.mdx b/pages/developers/blueprint-sdk.mdx index 90e93364..3510a61b 100644 --- a/pages/developers/blueprint-sdk.mdx +++ b/pages/developers/blueprint-sdk.mdx @@ -1,117 +1,103 @@ -# Gadget SDK - -The Blueprint SDK is a powerful toolkit designed to streamline the development of Tangle Blueprints. It provides a comprehensive set of features and tools that make building multi-party services faster, easier, and more secure. - -## Getting Started - -import TableOfContentCards from "../../components/TableOfContentCards.tsx" - - + +## Navigation + + + title: "CLI", + href: "/developers/cli/quickstart", + subItems: [ + { title: "Install", href: "https://github.com/tangle-network/blueprint/tree/v2/cli", description: "Install cargo-tangle" }, + { title: "Keys", href: "/developers/cli/keys", description: "Create and manage keystores" }, + ], + }, + ]} +/> diff --git a/pages/developers/blueprints/introduction.mdx b/pages/developers/blueprints/introduction.mdx index 35513558..f2b2e49e 100644 --- a/pages/developers/blueprints/introduction.mdx +++ b/pages/developers/blueprints/introduction.mdx @@ -7,63 +7,82 @@ import CardGrid from "../../../components/CardGrid.tsx" -**Blueprints** are Infrastructure as Code templates and **Instances** are their instantiations on the Tangle Network. 
Developers build Blueprints using our [Blueprint SDK](https://github.com/tangle-network/blueprint) and deploy them to the Tangle Network. Users can discover Blueprints and create Instances of them, which are run by Tangle Operators. The Tangle Network incentivizes operators to run these instances by rewarding them with TNT tokens and fees from execution. +**Blueprints** are on-chain service definitions, and **Services** are their on-chain instantiations. Developers build +Blueprints using the [Blueprint SDK](https://github.com/tangle-network/blueprint/tree/v2) and deploy them to Tangle v2. +Customers discover Blueprints, create Services, and pay for execution. Operators run Services with the Blueprint Manager, +earning fees and TNT incentives defined by the core protocol contracts in `tnt-core`. -A Blueprint Instance is a computational service. Blueprints are reusable templates so that useful services can be leveraged by many customers. A key benefit of instancing Blueprints is that each instance can have different operators and different restaked assets securing them. +A Blueprint Service is a reusable computational service. Each Service instance can select a different operator set, +security configuration, and payment token while sharing the same underlying blueprint logic. A Tangle Blueprint is defined by: -- An native program binary, a docker image, or a virtual machine binary. -- A set of smart contracts that provide programmability over the service's output verification and handling of malicious failures +- A **manager contract** (Blueprint Service Manager) that defines registration, request, and job verification logic. +- A **blueprint definition** (metadata URI, job schemas, artifact sources) used by the Blueprint Manager. +- **Executable artifacts** (native binary, container image, or WASM) for operators to run. -Tangle Blueprints specify their target environment for program execution. The Blueprint's binary can run natively on the operator's machine, in a virtual machine, or in a containerized environment. +Blueprints specify their runtime preferences so operators can execute services natively, in containers, or inside a VM. ## Detailed Interactions ### Developers -Developers interact with Tangle Blueprints by developing and deploying them to the blockchain. The smart contracts are deployed to the Tangle EVM, while the metadata is stored in Tangle's runtime. - -The Blueprint includes smart contracts responsible with all aspects of customizing a Blueprint's behavior and features. Developers have the flexibility to extend this contract, create child contracts, and anything that adheres to the function signatures expected by the runtime when executing logic. The Blueprint smart contract handles: +Developers deploy the blueprint manager contract and register a blueprint definition that points at the artifacts and job +schemas operators should run. The on-chain manager contract handles: 1. **Registration**: Specifies how Operators register for the Blueprint, allowing the developer to add additional fees, access control, KYC, and any other EVM-deployable functionality to the registration process. -2. **Request**: Defines how customer of Blueprints initiates Service Instances, providing the same customizability as the EVM for the initialization process of Service Instances. +2. **Request**: Defines how customers create Services and fund them (pay-once or subscription). 3. **On Job Hooks**: Allows developers to specify custom logic to be executed when a job is created. 4. 
**On Job Result Hooks**: Allows developers to specify custom logic to be executed when a job is completed, such as verifying a job's output. -5. **Custom slashing conditions**: Allows developers to specify custom slashing conditions for Operators, such as failing to provide a service or providing a malicious service. These would be callable by anyone or privileged callers but is entirely up to the developer to design. +5. **Custom slashing conditions**: Allows developers to define slashing logic for misbehavior, scoped to their Service. -Blueprints empower developers to create, customize, and monetize technical service configurations on the Tangle Network, defining features, behavior, resource requirements, and the Gadget binary for services such as Threshold MPC, zero-knowledge provers, AI infrastructure, and more. Developers can programmatically incentivize operators to restake on their services and update them to improve and benefit from their long-term success. Tangle Blueprints are designed for reuse by various users and projects. +Blueprints let developers define monetizable services (MPC, ZK proving, AI infrastructure, oracles) with on-chain +verification and off-chain execution handled by operators running the Blueprint Manager. ### Customers -Tangle Network provides a user-friendly platform for customers to discover, deploy, and manage Blueprint Instances. Customers can access tailored technical services with varying configurations and features, specify their requirements (e.g., threshold of participants, registration criteria), and select a subset of Operators to provide the service. The stake recursion process increases the economic security of reliable services. +Customers discover blueprints, configure services, and select operators based on requirements (security, resources, price, +or reputation). They fund services in ETH or ERC20 tokens and can choose a one-time payment or subscription model. -Customers interact with Tangle by instancing Blueprints, which create Blueprint Instances. To initialize an Instance, a customer: +To initialize a Service, a customer typically: -1. Selects a participation selection strategy that satisfies the Blueprint's Request smart contract constraints. -2. Provides metadata required by the Instance. -3. Pays the fee required for the Instance configuration. +1. Chooses an operator set and any service parameters required by the blueprint. +2. Provides metadata required by the Service instance. +3. Funds the service fee required by the blueprint manager contract. ### Operators -Operators are incentivized to operate the Blueprint Instances by receiving rewards from the Tangle Network and fees from users who pay for the service. The rewards and incentives are distributed proportionally to the amount of stake that the Operator has restaked on the Instance. +Operators register for blueprints, accept service assignments, and run the service artifacts through the Blueprint Manager. +They earn an operator share of service payments (claimable from the `Tangle` contract) plus TNT incentives. + +### Payments and Incentives + +Service payments are split by the core protocol contracts in `tnt-core`: + +- **Developer + Treasury** shares transfer immediately. +- **Operator** share accrues as pending rewards in the `Tangle` contract. +- **Staker** share flows to `ServiceFeeDistributor` for later claims. + +Inflation incentives (TNT) are distributed separately via `InflationPool` and `RewardVaults`. 
For details, see +`https://github.com/tangle-network/tnt-core/blob/v2/PAYMENT_FLOW.md` and +`https://github.com/tangle-network/tnt-core/blob/v2/docs/rewards-system.md`. ## Composability and Ecosystem diff --git a/pages/developers/blueprints/manager.mdx b/pages/developers/blueprints/manager.mdx index 9d9f3875..f434b556 100644 --- a/pages/developers/blueprints/manager.mdx +++ b/pages/developers/blueprints/manager.mdx @@ -2,13 +2,19 @@ import ExpandableImage from '../../../components/ExpandableImage'; ## Blueprint Manager +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/manager + -On Tangle, Blueprints have an offchain and an onchain lifecycle. The offchain component is managed by what we call the **Blueprint Manager**. The **Blueprint Manager** can be considered **_Tangle's Operator Node_**. This onchain and offchain logic functions as follows: +On Tangle, Blueprints have both on-chain and off-chain lifecycles. The off-chain component is managed by the +**Blueprint Manager**, which is the operator runtime for Tangle v2 services. This on-chain and off-chain logic +functions as follows: 1. Operators must register for Blueprints onchain. This indicates an operators willingness to accept requests for Blueprint Instances of that type. -2. Operators upon registering for Blueprints onchain, download the Blueprint's binary and metadata from the Tangle Network. This is handled by the Blueprint Manager, which listens for new registrations. -3. Operators upon accepting Blueprint Instance requests, execute the Blueprint's binary. This is where the target environment of the Blueprint is important. The Blueprint Manager is responsible for executing the Blueprint's binary in the correct environment be it natively or in Docker or an alternative VM. +2. Operators download the Blueprint's artifacts and metadata after registering. The Blueprint Manager watches chain + events and keeps the operator runtime in sync. +3. Operators execute Blueprint services when they are assigned. The Blueprint Manager runs artifacts natively, in a + container, or inside a VM based on the blueprint definition. ### Blueprint and Service Instance Lifecycle @@ -16,10 +22,12 @@ On Tangle, Blueprints have an offchain and an onchain lifecycle. The offchain co Blueprints interact with the Tangle Network in several key ways: -1. Blueprints are deployed to Tangle, with their metadata and smart contracts stored and deployed on-chain. -2. Blueprints are instantiated, triggering the creation of an Instance, which represents a single AVS. The Instance runs for some period of time. -3. Blueprints are destroyed once they reach their time-to-live (TTL) or run out of funds to incentivize operators to run their service. +1. Blueprints are deployed to Tangle, with their metadata and manager contracts stored on-chain. +2. Customers instantiate a Service, which represents a single configured service instance. +3. Services end once they reach their time-to-live (TTL) or run out of funds to pay operators. Blueprints provide a useful abstraction, allowing developers to create reusable service infrastructures as if they were smart contracts. This enables developers to monetize their work and align long-term incentives with the success of their creations, benefiting proportionally to their Blueprint's usage. -The Blueprint object is the core restaking object in Tangle, implemented primarily in the `pallet-services` module of the Tangle codebase. 
Assets are viewed as being restaked on Blueprints, with Operators running Instances of Blueprints and users restaking/staking their assets with those Operators. +Blueprints and Services are managed by the Tangle v2 contracts in `tnt-core`, including the core `Tangle` contract and +the `BlueprintServiceManager` stack. Staking and operator registration live in these contracts, and the Blueprint +Manager watches their events to decide what to run. diff --git a/pages/developers/blueprints/pricing-engine.mdx b/pages/developers/blueprints/pricing-engine.mdx index 9819899f..8f427f82 100644 --- a/pages/developers/blueprints/pricing-engine.mdx +++ b/pages/developers/blueprints/pricing-engine.mdx @@ -4,199 +4,77 @@ title: Blueprint Pricing # Blueprint Pricing -This guide explains how pricing works for blueprint execution and how to integrate pricing functionality into your applications that use Tangle Network blueprints. +Tangle v2 supports fixed-price and quote-based pricing. The pricing engine is the operator-side RFQ server that produces +signed quotes for instant service creation. -## Prerequisites +## When to Use the Pricing Engine -- Understanding of Blueprint concepts and execution model -- Familiarity with Tangle Network architecture -- Basic knowledge of cryptographic signatures and verification +- Use it when your blueprint expects RFQ quotes and you want customers to create services immediately with signed + operator quotes. +- If your blueprint uses standard pay-once or subscription pricing, you can skip the pricing engine. -## Pricing Workflow +## End-to-End Flow -The complete workflow for getting quotes and executing blueprints consists of: +1. **Operator runs `pricing-engine-server`** with a pricing config and the Tangle RPC + contract addresses. +2. **Customer requests quotes** over gRPC (`GetPrice`) with TTL, proof-of-work, and security requirements. +3. **Operator returns a signed quote** (ECDSA signature over ABI-encoded `QuoteDetails`). +4. **Customer creates a service** by calling + [`ITangleServices.createServiceFromQuotes`](/developers/api/reference/ITangleServices) with the selected quotes. + Use `extendServiceFromQuotes` to add TTL later. -1. **Finding Operators**: Retrieve all operators registered for the blueprint to be executed. +## Quote Format -2. **Requesting Quotes**: Request price quotes from operators: +The gRPC schema matches `ITangle.SignedQuote`. The `QuoteDetails` payload looks like: - - Generate a proof-of-work for the request - - Create a properly formatted price request - - Submit the request via gRPC (at the address specified by each operator on-chain) - -3. **Processing Quotes**: When you receive quotes from operators: - - - Verify their signatures - - Validate the proof-of-work - - Compare prices - -4. **Selecting Operators**: Choose which operators to use based on their pricing quotes, typically selecting the most cost-effective options. - -5. **Submitting Request**: Submit an on-chain request that includes your selected quotes via the `request_with_signed_price_quotes` services extrinsic. - -6. **Blueprint Execution**: The selected operators will execute the blueprint according to the agreed terms. - -## Pricing API - -To request price quotes, your application will use the gRPC API provided by operators. You just need to send a `GetPrice` gRPC request to the operator's gRPC endpoint. 
Here's the service definition in protobuf: - -```protobuf -// The pricing service definition -service Pricing { - // Retrieves a signed price quote for a given blueprint - rpc GetPrice (GetPriceRequest) returns (GetPriceResponse); -} -``` - -### Message Types - -Here are the key message types you'll work with: - -```protobuf -// The pricing service definition -service PricingEngine { - // Retrieves a signed price quote for a given blueprint - rpc GetPrice (GetPriceRequest) returns (GetPriceResponse); -} - -// Asset type enumeration -enum AssetType { - CUSTOM = 0; - ERC20 = 1; -} - -// Asset type definition -message Asset { - oneof asset_type { - // Custom asset with a numeric identifier - uint64 custom = 1; - // ERC20 token with an H160 address - bytes erc20 = 2; - } -} - -// Security requirements for an asset -message AssetSecurityRequirements { - // The asset type - Asset asset = 1; - // Minimum exposure percentage (0-100) - uint32 minimum_exposure_percent = 2; - // Maximum exposure percentage (0-100) - uint32 maximum_exposure_percent = 3; -} - -// Security commitment for an asset -message AssetSecurityCommitment { - // The asset type - Asset asset = 1; - // Committed exposure percentage (0-100) - uint32 exposure_percent = 2; -} - -// Resource requirement for a specific resource type -message ResourceRequirement { - // Resource kind (CPU, Memory, GPU, etc.) - string kind = 1; - // Quantity required - uint64 count = 2; -} - -// Pricing for a specific resource type -message ResourcePricing { - // Resource kind (CPU, Memory, GPU, etc.) - string kind = 1; - // Quantity of the resource - uint64 count = 2; - // Price per unit in USD with decimal precision - double price_per_unit_rate = 3; -} - -// Request message for GetPrice RPC -message GetPriceRequest { - // The blueprint ID - uint64 blueprint_id = 1; - // Time-to-live for service in blocks - uint64 ttl_blocks = 2; - // Proof of work to prevent DDOS - bytes proof_of_work = 3; - // Optional resource recommendations - repeated ResourceRequirement resource_requirements = 4; - // Security requirements for assets - AssetSecurityRequirements security_requirements = 5; -} - -// Response message for GetPrice RPC -message GetPriceResponse { - // The quote details - QuoteDetails quote_details = 1; - // Signature of the hash of the body - bytes signature = 2; - // Operator ID - bytes operator_id = 3; - // Proof of work response - bytes proof_of_work = 4; -} - -// The detailed quote information +```proto message QuoteDetails { - // The blueprint ID uint64 blueprint_id = 1; - // Time-to-live for service in blocks uint64 ttl_blocks = 2; - // Total cost in USD with decimal precision double total_cost_rate = 3; - // Timestamp when quote was generated uint64 timestamp = 4; - // Expiry timestamp uint64 expiry = 5; - // Resource pricing details repeated ResourcePricing resources = 6; - // Security commitments for assets - AssetSecurityCommitment security_commitments = 7; + repeated AssetSecurityCommitment security_commitments = 7; } ``` -## Implementation Steps +## Pricing Configuration -### 1. Generating Proof-of-Work +Pricing rates live in a TOML file. A default section applies to every blueprint, and you can override rates per +blueprint ID. The SDK ships a sample at +`crates/pricing-engine/config/default_pricing.toml`. -Before requesting a quote, you need to generate a valid proof-of-work for the request. The Operator pricing server uses the Equix Equihash Rust implementation for proof-of-work generation. +## Runbook (Operator) -### 2. 
Creating and sending a Price Request - -Next, create a price request with your requirements using the types defined in [Message Types](#message-types) and send it to each operator registered to the blueprint you want to run. Both the registered operators and their gRPC endpoints are available on-chain. - -### 3. Verifying Quote Signatures - -When you receive quotes, verify their authenticity by using the hash of the quote details and the operator's signature of the quote's hash that was included in the response. - -### 4. Selecting Operators - -After verifying the quotes, select the operators you want to use based on their pricing and security commitments. It is recommended to automatically select the lowest-priced operators. - -### 5. Requesting service with quotes - -After selecting the operators, submit your request to the blockchain using the `request_with_signed_price_quotes` services extrinsic with the selected operators and their quotes. - -## Understanding Pricing Calculations - -Operators calculate prices using this formula: +`pricing-engine-server` reads `operator.toml` and the pricing config, then watches the Tangle v2 contracts: +```bash +OPERATOR_HTTP_RPC=https://rpc.tangle.tools \ +OPERATOR_WS_RPC=wss://rpc.tangle.tools \ +OPERATOR_TANGLE_CONTRACT=0x... \ +OPERATOR_RESTAKING_CONTRACT=0x... \ +OPERATOR_STATUS_REGISTRY_CONTRACT=0x... \ +cargo run -p blueprint-pricing-engine --bin pricing-engine-server ``` -Price = Resource Cost × Duration Factor × Security Factor -``` -This means that the total cost of a blueprint execution is the sum of the prices from all selected operators. +Important environment variables: + +- `OPERATOR_CONFIG_PATH` (operator.toml) +- `PRICING_CONFIG_PATH` (pricing TOML) +- `OPERATOR_HTTP_RPC` / `OPERATOR_WS_RPC` +- `OPERATOR_TANGLE_CONTRACT` +- `OPERATOR_RESTAKING_CONTRACT` +- `OPERATOR_STATUS_REGISTRY_CONTRACT` -Where: +## Security Notes -- **Resource Cost** = resource_count × price_per_unit_rate -- **Duration Factor** = time_blocks × BLOCK_TIME -- **Security Factor** = Factor based on security requirements +- `GetPrice` uses proof-of-work to rate-limit RPC abuse. +- Quotes include TTL + expiry and are signed with the operator ECDSA key to prevent replay. -Understanding this helps you estimate costs and evaluate quotes effectively. +## Source of Truth -## Best Practices +- Pricing engine README: https://github.com/tangle-network/blueprint/blob/v2/crates/pricing-engine/README.md +- Pricing proto: https://github.com/tangle-network/blueprint/blob/v2/crates/pricing-engine/proto/pricing.proto 1. **Get Multiple Quotes**: Always request quotes from all registered operators to compare prices 2. **Verify All Signatures**: Always verify the signature of each quote before using it diff --git a/pages/developers/blueprints/use-cases.mdx b/pages/developers/blueprints/use-cases.mdx index 119cb471..c329e097 100644 --- a/pages/developers/blueprints/use-cases.mdx +++ b/pages/developers/blueprints/use-cases.mdx @@ -3,7 +3,7 @@ import GithubRepoCard, { GithubRepoList } from "../../../components/GithubRepoCa # Use Cases -Tangle Network enables developers to rapidly build and deploy secure multi-party services through our Blueprint system. Blueprints are reusable templates that can be instantly deployed as live services backed by Tangle's decentralized operator network. +Tangle Network enables developers to rapidly build and deploy secure multi-party services through our Blueprint system. 
Blueprints are reusable templates that can be instantly deployed as live services backed by Tangle's decentralized operator network, including AI agents, inference workflows, and verifiable computation pipelines. ## Bridges @@ -21,24 +21,6 @@ Tangle Network enables developers to rapidly build and deploy secure multi-party displayStyle="row" /> -{/* TODO: Add URL */} - - - -{/* TODO: Add URL */} - - - ## AI --eigenlayer [BLS|ECDSA] -``` - -#### Create Options - -- `-n, --name `: Name of your AVS project -- `--eigenlayer`: Specify either BLS or ECDSA variant - -### Deploy AVS Contracts - -```bash -cargo tangle blueprint deploy eigenlayer [OPTIONS] -``` - -#### Deploy Options - -- `--rpc-url `: HTTP RPC endpoint to connect to (required unless --devnet is used) -- `--contracts-path `: Path to your contracts directory (defaults to "./contracts") -- `--ordered-deployment`: Deploy contracts in an interactive ordered manner (if omitted, deploys as they are found) -- `-w, --network `: Network to deploy to (local, testnet, mainnet) [default: local] -- `--devnet`: Start a local devnet using Anvil (only valid with --network local) -- `-k, --keystore-path `: Path to your keystore (defaults to ./keystore) - -### Run AVS Service - -```bash -cargo tangle blueprint run [OPTIONS] -``` - -#### Run Options - -- `-p, --protocol eigenlayer`: Specify Eigenlayer protocol -- `-u, --rpc-url `: HTTP RPC endpoint URL (required) -- `-k, --keystore-path `: Path to the keystore (defaults to ./keystore) -- `-b, --binary-path `: Path to the AVS binary (optional, will build if not provided) -- `-w, --network `: Network to connect to (local, testnet, mainnet) -- `-d, --data-dir `: Data directory path (defaults to ./data) -- `-n, --bootnodes `: Optional bootnodes to connect to -- `-f, --settings-file `: Path to the protocol settings env file (defaults to ./settings.env) - -### Required Environment Variables for Eigenlayer - -The following environment variables must be set in your settings.env file: - -- `REGISTRY_COORDINATOR_ADDRESS`: Address of the Registry Coordinator contract -- `OPERATOR_STATE_RETRIEVER_ADDRESS`: Address of the Operator State Retriever contract -- `DELEGATION_MANAGER_ADDRESS`: Address of the Delegation Manager contract -- `SERVICE_MANAGER_ADDRESS`: Address of the Service Manager contract -- `STAKE_REGISTRY_ADDRESS`: Address of the Stake Registry contract -- `STRATEGY_MANAGER_ADDRESS`: Address of the Strategy Manager contract -- `AVS_DIRECTORY_ADDRESS`: Address of the AVS Directory contract -- `REWARDS_COORDINATOR_ADDRESS`: Address of the Rewards Coordinator contract - -## Example Workflow - -1. **Create a New AVS Project** - - ```bash - cargo tangle blueprint create -n my-avs --eigenlayer bls - ``` - -2. **Build Your AVS** - - ```bash - cargo build --release - ``` - -3. **Deploy Contracts** - - ```bash - # Deploy to local devnet - cargo tangle blueprint deploy eigenlayer \ - --devnet \ - --ordered-deployment - - # Or deploy to testnet - cargo tangle blueprint deploy eigenlayer \ - --network testnet \ - --rpc-url \ - --ordered-deployment - ``` - -4. **Run Your Service** - ```bash - cargo tangle blueprint run \ - -p eigenlayer \ - -u - ``` - -## Troubleshooting - -Common issues and solutions: - -1. **Deployment Failures** - - - Verify RPC endpoint is accessible - - Ensure correct network is specified (local, testnet, mainnet) - - Check contract constructor arguments - - Verify sufficient funds for deployment - -2. 
**Service Issues** - - Check settings.env file contains all required contract addresses - - Verify contract addresses are correct for the chosen network - - Ensure binary is built with correct features - - Check network connectivity and RPC endpoint accessibility diff --git a/pages/developers/cli/installation.mdx b/pages/developers/cli/installation.mdx index 0db69cdd..04cf3364 100644 --- a/pages/developers/cli/installation.mdx +++ b/pages/developers/cli/installation.mdx @@ -4,6 +4,8 @@ title: Installation # Tangle CLI Installation +CLI source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/cli + ## Prerequisites 1. Install Rust diff --git a/pages/developers/cli/keys.mdx b/pages/developers/cli/keys.mdx index 0c82dada..2043b4d3 100644 --- a/pages/developers/cli/keys.mdx +++ b/pages/developers/cli/keys.mdx @@ -4,9 +4,11 @@ title: Tangle CLI Key Commands # Tangle CLI Key Commands -This guide covers the key management commands available in the Tangle CLI tool. These commands allow you to generate, import, export, and list cryptographic keys used by Tangle and Eigenlayer protocols. +CLI source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/cli -## Key Commands Overview +This guide covers the key management commands available in the Tangle CLI tool. These commands allow you to generate, import, export, and list cryptographic keys used by Tangle protocols. + +## Key Commands The Tangle CLI provides a set of commands for managing cryptographic keys: @@ -64,10 +66,8 @@ cargo tangle key import --keystore-path [OPTIONS] - `-k, --keystore-path ` (**required**): The path to the keystore - `-t, --key-type ` (optional): The type of key to import (sr25519, ed25519, ecdsa, bls381, bls377, bn254) - `-x, --secret ` (optional): The secret key to import (hex format without 0x prefix). Only required if key type is also specified -- `-p, --protocol ` (optional): The protocol you are generating keys for (Eigenlayer or Tangle) which only applies to ECDSA keys [default: tangle] - -When importing a key, you must specify the key type and secret key. If you don't specify a key type and secret, the CLI will prompt you interactively. Specifying the secret but not -the key type will also prompt you to select a key type. + When importing a key, you must specify the key type and secret key. If you don't specify a key type and secret, the CLI will prompt you interactively. Specifying the secret but not + the key type will also prompt you to select a key type. ### Examples @@ -75,9 +75,6 @@ the key type will also prompt you to select a key type. 
# Import an ECDSA key for Tangle cargo tangle key import --keystore-path ./keystore --key-type ecdsa --secret -# Import an ECDSA key for Eigenlayer -cargo tangle key import --keystore-path ./keystore --key-type ecdsa --protocol eigenlayer --secret - # Import an sr25519 key cargo tangle key import --keystore-path ./keystore --key-type sr25519 --secret @@ -152,7 +149,7 @@ cargo tangle key generate-mnemonic --word-count 24 The Tangle CLI supports the following key types: -- `sr25519`: Schnorrkel/Ristretto x25519 (used by Substrate) +- `sr25519`: Schnorrkel/Ristretto x25519 - `ed25519`: Edwards-curve Digital Signature Algorithm - `ecdsa`: Elliptic Curve Digital Signature Algorithm - `bls381`: Boneh-Lynn-Shacham signatures on BLS12-381 curve diff --git a/pages/developers/cli/quickstart.mdx b/pages/developers/cli/quickstart.mdx index 3d299c51..c7b4646b 100644 --- a/pages/developers/cli/quickstart.mdx +++ b/pages/developers/cli/quickstart.mdx @@ -4,6 +4,8 @@ title: Quickstart # Tangle CLI Quickstart +CLI source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/cli + ## Pre-requisites - [git](https://git-scm.org) @@ -11,9 +13,8 @@ title: Quickstart ## Creating a New Tangle Blueprint -1. For an in-depth guide to commands, see the [Tangle Blueprint Commands](./tangle.mdx). If you are - looking to get started building an Eigenlayer AVS, see the [eigenlayer section](./eigenlayer.mdx) instead. - To get started right away with creating a Tangle Blueprint, open your terminal and run: +1. For an in-depth guide to commands, see the [Tangle Blueprint Commands](./tangle.mdx). To get started right away + with creating a Tangle Blueprint, open your terminal and run: ```shell cargo tangle blueprint create --name diff --git a/pages/developers/cli/tangle.mdx b/pages/developers/cli/tangle.mdx index 4655fc39..d045b2bd 100644 --- a/pages/developers/cli/tangle.mdx +++ b/pages/developers/cli/tangle.mdx @@ -1,283 +1,123 @@ --- -title: Tangle Blueprints +title: Tangle CLI Blueprint Commands --- # Tangle CLI Blueprint Commands -This guide covers the commands available for creating, testing, and interacting with Tangle Blueprints. -It also walks you through a full [demo](#step-by-step-demo) for creating, deploying, and running a Tangle Blueprint locally. +CLI source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/cli -## Creating a Blueprint +This page covers the `cargo-tangle` CLI surface for creating and operating blueprints on Tangle’s EVM protocol. -We offer a template for getting started with building a Tangle Blueprint. The `create` command allows you to -generate a new blueprint from our template or even a custom one: +Note: the CLI still uses the legacy flag name `--restaking-contract`. It refers to the staking contract (`MultiAssetDelegation`). 
-```shell -cargo tangle blueprint create --name -``` - -Options: - -- `--name` (**required**): The name of the blueprint -- `--repo` (**optional and conflicts with `--path`**): The repository to pull the template from (defaults to our blueprint template) -- `--branch` (**optional and conflicts with `--tag`**): The branch of the template to pull from if `--repo` is specified -- `--tag` (**optional and conflicts with `--branch`**): The tag of the template to pull from if `--repo` is specified -- `--path` (**optional and conflicts with `--repo`**): The path to copy a template from - -## Deploying a Blueprint - -Deploy your Blueprint to the Tangle Network: - -```shell -cargo tangle blueprint deploy tangle --ws-rpc-url --http-rpc-url --keystore-path -``` - -Options: - -- `--ws-rpc-url`: WebSocket RPC URL (default: `wss://rpc.tangle.tools`, automatically uses devnet URL - if `--devnet` is specified) -- `--http-rpc-url`: HTTP RPC URL (default: `https://rpc.tangle.tools`, automatically uses devnet URL - if `--devnet` is specified) -- `--package`: The blueprint package to deploy (used for workspaces with multiple packages) -- `--keystore-path`: Path to the keystore (default: `./keystore`) -- `--devnet`: Run a local devnet in background for testing, with WebSocket RPC URL - defaulting to `ws://127.0.0.1:9944` and HTTP RPC URL defaulting to `http://127.0.0.1:9944` - -## Listing Blueprints - -List Blueprints deployed on Tangle: - -```shell -cargo tangle blueprint list-blueprints --ws-rpc-url -``` - -Alias: `lb` - -Options: - -- `--ws-rpc-url`: WebSocket RPC URL (default: `ws://127.0.0.1:9944`) - -## Registering for a Blueprint - -Register as an operator for a Blueprint: - -```shell -cargo tangle blueprint register --ws-rpc-url --blueprint-id --keystore-uri -``` - -Alias: `reg` - -Options: - -- `--ws-rpc-url`: WebSocket RPC URL (default: `ws://127.0.0.1:9944`) -- `--blueprint-id` (**required**): The ID of the Blueprint to register for -- `--keystore-uri`: Path to the keystore (default: `./keystore`) - -## Requesting a Service - -Request a service from a Blueprint: - -```shell -cargo tangle blueprint request-service --ws-rpc-url --blueprint-id --min-exposure-percent --max-exposure-percent --target-operators --value --keystore-uri -``` - -Alias: `req` - -Options: - -- `--ws-rpc-url`: WebSocket RPC URL (default: `ws://127.0.0.1:9944`) -- `--blueprint-id` (**required**): The ID of the Blueprint to request service from -- `--min-exposure-percent`: Minimum exposure percentage (default: `50`) -- `--max-exposure-percent`: Maximum exposure percentage (default: `80`) -- `--target-operators` (**required**): List of target operator account IDs -- `--value` (**required**): Payment amount for the service -- `--keystore-uri`: Path to the keystore (default: `./keystore`) - -## Listing Service Requests - -List pending service requests: - -```shell -cargo tangle blueprint list-requests --ws-rpc-url -``` - -Alias: `ls` - -Options: - -- `--ws-rpc-url`: WebSocket RPC URL (default: `ws://127.0.0.1:9944`) +For the full command surface, use: -## Accepting a Service Request - -Accept a service request as an operator: - -```shell -cargo tangle blueprint accept-request --ws-rpc-url --min-exposure-percent --max-exposure-percent --restaking-percent --keystore-uri --request-id -``` - -Alias: `accept` - -Options: - -- `--ws-rpc-url`: WebSocket RPC URL (default: `ws://127.0.0.1:9944`) -- `--min-exposure-percent`: Minimum exposure percentage (default: `50`) -- `--max-exposure-percent`: Maximum exposure percentage (default: `80`) -- 
`--restaking-percent`: Restaking percentage (default: `50`) -- `--keystore-uri`: Path to the keystore (default: `./keystore`) -- `--request-id` (**required**): The ID of the request to accept - -## Rejecting a Service Request - -Reject a service request as an operator: - -```shell -cargo tangle blueprint reject-request --ws-rpc-url --keystore-uri --request-id +```bash +cargo tangle --help +cargo tangle blueprint --help ``` -Alias: `reject` - -Options: - -- `--ws-rpc-url`: WebSocket RPC URL (default: `ws://127.0.0.1:9944`) -- `--keystore-uri`: Path to the keystore (default: `./keystore`) -- `--request-id` (**required**): The ID of the request to reject - -## Running a Blueprint - -Run a Blueprint with the specified configuration: - -```shell -cargo tangle blueprint run --protocol tangle --rpc-url --keystore-path --settings-file -``` - -Options: - -- `--protocol`: Protocol to run (must be `tangle`) -- `--rpc-url`: HTTP RPC URL (default: `http://127.0.0.1:9944`) -- `--keystore-path`: Path to the keystore (default: `./keystore`) -- `--binary-path`: Path to the AVS binary (optional) -- `--network`: Type of network you are connecting to (local, testnet, mainnet) (default: `local`) -- `--data-dir`: Data directory path (default: `./data`) -- `--bootnodes`: Optional bootnodes to connect to -- `--settings-file`: Path to the protocol settings env file, if not specified then - you will be prompted for the required information (default: `./settings.env`) - -## Submitting a Job - -Submit a job to a running Blueprint: +## Create a Blueprint -```shell -cargo tangle blueprint submit --ws-rpc-url --service-id --blueprint-id --keystore-uri --job --params-file --watcher +```bash +cargo tangle blueprint create --name ``` -Options: - -- `--ws-rpc-url`: WebSocket RPC URL (default: `ws://127.0.0.1:9944`) -- `--service-id`: The service ID to submit the job to -- `--blueprint-id` (**required**): The Blueprint ID to submit the job to -- `--keystore-uri` (**required**): Path to the keystore -- `--job` (**required**): The job ID to submit -- `--params-file` (**optional**): Path to a JSON file containing job parameters -- `--watcher` (**optional**): Whether to wait for the job to complete. If specified, the command will block until the job is completed, returning the job result. - -## Step-by-Step Demo - -Below is a complete demo for creating, deploying, and running a Tangle Blueprint. Before we get started, there are a few things to note. -To walk through the flow of the demo, we act as two different accounts: - -1. The Blueprint owner -2. The Operator +## Generate Registration Inputs (Optional) -The Blueprint owner is the account that will deploy the Blueprint, request service, and submit jobs. -The Operator is the account that will respond to service requests and run Blueprints. +If you want to generate registration inputs without sending a transaction (for review or CI): -The Blueprint owner uses the `./deploy-keystore` keystore, while the Operator uses the `./test-keystore` keystore. Both of these -are generated when deploying with the `--devnet` flag. - -### Prerequisites - -1. Install Cargo Tangle CLI, see [installation](./installation.mdx). - -We will create a new Blueprint using the `create` command. For this demo, we won't make any changes to our blueprint. -The Blueprint generated from the template is ready to be tested as-is. - -```shell - cargo tangle blueprint create --name my-blueprint +```bash +cargo tangle blueprint preregister \ + --http-rpc-url https://... \ + --ws-rpc-url wss://... 
\ + --tangle-contract 0x... \ + --restaking-contract 0x... \ + --status-registry-contract 0x... \ + --settings-file ./settings.env ``` -With the blueprint created, we will navigate into the blueprint's directory. +## Register as an Operator for a Blueprint -```shell - cd my-blueprint -``` +Register your operator against the on-chain protocol contracts: -2. Deploy the Blueprint: - -Now we will deploy the newly created Blueprint using the `deploy` command. We use the `--devnet` flag to start a local devnet in -background for easy testing. We will leave this terminal running to keep the testnet running. - -```shell - cargo tangle blueprint deploy tangle --devnet +```bash +cargo tangle blueprint register \ + --http-rpc-url https://... \ + --ws-rpc-url wss://... \ + --keystore-path ./keystore \ + --tangle-contract 0x... \ + --restaking-contract 0x... \ + --status-registry-contract 0x... \ + --blueprint-id <blueprint-id> \ + --rpc-endpoint "https://operator.example.com" ``` -3. List deployed Blueprints: +## Service Lifecycle -In a new terminal in the blueprint's directory, we can list the deployed Blueprints using the `list-blueprints` command. This gives us some important information, namely the Blueprint ID. +The `cargo tangle blueprint service` namespace mirrors key service lifecycle actions: -```shell - cargo tangle blueprint list-blueprints -``` +- `service request` (request/approve flow) +- `service approve` / `service reject` +- `service join` / `service leave` (dynamic services) +- `service list` / `service requests` -4. Register to the deployed blueprint: +Examples and flags are subject to change; prefer `--help` output for your installed CLI build: -Before we can interact with the Blueprint, we need to register to it. - -```shell - cargo tangle blueprint register --blueprint-id 0 --keystore-uri ./test-keystore +```bash +cargo tangle blueprint service --help +cargo tangle blueprint service request --help ``` -5. Request service with another account: +Example (operator approval flow): -With the account that owns the Blueprint, we request service from our Operator. The target -operator is the account that will run the Blueprint and is given in the output of the deployment. +```bash +cargo tangle blueprint service requests \ + --http-rpc-url "$RPC_URL" \ + --tangle-contract "$TANGLE_CONTRACT" -```shell -cargo tangle blueprint request-service --blueprint-id 0 --target-operators 5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY --value 0 --keystore-uri ./deploy-keystore +cargo tangle blueprint service approve \ + --http-rpc-url "$RPC_URL" \ + --tangle-contract "$TANGLE_CONTRACT" \ + --restaking-contract "$RESTAKING_CONTRACT" \ + --status-registry-contract "$STATUS_REGISTRY_CONTRACT" \ + --request-id 42 ``` -6. List all service requests: +## Run the Operator Runtime -Now that we requested service, we must find that request and accept it. So we list the service -requests to get some information, specifically the request ID. +Run the blueprint manager/runner against a deployment: -```shell -cargo tangle blueprint list-requests +```bash +cargo tangle blueprint run \ + --protocol tangle-evm \ + --http-rpc-url https://... \ + --ws-rpc-url wss://... \ + --keystore-path ./keystore \ + --settings-file ./settings.env ``` -7. Accept the service request: - -With the account that will run the Blueprint, we accept the service request.
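+The `service requests` and `service approve` examples above use shell variables for the RPC endpoint and the protocol contract addresses. A minimal sketch of setting them (placeholder values only; substitute the endpoint and contract addresses for the deployment you target):
+
+```bash
+# Placeholder values for the lifecycle examples above; replace them with the
+# RPC endpoint and protocol contract addresses of your target network.
+export RPC_URL="https://rpc.tangle.tools"
+export TANGLE_CONTRACT="0x0000000000000000000000000000000000000000"
+export RESTAKING_CONTRACT="0x0000000000000000000000000000000000000000"
+export STATUS_REGISTRY_CONTRACT="0x0000000000000000000000000000000000000000"
+```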
+For local validation only, you can spawn a single service without on-chain transactions: -```shell -cargo tangle blueprint accept-request --request-id 0 --keystore-uri ./test-keystore +```bash +cargo tangle blueprint service spawn \ + --http-rpc-url https://... \ + --ws-rpc-url wss://... \ + --tangle-contract 0x... \ + --restaking-contract 0x... \ + --status-registry-contract 0x... \ + --blueprint-id <blueprint-id> \ + --service-id <service-id> \ + --dry-run ``` -8. Run the Blueprint: +## Jobs (Submit + Watch) -We are now ready to start running the Blueprint, so that we are able to complete any submitted jobs. -Once we start running the Blueprint, the process will continue running until we manually stop it. -We will leave this terminal running so we can submit a job to be completed. - -```shell -cargo tangle blueprint run --protocol tangle --keystore-path ./test-keystore +```bash +cargo tangle blueprint jobs --help ``` -9. Submit a job for the running Blueprint to process: - -In a third terminal in the blueprint's directory, we run the following command to submit a job and wait for the result. - -```shell -cargo tangle blueprint submit --job 0 --blueprint-id 0 --service-id 0 --watcher --keystore-uri ./deploy-keystore -``` +## Next Reading -You should then see the result of the job in the terminal, meaning that demo was completed successfully! +- [Build a Tangle Blueprint](/developers/blueprints/introduction) +- [Endpoints and Integration](/developers/endpoints) diff --git a/pages/developers/contribute.mdx b/pages/developers/contribute.mdx index c24eebb1..db048244 100644 --- a/pages/developers/contribute.mdx +++ b/pages/developers/contribute.mdx @@ -2,9 +2,11 @@ import { RepoArea } from "../../components/RepoArea.tsx" # Contribute -All of Tangle's technology is open source and we invite researchers, developers and enthusiasts to participate in creating a world of more natural privacy. +All of Tangle's technology is open source, and we invite researchers, developers, operators, and builders to improve the +protocol, SDK, and tooling for autonomous work. -Our mission **is** privacy and security. We encourage all users to report any security vulnerabilities or bugs that they discover while using our products. +Security and reliability are core to the mission. We encourage all users to report any security vulnerabilities or bugs +that they discover while using our products. ## Submitting a Bug Report diff --git a/pages/developers/deployment/introduction.mdx b/pages/developers/deployment/introduction.mdx index 893a9c92..dc17361d 100644 --- a/pages/developers/deployment/introduction.mdx +++ b/pages/developers/deployment/introduction.mdx @@ -27,4 +27,4 @@ cargo tangle blueprint deploy tangle --devnet See [deploy command reference](/developers/cli/tangle#deploying-a-blueprint) for all options.
-[cargo-tangle]: /developers/cli/installation +[cargo-tangle]: https://github.com/tangle-network/blueprint/tree/v2/cli diff --git a/pages/developers/deployment/sources/_meta.ts b/pages/developers/deployment/sources/_meta.ts index e5d670e6..602ab794 100644 --- a/pages/developers/deployment/sources/_meta.ts +++ b/pages/developers/deployment/sources/_meta.ts @@ -4,7 +4,6 @@ const meta: Meta = { introduction: "Introduction", native: "Native", container: "Container", - tee: "Trusted Execution Environment (WIP)", wasm: "WASM (WIP)", testing: "Testing", }; diff --git a/pages/developers/deployment/sources/tee.mdx b/pages/developers/deployment/sources/tee.mdx deleted file mode 100644 index e7192949..00000000 --- a/pages/developers/deployment/sources/tee.mdx +++ /dev/null @@ -1,31 +0,0 @@ -# Trusted Execution Environment Sources - -The `TEE` (Trusted Execution Environment) source is used for blueprints that are built as container images, and intended to -be deployed to a [dstack] TEE. - -## Requirements - -The requirements for running TEE blueprints are available [here](/operators/manager/requirements#tee-sources) - -## Format - -The `TEE` source has the following format: - -```rust -// TBD... -``` - -Where: - -- TODO - -And they can be specified in the manifest of your binary crate like so: - -```toml -[package.metadata.blueprint] -sources = [ - { type = "TEE", ... }, -] -``` - -[dstack]: https://github.com/Dstack-TEE/dstack diff --git a/pages/developers/eigenlayer-avs/_meta.ts b/pages/developers/eigenlayer-avs/_meta.ts deleted file mode 100644 index e390d9d6..00000000 --- a/pages/developers/eigenlayer-avs/_meta.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - overview: "Overview", - "incredible-squaring-avs": "Incredible Squaring AVS", - "bls-template": "Eigenlayer BLS Template", - "ecdsa-template": "Eigenlayer ECDSA Template", - "bls-example": "Deploying an Eigenlayer AVS", -}; - -export default meta; diff --git a/pages/developers/eigenlayer-avs/bls-example.mdx b/pages/developers/eigenlayer-avs/bls-example.mdx deleted file mode 100644 index e7f2e988..00000000 --- a/pages/developers/eigenlayer-avs/bls-example.mdx +++ /dev/null @@ -1,70 +0,0 @@ -# Deploying and running an Eigenlayer AVS from the BLS Template - -The following is a step-by-step guide for deploying and running an AVS using the [`cargo-tangle` CLI](/developers/cli/installation). - -## Prerequisites - -- [`cargo-tangle` CLI](/developers/cli/installation) -- [docker desktop](https://docs.docker.com/desktop/) -- [foundry](https://book.getfoundry.sh/) (stable) -- [rust](https://www.rust-lang.org/tools/install) - -## Generating the AVS from the Template - -Run the following command, answering each prompt for project information. For anything you aren't certain about, the default selection is a safe choice. This command creates a BLS AVS called `my-avs`. - -```bash -cargo tangle blueprint create --name my-avs --eigenlayer bls -``` - -Now that you have generated your AVS project, move to the newly created directory. - -```bash -cd my-avs -``` - -## Deploying the AVS to a local testnet - -The AVS is fully ready to be deployed, perfect for testing locally. The following command will start a local testnet in the background and deploy the AVS's necessary contracts. - -```bash -cargo tangle blueprint deploy eigenlayer \ - --devnet \ - --ordered-deployment -``` - -In this case, you want to deploy the contracts in an ordered manner. 
Specifically, you need to deploy the TangleTaskManager before the TangleServiceManager, since the Service Manager takes the Task Manager's address as a constructor argument. - -When you deploy the TangleTaskManager, you are also given the option to initialize it. You want to make sure you do this, supplying the initialization arguments when prompted. - -You will be prompted for each contract's constructor arguments (if it has any). You can find the addresses of the contracts you need in your project's settings.env file. Below is a complete list of addresses you will need. Some are zero addresses because they aren't actually used. - -### Default Addresses for demo deployment - -| Address | Value | -| -------------------- | ------------------------------------------ | -| Registry Coordinator | Obtained from Deployment output | -| Pauser Registry | Obtained from Deployment output | -| Initial Owner | 0x70997970C51812dc3A010C7d01b50e0d17dc79C8 | -| Aggregator | 0xa0Ee7A142d267C1f36714E4a8F75612F20a79720 | -| Generator | 0x15d34AAf54267DB7D7c367839AAf71A00a2C6A65 | -| AVS Directory | 0x5fc8d32690cc91d4c39d9d3abcbd16989f875707 | -| Rewards Coordinator | 0xb7f8bc63bbcad18155201308c8f3540b07f84f5e | -| Stake Registry | 0x4c5859f0f772848b2d91f1d83e2fe57935348029 | -| Tangle Task Manager | Obtained from Deployment output | - -Once all contracts have been deployed, you will notice that it continues running to keep the testnet alive. - -## Running the AVS - -In a new terminal, run the following command in the project's directory (make sure to replace the task manager address and the RPC URL with the values obtained from your deployment output): - -```bash -TASK_MANAGER_ADDRESS= cargo tangle blueprint run \ - -p eigenlayer \ - -u \ - --keystore-path ./test-keystore -``` - -Running this once, will register the example operator (it uses Anvil's account 0 by default) and then exit the process. Once the registration is complete, you simply need to run the same command again. -Upon running the command for a second time, you should see "Successfully ran job function!" printed to the terminal, signifying that the job function was successfully executed! diff --git a/pages/developers/eigenlayer-avs/bls-template.mdx b/pages/developers/eigenlayer-avs/bls-template.mdx deleted file mode 100644 index 3303ac6f..00000000 --- a/pages/developers/eigenlayer-avs/bls-template.mdx +++ /dev/null @@ -1,177 +0,0 @@ ---- -title: Using the EigenLayer BLS Template ---- - -# Building AVS with the EigenLayer BLS Template - -## Introduction - -This will guide you through the process of utilizing our BLS Template for starting your own AVS with BLS-oriented Smart -Contracts. The [Incredible Squaring AVS](/developers/eigenlayer-avs/incredible-squaring-avs) is an example of an AVS -that uses BLS contracts. - -## Prerequisites - -- [`cargo-tangle` CLI](/developers/cli/installation) -- [docker desktop](https://docs.docker.com/desktop/) -- [foundry](https://book.getfoundry.sh/) (stable) -- [rust](https://www.rust-lang.org/tools/install) - -## Complete Workflow - -### 1. Installation - -```shell -cargo install cargo-tangle --git https://github.com/tangle-network/blueprint --force -``` - -### 2. Creating Your Project - -To create a new BLS AVS, use the following command: - -```shell -cargo tangle blueprint create --name --eigenlayer bls -``` - -Replace `` with your desired AVS name. You'll be prompted with configuration questions - if unsure, the default selections are recommended. - -### 3. 
Building Your AVS - -```shell -cargo build --release -``` - -### 4. Deploying Your Contracts - -For local development: - -```shell -# Deploy to local devnet -cargo tangle blueprint deploy eigenlayer \ - --devnet \ - --ordered-deployment -``` - -If you're using the template as is - -For testnet deployment: - -```shell -# Deploy to testnet -cargo tangle blueprint deploy eigenlayer \ - --network testnet \ - --rpc-url \ - --ordered-deployment -``` - -### Addresses for deployment - -If you're using the template as is, you can find the addresses of the contracts you need in your project's settings.env file. Others are below. - -| Address | Value | -| -------------------- | ------------------------------------------ | -| Registry Coordinator | Obtained from Deployment output | -| Pauser Registry | Obtained from Deployment output | -| Initial Owner | 0x70997970C51812dc3A010C7d01b50e0d17dc79C8 | -| Aggregator | 0xa0Ee7A142d267C1f36714E4a8F75612F20a79720 | -| Generator | 0x15d34AAf54267DB7D7c367839AAf71A00a2C6A65 | -| AVS Directory | 0x5fc8d32690cc91d4c39d9d3abcbd16989f875707 | -| Rewards Coordinator | 0xb7f8bc63bbcad18155201308c8f3540b07f84f5e | -| Stake Registry | 0x4c5859f0f772848b2d91f1d83e2fe57935348029 | -| Tangle Task Manager | Obtained from Deployment output | - -Once all contracts have been deployed, you will notice that it continues running to keep the testnet alive. - -#### Deployment Options - -- `--rpc-url `: HTTP RPC endpoint (required unless using --devnet) -- `--contracts-path `: Path to contracts directory (defaults to "./contracts") -- `--ordered-deployment`: Interactive ordered deployment -- `-w, --network `: Target network (local, testnet, mainnet) -- `--devnet`: Start local Anvil devnet -- `-k, --keystore-path `: Keystore path (defaults to ./keystore) - -### 5. Running Your Service - -```shell -cargo tangle blueprint run \ - -p eigenlayer \ - -u \ - (OPTIONAL) --keystore-path ./test-keystore -``` - -#### Run Options - -- `-p, --protocol eigenlayer`: Specify Eigenlayer protocol -- `-u, --rpc-url `: HTTP RPC endpoint URL (required) -- `-k, --keystore-path `: Keystore path (defaults to ./keystore) -- `-b, --binary-path `: AVS binary path (optional) -- `-w, --network `: Network to connect to -- `-d, --data-dir `: Data directory path (defaults to ./data) -- `-n, --bootnodes `: Optional bootnodes -- `-f, --settings-file `: Protocol settings file path (defaults to ./settings.env) - -### 6. Required Environment Variables - -Your settings.env file must include: - -- `REGISTRY_COORDINATOR_ADDRESS`: Registry Coordinator contract address -- `OPERATOR_STATE_RETRIEVER_ADDRESS`: Operator State Retriever contract address -- `DELEGATION_MANAGER_ADDRESS`: Delegation Manager contract address -- `SERVICE_MANAGER_ADDRESS`: Service Manager contract address -- `STAKE_REGISTRY_ADDRESS`: Stake Registry contract address -- `STRATEGY_MANAGER_ADDRESS`: Strategy Manager contract address -- `AVS_DIRECTORY_ADDRESS`: AVS Directory contract address -- `REWARDS_COORDINATOR_ADDRESS`: Rewards Coordinator contract address - -### 7. 
Testing - -To verify your setup: - -```shell -cargo test -``` - -## Troubleshooting - -### Common Deployment Issues - -- Verify RPC endpoint accessibility -- Confirm network specification (local, testnet, mainnet) -- Check contract constructor arguments -- Ensure sufficient deployment funds - -### Service Issues - -- Verify settings.env contains all required addresses -- Confirm contract addresses match chosen network -- Check binary build features -- Verify network connectivity and RPC endpoint - -## Customizing Your AVS - -The template provides a foundation that you can build upon: - -- The contracts are named TangleServiceManager.sol, TangleTaskManager.sol, and ITangleTaskManager.sol by default. Use find-and-replace to rename as needed. -- For information on runners, see [blueprint runner documentation](/developers/tangle-avs#blueprint-runner-architecture). -- For an example job implementation, consult the [jobs documentation](/developers/tangle-avs#libsrclibrs). - -## Development Keys - -The aggregator key is - `47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a` -The task generator key is - `2a871d0798f97d79848a013d4936a73bf4cc922c825d33c1cf7073dff6d409c6` - -```rust -pub const ANVIL_PRIVATE_KEYS: [&str; 10] = [ - "ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", - "59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d", - "5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a", - "7c852118294e51e653712a81e05800f419141751be58f605c371e15141b007a6", - "47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a", - "8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba", - "92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e", - "4bbbf85ce3377467afe5d46f804f221813b2bb87f24d81f60f1fcdbf7cbf4356", - "dbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97", - "2a871d0798f97d79848a013d4936a73bf4cc922c825d33c1cf7073dff6d409c6", -]; -``` diff --git a/pages/developers/eigenlayer-avs/ecdsa-template.mdx b/pages/developers/eigenlayer-avs/ecdsa-template.mdx deleted file mode 100644 index 570c2a46..00000000 --- a/pages/developers/eigenlayer-avs/ecdsa-template.mdx +++ /dev/null @@ -1,172 +0,0 @@ ---- -title: Using the EigenLayer ECDSA Template ---- - -# Building AVS with the EigenLayer ECDSA Template - -## Introduction - -This guide will walk you through the process of utilizing our ECDSA Template to initiate your own AVS with -ECDSA-oriented Smart Contracts. For reference, the [Tangle AVS](https://github.com/tangle-network/avs) demonstrates an -implementation of an AVS using ECDSA contracts. - -## Prerequisites - -- [`cargo-tangle` CLI](/developers/cli/installation) -- [docker desktop](https://docs.docker.com/desktop/) -- [foundry](https://book.getfoundry.sh/) (stable) -- [rust](https://www.rust-lang.org/tools/install) - -## Complete Workflow - -### 1. Installation - -```shell -cargo install cargo-tangle --git https://github.com/tangle-network/blueprint --force -``` - -### 2. Creating Your Project - -To create a new ECDSA AVS, use the following command: - -```shell -cargo tangle blueprint create --name --eigenlayer ecdsa -``` - -Replace `` with your desired AVS name. You'll be prompted with configuration questions - if unsure, the default selections are recommended. - -### 3. Building Your AVS - -```shell -cargo build --release -``` - -### 4. 
Deploying Your Contracts - -For local development: - -```shell -# Deploy to local devnet -cargo tangle blueprint deploy eigenlayer \ - --devnet \ - --ordered-deployment -``` - -For testnet deployment: - -```shell -# Deploy to testnet -cargo tangle blueprint deploy eigenlayer \ - --network testnet \ - --rpc-url \ - --ordered-deployment -``` - -### Addresses for deployment - -If you're using the template as is, you can find the addresses of the contracts you need in your project's settings.env file. Others are below. - -| Address | Value | -| -------------------- | ------------------------------------------ | -| Registry Coordinator | Obtained from Deployment output | -| Pauser Registry | Obtained from Deployment output | -| Initial Owner | 0x70997970C51812dc3A010C7d01b50e0d17dc79C8 | -| Aggregator | 0xa0Ee7A142d267C1f36714E4a8F75612F20a79720 | -| Generator | 0x15d34AAf54267DB7D7c367839AAf71A00a2C6A65 | -| AVS Directory | 0x5fc8d32690cc91d4c39d9d3abcbd16989f875707 | -| Rewards Coordinator | 0xb7f8bc63bbcad18155201308c8f3540b07f84f5e | -| Stake Registry | Obtained from Deployment output | -| Tangle Task Manager | Obtained from Deployment output | - -Once all contracts have been deployed, you will notice that it continues running to keep the testnet alive. - -#### Deployment Options - -- `--rpc-url `: HTTP RPC endpoint (required unless using --devnet) -- `--contracts-path `: Path to contracts directory (defaults to "./contracts") -- `--ordered-deployment`: Interactive ordered deployment -- `-w, --network `: Target network (local, testnet, mainnet) -- `--devnet`: Start local Anvil devnet -- `-k, --keystore-path `: Keystore path (defaults to ./keystore) - -### 5. Running Your Service - -```shell -cargo tangle blueprint run \ - -p eigenlayer \ - -u \ - (OPTIONAL) --keystore-path ./test-keystore -``` - -#### Run Options - -- `-p, --protocol eigenlayer`: Specify Eigenlayer protocol -- `-u, --rpc-url `: HTTP RPC endpoint URL (required) -- `-k, --keystore-path `: Keystore path (defaults to ./keystore) -- `-b, --binary-path `: AVS binary path (optional) -- `-w, --network `: Network to connect to -- `-d, --data-dir `: Data directory path (defaults to ./data) -- `-n, --bootnodes `: Optional bootnodes -- `-f, --settings-file `: Protocol settings file path (defaults to ./settings.env) - -### 6. Required Environment Variables - -Your settings.env file must include: - -- `REGISTRY_COORDINATOR_ADDRESS`: Registry Coordinator contract address -- `OPERATOR_STATE_RETRIEVER_ADDRESS`: Operator State Retriever contract address -- `DELEGATION_MANAGER_ADDRESS`: Delegation Manager contract address -- `SERVICE_MANAGER_ADDRESS`: Service Manager contract address -- `STAKE_REGISTRY_ADDRESS`: Stake Registry contract address -- `STRATEGY_MANAGER_ADDRESS`: Strategy Manager contract address -- `AVS_DIRECTORY_ADDRESS`: AVS Directory contract address -- `REWARDS_COORDINATOR_ADDRESS`: Rewards Coordinator contract address - -### 7. 
Testing - -To verify your setup: - -```shell -cargo test -``` - -## Troubleshooting - -### Common Deployment Issues - -- Verify RPC endpoint accessibility -- Confirm network specification (local, testnet, mainnet) -- Check contract constructor arguments -- Ensure sufficient deployment funds - -### Service Issues - -- Verify settings.env contains all required addresses -- Confirm contract addresses match chosen network -- Check binary build features -- Verify network connectivity and RPC endpoint - -## Customizing Your AVS - -The template provides a foundation that you can build upon: - -- The default contract is named TangleServiceManager.sol (with related test files TestTangleServiceManager.sol and TangleServiceManager.t.sol). Use find-and-replace to rename as needed. -- For an example of a blueprint runner, see [blueprint runner documentation](/developers/tangle-avs#blueprint-runner-architecture). -- For a job implementation example, consult the [job documentation](/developers/tangle-avs#libsrclibrs). - -## Development Keys - -```rust -pub const ANVIL_PRIVATE_KEYS: [&str; 10] = [ - "ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", - "59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d", - "5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a", - "7c852118294e51e653712a81e05800f419141751be58f605c371e15141b007a6", - "47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a", - "8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba", - "92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e", - "4bbbf85ce3377467afe5d46f804f221813b2bb87f24d81f60f1fcdbf7cbf4356", - "dbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97", - "2a871d0798f97d79848a013d4936a73bf4cc922c825d33c1cf7073dff6d409c6", -]; -``` diff --git a/pages/developers/eigenlayer-avs/incredible-squaring-avs.mdx b/pages/developers/eigenlayer-avs/incredible-squaring-avs.mdx deleted file mode 100644 index 064544c9..00000000 --- a/pages/developers/eigenlayer-avs/incredible-squaring-avs.mdx +++ /dev/null @@ -1,153 +0,0 @@ ---- -title: EigenLayer Incredible Squaring AVS ---- - -import GithubFileReaderDisplay from "/components/GithubFileReaderDisplay"; - -# Incredible Squaring AVS Example - -## Introduction - -This guide will walk you through building an EigenLayer AVS (Actively Validated Service) using Tangle Blueprints, using examples from the -[Incredible Squaring blueprint implementation](https://github.com/tangle-network/blueprint/tree/main/examples/incredible-squaring-eigenlayer). - -The Incredible Squaring AVS is a simple example that demonstrates how to build an AVS that squares numbers. While the computation is trivial, the example showcases the key components of an EigenLayer AVS built with Tangle Blueprints. - -## Blueprint Structure - -The Incredible Squaring AVS blueprint follows the basic library and binary structure: - -1. **Library**: Contains job definitions and core logic -2. **Binary**: Contains the Blueprint Runner implementation - -## Job Definitions - -Jobs are the core computational units in your Blueprint. For the Incredible Squaring AVS, we define jobs to handle various tasks: - - - -This job initializes a new task in the Incredible Squaring AVS. It takes parameters from an EVM event and processes them. - - - -This job computes the square of a number in the Incredible Squaring AVS and sends the result as a signed response to an Aggregator. 
- -## Working with EVM Smart Contracts - -To interact with EVM smart contracts, the blueprint uses the `alloy` crate - - - -## Blueprint Runner Architecture - -The Blueprint Runner is the core component that orchestrates the execution of your Blueprint. For the Incredible Squaring AVS, it consists of several key components: - -### 1. Setting Up the Provider - -First, create an HTTP provider to connect to the Ethereum network with a wallet enabled for transacting: - - - -### 2. Creating Contexts - -Next, create the contexts that will be used by your jobs: - - - -### 3. Setting Up Producers - -Producers listen for events and prepare them for processing. In the Incredible Squaring AVS, we set up producers to listen for EVM events: - - - -### 4. Configuring the Blueprint Runner - -Finally, set up the Blueprint Runner with the router, producers, consumers, and background services: - - - -The Blueprint Runner: - -- Uses a router to direct job calls to the appropriate handlers -- Sets up a producer that listen for events and prepare them for processing -- Our job automatically handles signed task responses, so we don't use a consumer -- Configures Aggregator as background services - -## Testing the Incredible Squaring AVS - -### 1. Prerequisites - -Before you begin, ensure you have the following installed: - -- Anvil -- Docker (DockerDesktop) - -### 2. Installation - -Clone this repository: - -```shell -git clone https://github.com/tangle-network/blueprint.git -cd blueprint -``` - -Install Anvil: - -```shell -curl -L https://foundry.paradigm.xyz | bash -foundryup -``` - -### 3. Running the AVS on a Testnet - -We have a test for running this AVS Blueprint on a local Anvil Testnet. You can run the test with the following: - -```shell -cargo test --package incredible-squaring-blueprint-eigenlayer test_eigenlayer_incredible_squaring_blueprint -``` - -## Best Practices and Considerations - -1. **Error Handling**: Implement robust error handling in your job functions to manage potential failures gracefully. -2. **Asynchronous Operations**: Use `async/await` for operations that may take time, such as network requests or complex computations. -3. **State Management**: If your AVS needs to maintain state between job executions, consider implementing a state management system. -4. **Testing**: Implement unit tests for your job logic and integration tests for the complete AVS. -5. **Logging**: Use appropriate logging to track the job execution process and aid in debugging. -6. **Gas Considerations**: Be aware of the gas costs associated with your on-chain interactions, especially when responding to events with transactions. -7. **Scalability**: Design your AVS to scale with the number of tasks you expect to process. -8. **Security**: Ensure your AVS follows security best practices, especially when handling cryptographic operations. 
diff --git a/pages/developers/eigenlayer-avs/overview.mdx b/pages/developers/eigenlayer-avs/overview.mdx deleted file mode 100644 index 644d9320..00000000 --- a/pages/developers/eigenlayer-avs/overview.mdx +++ /dev/null @@ -1,25 +0,0 @@ -import CardGrid from "../../../components/CardGrid.tsx" - -# Building an EigenLayer AVS - -## Getting Started - - diff --git a/pages/developers/p2p-networking/_meta.ts b/pages/developers/p2p-networking/_meta.ts index cd8279e0..51202450 100644 --- a/pages/developers/p2p-networking/_meta.ts +++ b/pages/developers/p2p-networking/_meta.ts @@ -1,7 +1,7 @@ import { Meta } from "nextra"; const meta: Meta = { - overview: "Overview", + overview: "P2P Fundamentals", usage: "Usage", testing: "Testing", extensions: "Extensions", diff --git a/pages/developers/p2p-networking/extensions/round-based.mdx b/pages/developers/p2p-networking/extensions/round-based.mdx index e69de29b..b68d0485 100644 --- a/pages/developers/p2p-networking/extensions/round-based.mdx +++ b/pages/developers/p2p-networking/extensions/round-based.mdx @@ -0,0 +1,6 @@ +# Round-Based Networking Extension + +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/networking/extensions/round-based + +This extension provides the `RoundBasedNetworkAdapter`, which bridges a `NetworkServiceHandle` with the `round-based` +protocol framework. See the [P2P usage guide](/developers/p2p-networking/usage) for a minimal integration example. diff --git a/pages/developers/p2p-networking/overview.mdx b/pages/developers/p2p-networking/overview.mdx index 47212fb0..15d3be23 100644 --- a/pages/developers/p2p-networking/overview.mdx +++ b/pages/developers/p2p-networking/overview.mdx @@ -1,13 +1,15 @@ --- -title: P2P Networking Overview +title: P2P Networking Fundamentals description: An overview of the P2P networking utilities provided by the blueprint SDK. --- import CardGrid from "../../../components/CardGrid.tsx" -# P2P Networking Overview +# P2P Networking Fundamentals -The [Blueprint SDK](https://github.com/tangle-network/blueprint) provides P2P networking utilities that allow developers to securely orchestrate +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/networking + +The [Blueprint SDK](https://github.com/tangle-network/blueprint/tree/v2) provides P2P networking utilities that allow developers to securely orchestrate communications among multiple service operators. ## Examples diff --git a/pages/developers/p2p-networking/testing.mdx b/pages/developers/p2p-networking/testing.mdx index b219384d..e662322b 100644 --- a/pages/developers/p2p-networking/testing.mdx +++ b/pages/developers/p2p-networking/testing.mdx @@ -1,169 +1,48 @@ -# Testing Multi-node Blueprints +# Testing Multi-node P2P Blueprints -This guide explains how to test blueprints that require multiple nodes for distributed computation, such as threshold cryptography protocols or consensus protocols. We'll walk through setting up a test environment and running multi-node tests using our SDK's testing utilities. +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/networking -## Overview +When you build multi-operator protocols, the networking test utilities let you spin up multiple libp2p nodes and verify +handshakes and message flow without running a full chain. -When developing multi-node blueprints, testing requires simulating a network of nodes that can communicate and coordinate with each other. 
Our SDK provides testing utilities that make it easy to: - -- Set up a simulated multi-node network environment -- Configure individual nodes with custom handlers -- Submit jobs and verify results across nodes -- Test distributed protocols and consensus mechanisms - -## Test Environment Setup - -### Prerequisites - -First, include the necessary testing utilities in your project: - -```rust -use blueprint_sdk::testing::utils::tangle::TangleTestHarness; -use blueprint_sdk::testing::utils::harness::TestHarness; -use blueprint_sdk::testing::tempfile; -use blueprint_sdk::logging; -``` - -### Basic Setup - -1. **Initialize Logging and Error Handling**: - -```rust -logging::setup_log(); -``` - -2. **Create Test Harness**: - -```rust -let tmp_dir = tempfile::TempDir::new()?; -let harness = TangleTestHarness::setup(tmp_dir).await? -``` - -## Setting Up Multi-node Services - -### Node Configuration - -1. **Initialize Test Environment**: +## Example: Multi-node handshake + message ```rust -// N specifies number of nodes (e.g. N = 3) -let (mut test_env, service_id, blueprint_id) = harness.setup_services::(false).await?; -test_env.initialize().await?; -``` +use blueprint_sdk::crypto::k256::K256Ecdsa; +use blueprint_sdk::networking::service_handle::NetworkServiceHandle; +use blueprint_sdk::networking::test_utils::{create_whitelisted_nodes, setup_log, wait_for_all_handshakes}; +use blueprint_sdk::networking::types::MessageRouting; +use std::time::Duration; -2. **Configure Individual Nodes**: +#[tokio::test] +async fn test_p2p_protocol() { + setup_log(); -```rust -let handles = test_env.node_handles().await; -for handle in handles { -// Get node configuration -let config = handle.gadget_config().await; -// Initialize node-specific context -let blueprint_ctx = YourContext::new(config.clone()).await?; -// Add job handlers -let job_handler = YourJobHandler::new(&config, blueprint_ctx.clone()).await?; -handle.add_job(job_handler).await; -} -``` + // Create 3 nodes that trust the same whitelist. + let mut nodes = create_whitelisted_nodes::<K256Ecdsa>(3, "test-net", "sum-test", false); -3. **Allow Time for Network Setup**: - -```rust -// Wait for network handshakes -tokio::time::sleep(std::time::Duration::from_secs(10)).await; -``` - -4. **Start the Environment**: - -```rust -test_env.start().await?; -``` - -## Running Tests - -### Submitting Jobs - -To test your blueprint's functionality, submit jobs and verify their results: - -```rust -// Submit a job with arguments -let job = harness - .submit_job( - service_id, - JOB_ID, - vec![InputValue::Uint16(2)] // Example job argument -) -.await?; -logging::info!("Submitted job {JOB_ID} with service ID {service_id}"); -``` - -### Verifying Results - -Wait for job completion and verify the results: - -```rust -// Wait for job execution -let results = harness.wait_for_job_execution(service_id, job).await?; -assert_eq!(results.service_id, service_id); - -// Verify outputs -if !expected_outputs.is_empty() { -assert_eq!( - results.result.len(), - expected_outputs.len(), - "Number of outputs doesn't match expected" -); + let mut handles = Vec::new(); + for node in nodes.iter_mut() { + handles.push(node.start().await.expect("start node")); + } -// Add more verification logic as needed...
+ let mut handle_refs: Vec<&mut NetworkServiceHandle> = handles.iter_mut().collect(); + wait_for_all_handshakes(handle_refs.as_mut_slice(), Duration::from_secs(10)).await; + + let routing = MessageRouting { + message_id: 1, + round_id: 0, + sender: handles[0].local_peer_id, + recipient: None, + }; + handles[0] + .send(routing, b"hello world".to_vec()) + .expect("send message"); } ``` -## Best Practices - -1. **Error Handling**: Always implement proper error handling and logging to diagnose test failures. - -2. **Network Delays**: Include appropriate delays for network initialization and handshakes. - -3. **Verification**: Thoroughly verify all job outputs against expected results. +## Tips -4. **Cleanup**: Use temporary directories that are automatically cleaned up after tests. - -5. **Logging**: Implement comprehensive logging to track test progress and debug issues. - -## Example: Complete Test Structure - -Here's a complete example showing how to structure a multi-node test: - -```rust -#[tokio::test(flavor = "multi_thread")] -async fn test_blueprint() -> color_eyre::Result<()> { - logging::setup_log(); - let tmp_dir = tempfile::TempDir::new()?; - let harness = TangleTestHarness::setup(tmp_dir).await?; - - // Initialize nodes - let (mut test_env, service_id, ) = harness.setup_services::<3>(false).await?; - test_env.initialize().await?; - - // Configure nodes - let handles = test_env.node_handles().await; - for handle in handles { - // Add handlers - // ... - } - - // Wait for network setup - tokio::time::sleep(std::time::Duration::from_secs(10)).await; - test_env.start().await?; - - // Run test jobs - let job = harness.submit_job(service_id, JOB_ID, vec![/ args /]).await?; - let results = harness.wait_for_job_execution(service_id, job).await?; - - // Verify results - assert_eq!(results.service_id, service_id); - - // Additional verification... - Ok(()) -} -``` +- Use `create_whitelisted_nodes` to keep key management consistent across your test network. +- `wait_for_all_handshakes` gives deterministic readiness checks before you start sending messages. +- If you need to inspect traffic, read from `handle.next_protocol_message()` in each node loop. diff --git a/pages/developers/p2p-networking/usage.mdx b/pages/developers/p2p-networking/usage.mdx index 4c29ed31..1d5539d4 100644 --- a/pages/developers/p2p-networking/usage.mdx +++ b/pages/developers/p2p-networking/usage.mdx @@ -1,181 +1,119 @@ # Using the P2P Networking Utilities -To spin up a P2P network, following two methods are provided on `GadgetConfiguration`: +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/networking -- `GadgetConfiguration::libp2p_network_config()` -- `GadgetConfiguration::libp2p_start_network()` +To spin up a P2P network, build a `NetworkConfig` and start a `NetworkService`. The service spawns its event loop and +returns a `NetworkServiceHandle` you can clone into jobs and background services. ## Example -Here's an example of how to spin up a P2P network and send messages to it. - ```rust -use blueprint_sdk::networking::service_handle::NetworkServiceHandle; -use blueprint_sdk::networking::InstanceMsgPublicKey; -fn example_usage(config: GadgetConfiguration) -> Result<(), GadgetError> { - let allowed_keys: HashSet = /* ... 
*/; - - // Create the `NetworkConfig` based on the `GadgetConfiguration` - let network_config = config.libp2p_network_config("my/protocol/1.0.0")?; +use blueprint_sdk::crypto::k256::K256Ecdsa; +use blueprint_sdk::networking::{AllowedKeys, NetworkConfig, NetworkService}; +use blueprint_sdk::networking::types::MessageRouting; +use libp2p::identity::Keypair; +use std::collections::HashSet; +use std::time::Duration; + +async fn example_usage() -> Result<(), blueprint_sdk::networking::error::Error> { + let instance_key_pair = K256Ecdsa::generate_with_seed(None).expect("instance key"); + let local_key = Keypair::generate_ed25519(); + let listen_addr = "/ip4/0.0.0.0/tcp/0".parse().expect("multiaddr"); + + let config = NetworkConfig::<K256Ecdsa> { + network_name: "my-blueprint".to_string(), + instance_id: "service-1".to_string(), + instance_key_pair: instance_key_pair.clone(), + local_key, + listen_addr, + target_peer_count: 10, + bootstrap_peers: vec![], + enable_mdns: true, + enable_kademlia: true, + using_evm_address_for_handshake_verification: false, + }; - // Start up the network, getting a handle back - let network_handle = config.libp2p_start_network(network_config, allowed_keys)?; + let mut allowed = HashSet::new(); + allowed.insert(K256Ecdsa::public_from_secret(&instance_key_pair)); - // Use the handle to receive p2p messages from the network - loop { - if let Some(msg) = network_handle.next_protocol_message() { - println!("Received message: {:?}", msg); - } + let (allowed_keys_tx, allowed_keys_rx) = crossbeam_channel::unbounded(); + let service = NetworkService::new( + config, + AllowedKeys::InstancePublicKeys(allowed), + allowed_keys_rx, + )?; - tokio::time::sleep(std::time::Duration::from_millis(100)).await; - } + let mut handle = service.start(); - // Use the handle to send p2p messages to the network - let p2p_routing = MessageRouting { - /// Unique identifier for this message - message_id: 1, - /// The round/sequence number this message belongs to - round_id: 1, - /// The sender's information - sender: ParticipantInfo { - /// The public key of the sender - public_key: InstanceMsgPublicKey(/* ... */), - /// The address of the sender - address: /* ... */ - }, - /// Recipient information for direct messages - recipient: Some(ParticipantInfo { - public_key: InstanceMsgPublicKey(/* ... */), - address: /* ... */ - }), - }; - network_handle.send(p2p_routing, /* ...some bytes (Vec)... */); + // Optional: update the whitelist later. + // allowed_keys_tx.send(AllowedKeys::InstancePublicKeys(updated_set)).ok(); - // Send gossip messages to the network - let gossip_routing = MessageRouting { + let routing = MessageRouting { message_id: 1, - round_id: 1, - sender: ParticipantInfo { - public_key: InstanceMsgPublicKey(/* ... */), - address: /* ... */ - }, + round_id: 0, + sender: handle.local_peer_id, recipient: None, }; - network_handle.send(gossip_routing, /* ...some bytes (Vec)... */); + handle + .send(routing, b"hello from a blueprint".to_vec()) + .expect("send message"); - Ok(()) + loop { + if let Some(message) = handle.next_protocol_message() { + println!("Received: {:?}", message); + } + tokio::time::sleep(Duration::from_millis(100)).await; + } } ``` -## Integrating Networking with Service contexts - -The P2P networking utilities can be integrated into service contexts to manage network state and handle messages. It exposes an interface for you to send messages to other peers of your service as well as gossip messages to the entire network of service instance operators.
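+Because the returned handle can be cloned, one pattern is to move a clone into a background task that drains incoming messages while your job logic keeps sending. A short sketch, assuming the `handle` from the example above and that it can be moved into a spawned task:
+
+```rust
+// Sketch: receive protocol messages in a background task so the sending
+// task is never blocked. Reuses `handle` and `Duration` from the example above.
+let mut rx = handle.clone();
+tokio::spawn(async move {
+    loop {
+        if let Some(message) = rx.next_protocol_message() {
+            println!("Background receiver got: {:?}", message);
+        }
+        tokio::time::sleep(Duration::from_millis(100)).await;
+    }
+});
+```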
+If you prefer to verify handshakes using EVM addresses, set `using_evm_address_for_handshake_verification: true` and use +`AllowedKeys::EvmAddresses` with the operator address set for the service. -## Context Constructor +## Integrating Networking with Blueprint Contexts -Create a context that you can pass into your jobs and background services. +Store the `NetworkServiceHandle` inside your context so job handlers and background services can send or receive messages: ```rust -/// The context holds necessary information for the service to run. -#[derive(Clone, KeystoreContext, TangleClientContext, ServicesContext)] -pub struct BlsContext { - #[config] - pub config: GadgetConfiguration, - #[call_id] - pub call_id: Option, - pub network_backend: NetworkServiceHandle, - pub store: Arc>, - pub identity: sp_core::ecdsa::Pair, -} +use blueprint_sdk::crypto::k256::K256Ecdsa; +use blueprint_sdk::networking::NetworkServiceHandle; +use blueprint_sdk::{Router, tangle_evm::TangleEvmLayer}; -// Core context management implementation -impl BlsContext { - /// Creates a new service context with the provided configuration - /// - /// # Errors - /// Returns an error if: - /// - Network initialization fails - /// - Configuration is invalid - pub async fn new(config: GadgetConfiguration) -> Result { - let operator_keys: HashSet = config - .tangle_client() - .await? - .get_operators() - .await? - .values() - .map(|key| InstanceMsgPublicKey(*key)) - .collect(); - - let network_config = config.libp2p_network_config(NETWORK_PROTOCOL)?; - let identity = network_config.instance_key_pair.0.clone(); - - let network_backend = config.libp2p_start_network(network_config, operator_keys)?; - - let keystore_dir = PathBuf::from(&config.keystore_uri).join("bls.json"); - let store = Arc::new(LocalDatabase::open(keystore_dir)); - - Ok(Self { - config, - call_id: None, - network_backend, - store, - identity, - }) - } +#[derive(Clone)] +struct P2pContext { + network: NetworkServiceHandle, } -``` -### Round Based Job +let context = P2pContext { + network: handle.clone(), +}; -`round-based` is a [library for building structure round based protocols](https://github.com/LFDT-Lockness/round-based), especially MPC protocols. There are a variety of benefits to structuring your protocol in this way and it can streamline the separation between networking and protocol logic. +let router = Router::new() + .route(MY_JOB_ID, my_job.layer(TangleEvmLayer)) + .with_context(context); +``` + +## Round-Based Protocols -To leverage a `round-based` protocol that handles sending, receiving, and processing messages use the `RoundBasedNetworkAdapter` available from the SDK and in the `gadget-networking-round-based-extension` crate. +`round-based` is a library for building structured round-based protocols (especially MPC). The SDK ships a compatibility +layer in the `blueprint-networking-round-based-extension` crate. 
```rust -#[job( - id = 0, - params(t), - event_listener( - listener = TangleEventListener, - pre_processor = services_pre_processor, - post_processor = services_post_processor, - ), -)] -pub async fn keygen(t: u16, context: BlsContext) -> Result, GadgetError> { - // Get configuration and compute deterministic values - let blueprint_id = context - .blueprint_id() - .map_err(|e| KeygenError::ContextError(e.to_string()))?; - let call_id = context - .current_call_id() - .await - .map_err(|e| KeygenError::ContextError(e.to_string()))?; - // Setup party information - let (i, operators) = context - .get_party_index_and_operators() - .await - .map_err(|e| KeygenError::ContextError(e.to_string()))?; - let parties: HashMap = operators - .into_iter() - .enumerate() - .map(|(j, (_, ecdsa))| (j as PartyIndex, InstanceMsgPublicKey(ecdsa))) - .collect(); - let n = parties.len() as u16; - let i = i as u16; - // Create a new round based network adapter - let network = RoundBasedNetworkAdapter::::new( - context.network_backend.clone(), - i, - parties.clone(), - crate::context::NETWORK_PROTOCOL, - ); - - // Create a new round based party - let party = round_based::party::MpcParty::connected(network); - - // Run the keygen protocol - let output = crate::keygen_state_machine::bls_keygen_protocol(party, i, t, n, call_id).await?; - - Ok(output) -} +use blueprint_networking_round_based_extension::RoundBasedNetworkAdapter; +use blueprint_sdk::crypto::k256::K256Ecdsa; +use libp2p::PeerId; +use round_based::{PartyIndex, party::MpcParty}; +use std::collections::HashMap; + +let parties: HashMap<PartyIndex, PeerId> = /* map your party indices to peer IDs */; +let adapter = RoundBasedNetworkAdapter::<MyMsg, K256Ecdsa>::new( + handle.clone(), + my_index, + &parties, + "my/protocol", +); + +let party = MpcParty::connected(adapter); ``` + +`MyMsg` should implement `round_based::ProtocolMessage` and be `Serialize`/`Deserialize` so it can be encoded over the wire. diff --git a/pages/developers/precompiles/_meta.ts b/pages/developers/precompiles/_meta.ts deleted file mode 100644 index cb944d1c..00000000 --- a/pages/developers/precompiles/_meta.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - overview: "Solidity Precompiles", - features: "Precompiles For Key Features", - utility: "Utility Precompiles", - ux: "User Experience", -}; - -export default meta; diff --git a/pages/developers/precompiles/features/governance/preimage.mdx b/pages/developers/precompiles/features/governance/preimage.mdx deleted file mode 100644 index 15425278..00000000 --- a/pages/developers/precompiles/features/governance/preimage.mdx +++ /dev/null @@ -1,122 +0,0 @@ ---- -title: Preimage Precompile Contract -description: Learn how to take the first necessary step to submit a proposal on-chain by submitting a preimage that contains the action to be carried out in the proposal, using Tangle’s Preimage Precompile. ---- - -import GithubFileReaderDisplay from "../../../../../components/GithubFileReaderDisplay"; - -# Interacting with the Preimage Precompile - -## Introduction - -Tangle features native on-chain governance that enables stakeholders to participate in the direction of the network. The Substrate Preimage Pallet allows token holders to take the first step toward creating a proposal by submitting the preimage (the action to be carried out in the proposal) on-chain. The hash of the preimage is required to submit the proposal. - -The Preimage Precompile interacts directly with Substrate’s Preimage Pallet.
This pallet is coded in Rust and is normally not accessible from the Ethereum side of Tangle. However, the Preimage Precompile enables the necessary functionality—creating and managing preimages—directly from a Solidity interface. - -The Preimage Precompile is located at the following address on both Tangle Mainnet and Tangle Testnet: - -```text -0x0000000000000000000000000000000000000806 -``` - ---- - -## The Preimage Solidity Interface - -Below is the Tangle Preimage interface, which you can use to interact with the on-chain Preimage Pallet: - - - -The two key functions are: - -- `notePreimage` — registers a preimage on-chain. -- `unnotePreimage` — clears an unrequested preimage from storage. - -These actions emit the `PreimageNoted` and `PreimageUnnoted` events, respectively. - ---- - -## Interact with the Solidity Interface - -The following sections walk through how to use the Preimage Precompile with Remix and Polkadot.js Apps on Tangle. The examples below show Tangle Testnet steps, but you can follow a similar approach on Tangle Mainnet. - -### Checking Prerequisites - -To follow along, you will need: - -- MetaMask installed and connected to Tangle Testnet -- An account on Tangle Testnet that has some test tokens - -### Remix Set Up - -1. Go to the official [Remix](https://remix.ethereum.org) website. -2. In the File Explorer pane, create a new file (for example, “Preimage.sol”), and paste the interface code provided above (the Tangle Preimage interface) into the file. - -### Compile the Contract - -1. Click on the “Solidity Compiler” tab in Remix. -2. Select the version of Solidity that is compatible with the interface (for example, 0.8.3 or higher). -3. Click on “Compile Preimage.sol.” - -### Access the Contract - -1. Switch to the “Deploy & Run Transactions” tab. (Note that you are not truly deploying a contract; you are going to interact with the already-deployed Preimage Precompile on Tangle.) -2. In the “ENVIRONMENT” dropdown, select “Injected Provider – MetaMask,” which should already be connected to Tangle Testnet. -3. Ensure “Preimage.sol” is selected in the “CONTRACT” dropdown. -4. In the “At Address” field, provide the Preimage Precompile’s address, which on Tangle is: - `0x0000000000000000000000000000000000000806` -5. Click “At Address.” You should see the Precompile listed under “Deployed Contracts.” - ---- - -### Submit a Preimage of a Proposal - -To create a new on-chain proposal, you must first register a preimage that encodes the proposed action. Use Polkadot.js Apps to generate both the encoded proposal and preimage hash, then use the Preimage Precompile’s notePreimage function to store it on-chain. - -#### Getting the Encoded Proposal and Preimage Hash - -Follow these steps in Polkadot.js Apps (with Tangle Testnet selected in the network dropdown): - -1. Navigate to the “Governance” tab. -2. Select “Preimages” in the left-hand menu or from the dropdown. -3. Click on “+ Add preimage.” - -Then: - -1. Select any account in the dropdown (no on-chain action will be taken here). -2. Choose the appropriate pallet and dispatchable function you wish to propose (for example, the “system” pallet and the “remark” function). -3. Provide any unique content for the remark. -4. Click “Submit preimage,” but when prompted to sign, do NOT finalize the transaction. Just retrieve the data. - -On the following screen: - -1. Expand the triangle icon to reveal the encoded proposal bytes. -2. Copy the bytes representing the encoded proposal. 
You’ll supply these bytes when calling the notePreimage function from Remix. - -> Note: Do not sign or submit the transaction in Polkadot.js Apps. You only need to copy the proposal bytes. - ---- - -#### Storing the Preimage On-Chain - -Now return to Remix to call the notePreimage function on the Preimage Precompile: - -1. Expand the Preimage Precompile you interacted with. -2. Find and expand the “notePreimage” function. -3. Paste in the proposal bytes you copied from Polkadot.js Apps. -4. Click “transact” and confirm the transaction in MetaMask. - -Once the transaction is confirmed, the preimage for your proposal is on-chain. You can now proceed with submitting the actual proposal (for example, in the referenda workflow) referencing the hash of this registered preimage. - ---- - -### Removing a Preimage - -If you wish to remove a preimage, follow the same approach above, but use the unnotePreimage function from the deployed interface. In that case, you’ll need to provide the preimage hash (bytes32) you want to clear from on-chain storage. - ---- - -That covers the typical operations for registering and removing preimages on Tangle using the Preimage Precompile. By following these steps, you can confidently take the first step toward submitting proposals and interacting with Tangle’s on-chain governance system. diff --git a/pages/developers/precompiles/features/multi-asset-delegation.mdx b/pages/developers/precompiles/features/multi-asset-delegation.mdx deleted file mode 100644 index f239f09e..00000000 --- a/pages/developers/precompiles/features/multi-asset-delegation.mdx +++ /dev/null @@ -1,145 +0,0 @@ ---- -title: "Multi Asset Delegation Precompile Contract" -description: "Leverage Tangle's multi-asset delegation functionality through a specialized precompiled contract accessible via the Ethereum API." -keywords: ["solidity", "ethereum", "delegation", "multi-asset", "tangle", "precompiled", "contracts"] ---- - -import GithubFileReaderDisplay from "../../../../components/GithubFileReaderDisplay"; - -# Interacting with the MultiAssetDelegation Precompile - -## Introduction - -Tangle supports multi-asset delegation through a built-in pallet in its runtime. To make it easy for developers to interact with this pallet using Ethereum-compatible tools, Tangle includes a precompiled contract at the following address: - -- Tangle Mainnet and Tangle Testnet: `0x0000000000000000000000000000000000000822` - -By calling this precompile using any standard Ethereum tool (Remix, web3 libraries, etc.), you can deposit assets, schedule asset withdrawals, delegate assets, and more—all without needing to interact directly with Substrate APIs. This guide demonstrates how to connect to the precompile contract and use its functions via the Tangle multi-asset delegation interface. - ---- - -## The Tangle MultiAssetDelegation Solidity Interface - -Below is the Solidity interface that provides access to the multi-asset delegation functionality of Tangle: - - - ---- - -## Using the MultiAssetDelegation Precompile on Tangle - -Below is an overview of how you can interact with the Tangle multi-asset delegation precompile in Remix. The same procedures also work for other Ethereum-compatible tools and libraries. - -### Prerequisites - -- MetaMask installed and connected to either Tangle Testnet or Mainnet -- An account funded with the relevant asset(s) on Tangle to deposit or delegate - -### Accessing the Precompile in Remix - -1. Navigate to the [Remix IDE](https://remix.ethereum.org). -2. 
Create a new file named "MultiAssetDelegation.sol" (or any name you prefer), and paste in the Solidity interface shown above. -3. In the "Compile" tab, compile "MultiAssetDelegationInterface.sol". -4. In the "Deploy & Run" tab, from the ENVIRONMENT dropdown, select "Injected Provider - MetaMask". -5. Under "CONTRACT", choose the compiled interface ("MultiAssetDelegation - MultiAssetDelegation.sol"). -6. In the text field next to the "At Address" button, enter the precompile address: - `0x0000000000000000000000000000000000000822` -7. Click on "At Address" to load the precompiled contract. The interface methods will appear under "Deployed Contracts." - ---- - -## Example Calls - -Below are example usages for some core methods on the multi-asset delegation interface. Make sure you have the correct asset IDs and token addresses (for ERC20 assets) whenever calling these methods. All calls below are made against the loaded precompile in Remix. - -### 1. Deposit Assets - -1. Expand the `deposit(uint256 assetId, address tokenAddress, uint256 amount, uint8 lockMultiplier)` section. -2. Set `assetId` to `0` for ERC20 tokens, or provide another valid ID for other assets. -3. Provide the ERC20 contract address in `tokenAddress` if `assetId` is `0`. For native assets, set this to the zero address. -4. Enter the `amount` you wish to deposit (in your asset's smallest unit, e.g., Wei for WETH). -5. Specify a `lockMultiplier` value for lock duration rewards (`0` for no lock, higher values for increased rewards). -6. Click "transact" and confirm the MetaMask popup. - -### 2. Schedule a Withdrawal - -1. Expand `scheduleWithdraw(uint256 assetId, address tokenAddress, uint256 amount)`. -2. Fill in `assetId` (`0` for ERC20), `tokenAddress` (if `assetId` is `0`), and `amount`. -3. Click "transact" and approve the MetaMask transaction. -4. The withdrawal enters an unbonding period; you can execute it after the period ends or cancel it anytime. - -### 3. Execute a Scheduled Withdrawal - -1. Expand `executeWithdraw()`. -2. Click "transact". -3. Confirm the MetaMask transaction. -4. Any previously scheduled withdrawals that are now eligible will be executed. - -### 4. Cancel a Scheduled Withdrawal - -1. Expand `cancelWithdraw(uint256 assetId, address tokenAddress, uint256 amount)`. -2. Provide the details of the scheduled withdrawal you wish to cancel (`assetId`, `tokenAddress` for ERC20 if needed, and the identical amount). -3. Click "transact" and confirm the Metamask transaction. - -### 5. Delegate to an Operator - -1. Expand `delegate(bytes32 operator, uint256 assetId, address tokenAddress, uint256 amount, uint64[] memory blueprintSelection)`. -2. Enter the operator (as a bytes32 account ID), the asset ID, token address if assetId is `0` (ERC20), the amount to delegate. -3. Provide `blueprintSelection` as an array of blueprint IDs the operator should participate in. -4. Click "transact" to delegate your assets to the operator. - -### 6. Schedule Unstake for Delegators - -1. Expand `scheduleDelegatorUnstake(bytes32 operator, uint256 assetId, address tokenAddress, uint256 amount)`. -2. Input the operator you previously delegated to and the relevant asset parameters. -3. Enter the amount you wish to unstake. -4. Click "transact" and confirm in MetaMask. - -### 7. Execute Scheduled Unstake for Delegators - -1. Expand `executeDelegatorUnstake()`. -2. Click "transact" and confirm the transaction. -3. Any eligible scheduled unstake operations will finalize. - -### 8. Cancel Scheduled Unstake - -1. 
Expand `cancelDelegatorUnstake(bytes32 operator, uint256 assetId, address tokenAddress, uint256 amount)`. -2. Provide the same operator, asset details, and amount that were set when scheduling the unstake. -3. Click "transact" to cancel the unstake operation. - -### 9. Check Overall Balance - -1. Expand `balanceOf(address who, uint256 assetId, address tokenAddress)`. -2. Enter the address you want to inspect, assetId (`0` for ERC20), and `tokenAddress` if needed. -3. Click "call" to see the total amount of assets held (deposited but not yet delegated). - -### 10. Check Delegated Balance - -1. Expand `delegatedBalanceOf(address who, uint256 assetId, address tokenAddress)`. -2. Enter the delegator's address, along with the relevant asset parameters. -3. Click "call" to see how many of the delegator’s tokens are actively delegated. - ---- - -## Asset Types - -- **Asset ID 0**: ERC20 tokens (requires token address) -- **Asset ID 1+**: Native or custom assets configured in the runtime - -## Lock Multipliers - -The `lockMultiplier` parameter in the deposit function allows users to lock their assets for longer periods in exchange for increased rewards: - -- `0`: No lock (standard delegation) -- Higher values: Longer lock periods with proportionally higher reward multipliers - -## Blueprint Selection - -When delegating, the `blueprintSelection` parameter specifies which service blueprints the operator should run on your behalf. This enables targeted delegation to specific services or applications. - -## More Information - -For a complete list of methods and their parameters, refer to the Solidity interface above. This interface exposes all the critical multi-asset delegation functionality provided by Tangle's runtime, enabling you to manage deposits, schedule and execute withdrawals, delegate tokens, and unstake as needed—all through an Ethereum-compatible workflow. Make sure to handle asset IDs, token addresses, and amounts accurately to avoid transaction failures. diff --git a/pages/developers/precompiles/features/staking.mdx b/pages/developers/precompiles/features/staking.mdx deleted file mode 100644 index c6610485..00000000 --- a/pages/developers/precompiles/features/staking.mdx +++ /dev/null @@ -1,91 +0,0 @@ ---- -title: "Staking Precompile Contract" -description: "Unlock the potential of staking on Tangle with a specialized precompiled contract accessible via the Ethereum API." -keywords: ["solidity", "ethereum", "staking", "tangle", "precompiled", "contracts"] ---- - -import GithubFileReaderDisplay from "../../../../components/GithubFileReaderDisplay"; - -# Interacting with the Staking Precompile - -## Introduction - -Tangle uses a staking system via a built-in pallet in the runtime. To allow developers to interact with this pallet using the Ethereum API, Tangle provides a precompiled contract located at address: - -- Tangle Mainnet and Tangle Testnet: `0x0000000000000000000000000000000000000800` - -By calling this precompile through any standard Ethereum tool (such as Remix or web3 libraries), you can use Solidity to bond tokens, nominate validators, unbond tokens, and more—all without having to directly use Substrate APIs. This guide shows how to connect to the precompile contract and make use of its functions using the Tangle staking interface. 
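If you prefer scripting over Remix, the same calls can be made from a few lines of JavaScript. The sketch below is a minimal example, assuming ethers v6, the public Tangle Testnet RPC endpoint used elsewhere in these docs, and function signatures matching the examples later on this page (`currentEra()`, `bond(uint256, bytes32)`); confirm them against the published Solidity interface before sending real transactions, and treat the payee encoding and private-key handling as placeholders.

```js
// A minimal sketch (not an official example): calling the staking precompile
// from a script with ethers v6 instead of Remix. The fragments below mirror
// the calls demonstrated on this page; verify them against the published
// interface. PRIVATE_KEY is a placeholder environment variable.
const { ethers } = require("ethers");

const STAKING_PRECOMPILE = "0x0000000000000000000000000000000000000800";
const stakingAbi = [
  "function currentEra() view returns (uint32)",
  "function bond(uint256 value, bytes32 payee)",
  "function nominate(bytes32[] targets)",
  "function unbond(uint256 value)",
];

async function main() {
  const provider = new ethers.JsonRpcProvider("https://testnet-rpc.tangle.tools");
  const signer = new ethers.Wallet(process.env.PRIVATE_KEY, provider);
  const staking = new ethers.Contract(STAKING_PRECOMPILE, stakingAbi, signer);

  // Read-only call: the current era index.
  console.log("current era:", await staking.currentEra());

  // State-changing call: bond 1 token (18 decimals assumed) with a
  // bytes32-encoded payee. The encoding below is illustrative only; see the
  // Bond Tokens example for notes on how the payee should be encoded.
  const payee = ethers.zeroPadValue(signer.address, 32);
  const tx = await staking.bond(ethers.parseEther("1"), payee);
  await tx.wait();
}

main().catch(console.error);
```

The Remix walkthrough below performs the same calls interactively.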
- ---- - -## The Tangle Staking Solidity Interface - -Below is the Solidity interface that wraps the Tangle staking functionality: - - - ---- - -## Using the Staking Precompile on Tangle - -Below is a step-by-step overview of how you might interact with the Tangle staking precompile using Remix as an example. The same approach applies to other tools or libraries capable of interacting with EVM contracts. - -### Prerequisites - -- MetaMask installed and connected to Tangle Testnet (or Tangle Mainnet) -- An account funded with native tokens on Tangle so you can bond or nominate - -### Accessing the Precompile in Remix - -1. Go to the [Remix IDE](https://remix.ethereum.org). -2. Create a new file named "StakingInterface.sol", and paste in the interface above. -3. In the "Compile" tab, compile "StakingInterface.sol". -4. In the "Deploy & Run" tab, select "Injected Provider - MetaMask" from the ENVIRONMENT dropdown. -5. In the "CONTRACT" dropdown, select "Staking - StakingInterface.sol" (the name may vary depending on your file). -6. In the text field next to the "At Address" button, enter the Tangle Staking precompile address: - `0x0000000000000000000000000000000000000800` -7. Click on "At Address" to load the already-deployed precompile into Remix. You should now see the interface methods under "Deployed Contracts." - ---- - -## Example Calls - -Below are simple examples of how to interact with a few of the core methods in the interface. All calls should be made against the loaded precompile in Remix. - -### 1. Read the Current Era - -1. Expand `currentEra()` -2. Click "call" -3. The result returned is the current era index on Tangle. - -### 2. Bond Tokens - -1. Expand `bond(uint256, bytes32)` -2. Enter the amount of tokens (in Wei) to bond. -3. Enter the "payee" as a bytes32-encoded value. For example, if you want staking rewards to go to your stash account, you might pass the stash account bytes in little-endian or a relevant encoding. -4. Click "transact" -5. Approve the MetaMask transaction. - -### 3. Nominate Validators - -1. Expand `nominate(bytes32[])` -2. Provide an array of validator stash addresses in bytes32 form (for example, ["0xabc123...","0xdef456..."]). -3. Click "transact" -4. Approve the MetaMask transaction to become a nominator for those validators. - -### 4. Unbond Tokens - -1. Expand `unbond(uint256)` -2. Enter the amount of tokens (in Wei) you want to unbond. -3. Click "transact" -4. Approve the MetaMask transaction. -5. Remember that there is an unbonding period before tokens become available. After this period, you can call withdrawUnbonded(uint32) to remove them from the staking system entirely. - ---- - -## More Information - -Please Refer to the Solidity interface above for more methods and details on how to interact with the Tangle staking precompile. diff --git a/pages/developers/precompiles/overview.mdx b/pages/developers/precompiles/overview.mdx deleted file mode 100644 index 5eb1db05..00000000 --- a/pages/developers/precompiles/overview.mdx +++ /dev/null @@ -1,90 +0,0 @@ ---- -title: Solidity Precompiles -description: An overview of the available Solidity precompiles on Tangle. Precompiles enable you to interact with Substrate features using the Ethereum API. ---- - -# Overview of the Precompiled Contracts on Tangle - -## Overview - -On Tangle Network, a precompiled contract is native Substrate code that has an Ethereum-style address and can be called using the Ethereum API, like any other smart contract. 
The precompiles allow you to call the Substrate runtime directly which is not normally accessible from the Ethereum side of Tangle. - -The Substrate code responsible for implementing precompiles can be found in the EVM pallet. The EVM pallet includes the [standard precompiles found on Ethereum and some additional precompiles that are not specific to Ethereum](https://github.com/polkadot-evm/frontier/tree/master/frame/evm/precompile). It also provides the ability to create and execute custom precompiles through the generic [`Precompiles` trait](https://polkadot-evm.github.io/frontier/rustdocs/pallet_evm/trait.Precompile.html). There are several custom Tangle-specific precompiles that have been created. - -The Ethereum precompiled contracts contain complex functionality that is computationally intensive, such as hashing and encryption. The custom precompiled contracts on Tangle provide access to Substrate-based functionality such as staking, governance, and more. - -The Tangle-specific precompiles can be interacted with through familiar and easy-to-use Solidity interfaces using the Ethereum API, which are ultimately used to interact with the underlying Substrate interface. This flow is depicted in the following diagram: - -```mermaid -sequenceDiagram - participant User - participant EVM - participant Tangle - User->>EVM: Interact with precompile - EVM->>Tangle: Calls the precompile on Tangle Runtime - Tangle->>Tangle: Access the Substrate runtime - Tangle->>EVM: Return result - EVM->>User: Return result -``` - -## Precompiled Contract Addresses - -The precompiled contracts are categorized by address and based on the origin network. If you were to convert the precompiled addresses to decimal format, and break them into categories by numeric value, the categories are as follows: - -- **0-1023** - [Ethereum MainNet precompiles](#ethereum-precompiles) -- **1024-2047** - precompiles that are [not in Ethereum and not Tangle specific](#non-tangle-specific-nor-ethereum-precompiles) -- **2048-4095** - [Tangle specific precompiles](#tangle-specific-precompiles) - -### Ethereum Precompiles - -| Precompile | Description | Address | -| ---------------------- | ----------------------------------------------------------------------------------------------------------------- | -------------------------------------------- | -| ECRECOVER | Recovers the public key associated with the given signature, a critical operation in verifying wallet signatures. | `0x0000000000000000000000000000000000000001` | -| SHA256 | Computes the SHA256 cryptographic hash function, widely used for data integrity verification. | `0x0000000000000000000000000000000000000002` | -| RIPEMD160 | Calculates the RIPEMD-160 hash, which is used in various security applications and protocols. | `0x0000000000000000000000000000000000000003` | -| Identity | A simple data copy operation. | `0x0000000000000000000000000000000000000004` | -| Modular Exponentiation | Performs modular exponentiation, a key operation in many cryptographic functions. | `0x0000000000000000000000000000000000000005` | -| BN128Add | Performs point addition on a BN128 elliptic curve. | `0x0000000000000000000000000000000000000006` | -| BN128Mul | Performs point multiplication on a BN128 elliptic curve. | `0x0000000000000000000000000000000000000007` | -| BN128Pairing | Checks the pairing on a BN128 elliptic curve. | `0x0000000000000000000000000000000000000008` | -| Blake2 | Computes the Blake2 cryptographic hash function. 
| `0x0000000000000000000000000000000000000009` | - -### Non Tangle Specific Nor Ethereum Precompiles - -| Precompile | Description | Address | -| ------------------- | ----------------------------------------------------------------- | -------------------------------------------- | -| SHA3FIPS256 | Computes the SHA3 (FIPS 202 compliant) hash function. | `0x0000000000000000000000000000000000000400` | -| Dispatch | Handles dispatching and managing contract calls and interactions. | `0x0000000000000000000000000000000000000401` | -| ECRecoverPublicKey | Recovers the public key from an elliptic curve signature. | `0x0000000000000000000000000000000000000402` | -| Curve25519Add | Adds two Curve25519 points. | `0x0000000000000000000000000000000000000403` | -| Curve25519ScalarMul | Multiplies a Curve25519 point by a scalar. | `0x0000000000000000000000000000000000000404` | -| Ed25519Verify | Verifies an Ed25519 signature. | `0x0000000000000000000000000000000000000405` | - -### Tangle Specific Precompiles - -| Precompile | Description | Address | -| --------------------- | --------------------------------------------------------------------------- | -------------------------------------------- | -| Staking | Handles staking-related operations. | `0x0000000000000000000000000000000000000800` | -| Vesting | Manages vesting schedules. | `0x0000000000000000000000000000000000000801` | -| Erc-20 Balances | Treats TNT as a native ERC20 token on the EVM side of Tangle. | `0x0000000000000000000000000000000000000802` | -| DemocracyInterface | An interface for interacting with on-chain governance. | `0x0000000000000000000000000000000000000803` | -| Batch | Allows for multiple calls to be made within a single transaction. | `0x0000000000000000000000000000000000000804` | -| Call Permit | Facilitates authorized calls (for gasless transactions). | `0x0000000000000000000000000000000000000805` | -| Preimage | Used for managing preimages, which are proposals before they become public. | `0x0000000000000000000000000000000000000806` | -| Precompile-Registry | Manages the registration of new precompiles. | `0x0000000000000000000000000000000000000807` | -| Ecdsa-Secp256k1 | Verifies ECDSA signatures using the secp256k1 curve. | `0x0000000000000000000000000000000000000816` | -| Ecdsa-Secp256r1 | Verifies ECDSA signatures using the secp256r1 curve. | `0x0000000000000000000000000000000000000817` | -| Ecdsa-Stark | Verifies ECDSA signatures using the Stark curve. | `0x0000000000000000000000000000000000000818` | -| Schnorr-Sr25519 | Verifies Schnorr signatures using the Sr25519 curve. | `0x0000000000000000000000000000000000000819` | -| Schnorr-Secp256k1 | Verifies Schnorr signatures using the secp256k1 curve. | `0x000000000000000000000000000000000000081a` | -| Schnorr-Ed25519 | Verifies Schnorr signatures using the Ed25519 curve. | `0x000000000000000000000000000000000000081b` | -| Schnorr-Ed448 | Verifies Schnorr signatures using the Ed448 curve. | `0x000000000000000000000000000000000000081c` | -| Schnorr-P256 | Verifies Schnorr signatures using the P256 curve. | `0x000000000000000000000000000000000000081d` | -| Schnorr-P384 | Verifies Schnorr signatures using the P384 curve. | `0x000000000000000000000000000000000000081e` | -| Schnorr-Ristretto255 | Verifies Schnorr signatures using the Ristretto255 curve. | `0x000000000000000000000000000000000000081f` | -| Schnorr-Taproot | Verifies Schnorr signatures using the Taproot Scheme. 
| `0x0000000000000000000000000000000000000820` | -| Bls12-381 | Performs operations on the BLS12-381 curve. | `0x0000000000000000000000000000000000000821` | -| Tangle LST | Provides functions for managing liquid staking pools. | `0x0000000000000000000000000000000000000809` | -| MultiAsset Delegation | Provides functions for managing multi-asset delegation. | `0x0000000000000000000000000000000000000822` | -| Credits | Provides functions for managing cloud credits system. | `0x0000000000000000000000000000000000000825` | -| Services | Provides functions for managing service blueprints and instances. | `0x0000000000000000000000000000000000000900` | diff --git a/pages/developers/precompiles/utility/non-specific.mdx b/pages/developers/precompiles/utility/non-specific.mdx deleted file mode 100644 index 0e671b05..00000000 --- a/pages/developers/precompiles/utility/non-specific.mdx +++ /dev/null @@ -1,168 +0,0 @@ ---- -title: Non-Network Specific Precompiles -description: Learn how to use precompiled contracts, which are not specific to Ethereum or Tangle, yet are supported for use in your application. -keywords: ethereum, Tangle, StorageCleaner, ECRecoverPublicKey, sha3FIPS256 ---- - -import NetworkTabs from "../../../../components/NetworkResources.tsx" - -# Non-Network Specific Precompiled Smart Contracts - -## Introduction - -A precompiled contract, or precompile, is a set of programmed functionalities hard-coded into the blockchain client. Precompiles perform computationally heavy tasks, such as cryptographic processes like hashing. Moving these functionalities to the blockchain client serves the dual purpose of making the computation more efficient than using a traditional smart contract and ensuring everyone has access to the complete and accurate set of processes and algorithms required to operate correctly. - -Precompile functionality is bundled and shared under a smart contract address, which allows interactions similar to those of a traditional smart contract. Some precompiled contracts are not specific to Ethereum or Tangle, but are supported for use in your Tangle-based application. - -The nonspecific precompiles currently included in this category include `StorageCleaner`, `ECRecoverPublicKey`, and `SHA3FIPS256`. - -In the next section, you will learn more about the functionalities included in these precompiles. - -## Clear Storage Entries with StorageCleaner - -The primary function of the `StorageCleaner` precompile is to clear storage entry key-value pairs for a smart contract marked as self-destructed, previously referred to as "suicided." `StorageCleaner` includes functionality to iterate over a list of addresses to identify self-destructed contracts and delete the appropriate storage entries associated with identified addresses. You can also input a numeric limit to prevent the precompile from consuming too much gas. - -With the implementation of [EIP-6780: SELFDESTRUCT](https://eips.ethereum.org/EIPS/eip-6780) as part of the Ethereum Cancun/Dencun upgrade, contracts can only be self-destructed in the same transaction where they are created. This limitation keeps storage entries small and allows them to be automatically deleted during destruction. The `StorageCleaner` precompile remains available when a legacy contract needs storage entries cleared. - -## Retrieve a Public Key with ECRecoverPublicKey - -The primary function of the `ECRecoverPublicKey` precompile is to recover the public key used to create a digital signature from a given message hash and signature. 
This precompile is similar to [ECRecover](https://www.evm.codes/precompiled?fork=cancun#0x01), with the exception of returning the public key of the account that signed the message rather than the account address. - -In the following sections, you will learn how to use the `ECRecoverPublicKey` precompile. - -### Checking Prerequisites - -The versions used in this example are v20.15.0 (Node.js) and 10.7.0 (npm). You will also need to install the [Web3](https://web3js.readthedocs.io/en/latest) package by executing: - -```bash -npm install --save web3 -``` - -To verify the installed version of Web3, you can use the `ls` command: - -```bash -npm ls web3 -``` - -This example uses version 4.11.1. You will also use [Remix](https://remix.ethereum.org), connecting it to the Tangle Testnet via MetaMask. - - - -### Retrieve Transaction Signature Values - -To use the `ECRecoverPublicKey` precompile, you must first sign a message to create and retrieve the message hash and transaction signature values (`v`, `r`, `s`) to pass as arguments in the contract call. Always use security best practices when handling private keys. - -Create a new file called `signMessage.js` in your project directory: - -```bash -touch signMessage.js -``` - -Open `signMessage.js` in your code editor and add the following script to initialize Web3 with the Tangle Testnet, sign and hash the message, and return the signature values: - -```js -// Example script to sign a message using an account on Tangle Testnet - -const { Web3 } = require('web3'); - -// Provider -const web3 = new Web3('https://testnet-rpc.tangle.tools'); - -// Address and Private Key -const address = '0x6Be02d1d3665660d22FF9624b7BE0551ee1Ac91b'; -const pk1 = '99B3C12287537E38C90A9219D4CB074A89A16E9CDB20BF85728EBD97C343E342'; -const msg = web3.utils.sha3('supercalifragilisticexpialidocious'); - -async function signMessage(pk) { - try { - // Sign and get Signed Message - const smsg = await web3.eth.accounts.sign(msg, pk); - console.log(smsg); - } catch (error) { - console.error(error); - } -} - -signMessage(pk1); -``` - -Return to your terminal command line to run the script with this command: - -```bash -node signMessage.js -``` - -A typical output for the code above may look like the following: - -```js -{ - message: '0xc2ae6711c7a897c75140343cde1cbdba96ebbd756f5914fde5c12fadf002ec97', - messageHash: '0xc51dac836bc7841a01c4b631fa620904fc8724d7f9f1d3c420f0e02adf229d50', - v: '0x1b', - r: '0x44287513919034a471a7dc2b2ed121f95984ae23b20f9637ba8dff471b6719ef', - s: '0x7d7dc30309a3baffbfd9342b97d0e804092c0aeb5821319aa732bc09146eafb4', - signature: '0x44287513919034a471a7dc2b2ed121f95984ae23b20f9637ba8dff471b6719ef7d7dc30309a3baffbfd9342b97d0e804092c0aeb5821319aa732bc09146eafb41b' -} -``` - -Save these values as you will need them in the next section. - -### Test ECRecoverPublicKey Contract - -You can now visit [Remix](https://remix.ethereum.org/) to test the precompiled contract. Note that you could also use the Web3.js library, but in this case, you can go to Remix to ensure it is using the precompiled contract on the blockchain. 
The Solidity code you can use to retrieve the public key is the following: - -```solidity -// SPDX-License-Identifier: MIT - -pragma solidity >=0.8.2 <0.9.0; - -contract RecoverPublicKey { - function recoverPublicKey( - bytes32 hash, - uint8 v, - bytes32 r, - bytes32 s - ) public view returns (bytes memory) { - address precompileAddress = 0x0000000000000000000000000000000000000402; - (bool success, bytes memory publicKey) = precompileAddress.staticcall( - abi.encodeWithSignature( - "ECRecoverPublicKey(bytes32,uint8,bytes32,bytes32)", - hash, - v, - r, - s - ) - ); - require(success, "ECRecoverPublicKey failed"); - return publicKey; - } -} -``` - -Using the Remix compiler and deployment, and with MetaMask pointing to Tangle Testnet, you can deploy the contract and call the `recoverPublicKey()` method. It returns the public key for the account that signed the message. You can then use this public key value for other cryptographic functions and verifications. - -## Create a Hash with SHA3FIPS256 - -SHA3-256 is part of the SHA-3 family of cryptographic hashes codified in [FIPS202](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf) that produces an output 256 bits in length. Although the name is similar to SHA256, the SHA-3 family is built with an entirely different algorithm and accordingly produces a different hash output than SHA256 for the same input. You can verify this yourself using this [SHA3-256 Hash Calculator tool](https://md5calc.com/hash/sha3-256). After calculating the SHA3-256 output, change the algorithm in the drop-down selector to SHA256 and take note of the resulting output. - -Currently, there is no SHA3-256 support in Solidity, so it needs to be called with inline assembly. The following sample code can be used to call this precompile on Tangle. - -```solidity -pragma solidity ^0.7.0; - -contract Precompiles { - function sha3fips(bytes memory data) public view returns (bytes32) { - bytes32[1] memory h; - assembly { - if iszero( - staticcall(not(0), 0x400, add(data, 32), mload(data), h, 32) - ) { - invalid() - } - } - return h[0]; - } -} -``` - -Using [Remix](https://remix.ethereum.org) with MetaMask pointing to Tangle Testnet, you can deploy the contract and call the `sha3fips(bytes memory data)` method to return the encoded string of the data parameter. diff --git a/pages/developers/precompiles/utility/registry.mdx b/pages/developers/precompiles/utility/registry.mdx deleted file mode 100644 index 14b4c016..00000000 --- a/pages/developers/precompiles/utility/registry.mdx +++ /dev/null @@ -1,43 +0,0 @@ ---- -title: Precompile Registry -description: Learn how to access and interact with the Precompile Registry on Tangle, which can be used to determine whether a given address is a precompile and if it’s active. ---- - -import GithubFileReaderDisplay from "../../../../components/GithubFileReaderDisplay"; - -# Precompile Registry on Tangle - -## Introduction - -The Precompile Registry is a single source of truth for all available precompiles on Tangle. It can be used to determine whether an address corresponds to a precompile and whether a precompile is active or deprecated. This helps developers prepare for potential backward-incompatible changes by providing an exit strategy or deprecation plan when precompiles evolve over time. - -A key additional purpose of the Precompile Registry is to allow any user to set "dummy code" (`0x60006000fd`) for a precompile address. By default, precompiles do not have bytecode. 
Some Solidity checks require contract bytecode to be non-empty in order to call functions. The dummy code can be used to bypass such checks. - -## Precompile Registry Addresses - -Below are the addresses for the Registry Precompile on Tangle: - -- Tangle Mainnet: `0x0000000000000000000000000000000000000807` -- Tangle Testnet: `0x0000000000000000000000000000000000000807` - -## The Solidity Interface - -Below is the Solidity interface for interacting with the Precompile Registry. It exposes three main functions: `isPrecompile`, `isActivePrecompile`, and `updateAccountCode`. - - - -## Interact with the Precompile Registry Using Remix - -You can interact with the Precompile Registry via [Remix](https://remix.ethereum.org/). Below is a general guide: - -1. Create a new file in Remix and paste the interface above (or load it from a GitHub repo of your choice). -2. In the "Compile" tab, compile the interface. -3. Go to the "Deploy and run transactions" tab: - - Choose "Injected Provider - MetaMask" in the ENVIRONMENT dropdown (or another environment that points to Tangle). - - Select the compiled interface in the CONTRACT dropdown. - - In the "At Address" field, input the Tangle Mainnet or Tangle Testnet address of the Registry Precompile (for example, `0x0000000000000000000000000000000000000807` for Tangle Testnet). - - Click "At Address." The precompile interface will appear under "Deployed Contracts." -4. Interact with any of the methods (e.g., call `isPrecompile` to check if an address is recognized as a precompile). diff --git a/pages/developers/precompiles/ux/batch.mdx b/pages/developers/precompiles/ux/batch.mdx deleted file mode 100644 index 3e8bf5aa..00000000 --- a/pages/developers/precompiles/ux/batch.mdx +++ /dev/null @@ -1,224 +0,0 @@ ---- -title: "Batch Precompile Contract" -description: "Learn how to transact multiple transfers and contract interactions at once via a Solidity interface with Tangle's Batch Precompile contract." -keywords: ["solidity", "ethereum", "batch", "transaction", "tangle", "precompiled", "contracts"] ---- - -import GithubFileReaderDisplay from "../../../../components/GithubFileReaderDisplay"; - -# Interacting with the Batch Precompile - -## Introduction - -The batch precompiled contract on Tangle allows developers to combine multiple EVM calls into one. - -Normally, having users interact with multiple contracts would require multiple transaction confirmations in the user's wallet. An example would be approving a smart contract's access to a token, then transferring it. With the batch precompile, developers can enhance the user experience with batched transactions, as the number of transactions a user is required to confirm is reduced to one. Additionally, gas fees can be reduced since batching avoids multiple base gas fees (the initial 21000 units of gas spent to begin a transaction). - -The precompile interacts directly with the EVM pallet on Tangle. The caller of the batch function will have their address act as the `msg.sender` for all subtransactions, but unlike [delegate calls](https://docs.soliditylang.org/en/v0.8.15/introduction-to-smart-contracts.html#delegatecall-callcode-and-libraries), the target contract will still affect its own storage. It is effectively the same as if the user signed multiple transactions, but with only one confirmation. 
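Before walking through the Remix flow below, here is a minimal script-level sketch of the approve-then-call pattern described above, batched into a single transaction. It assumes ethers v6, a `batchAll(address[], uint256[], bytes[], uint64[])` shape inferred from the parameter descriptions on this page, and hypothetical token and target contracts; confirm the published Batch interface and the precompile address for your network before relying on it.

```js
// A minimal sketch (assumptions noted above): batching an ERC-20 approval and
// a follow-up contract call into one transaction with batchAll. The batchAll
// fragment is inferred from the parameter descriptions on this page, and the
// token/target contracts and PRIVATE_KEY are placeholders.
const { ethers } = require("ethers");

const BATCH_PRECOMPILE = "0x0000000000000000000000000000000000000808";
const TOKEN_ADDRESS = "INSERT_ERC20_TOKEN_ADDRESS";   // placeholder
const TARGET_ADDRESS = "INSERT_TARGET_CONTRACT_ADDRESS"; // placeholder contract that pulls the approved tokens

const batchAbi = [
  "function batchAll(address[] to, uint256[] value, bytes[] callData, uint64[] gasLimit)",
];
const erc20 = new ethers.Interface(["function approve(address spender, uint256 amount)"]);
// Hypothetical target function; substitute the call your own contract expects.
const target = new ethers.Interface(["function deposit(uint256 amount)"]);

async function main() {
  const provider = new ethers.JsonRpcProvider("https://testnet-rpc.tangle.tools");
  const signer = new ethers.Wallet(process.env.PRIVATE_KEY, provider);
  const batch = new ethers.Contract(BATCH_PRECOMPILE, batchAbi, signer);

  const amount = ethers.parseEther("1");
  const tx = await batch.batchAll(
    [TOKEN_ADDRESS, TARGET_ADDRESS], // subcall targets
    [0, 0],                          // no native currency attached to either subcall
    [
      erc20.encodeFunctionData("approve", [TARGET_ADDRESS, amount]),
      target.encodeFunctionData("deposit", [amount]),
    ],
    [],                              // empty gasLimit array: forward remaining gas
  );
  await tx.wait(); // one confirmation covers both subcalls
}

main().catch(console.error);
```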
- -The precompile is located at the following addresses: - -- Tangle Mainnet: `0x0000000000000000000000000000000000000808` -- Tangle Testnet: `0x0000000000000000000000000000000000000808` - -## The Batch Solidity Interface - -Below is the Solidity interface for the batch precompile on Tangle, which exposes three functions: - - - -Below is more detail on how these functions work: - -### batchSome - -Performs multiple calls, where the same index of each array combines into the information required for a single subcall. If a subcall reverts, following subcalls will still be attempted. - -- `to` — array of addresses to direct subtransactions to, where each entry is a subtransaction -- `value` — array of native currency values to send in the subtransactions, where the index corresponds to the subtransaction of the same index in the `to` array. If this array is shorter than the `to` array, all the following subtransactions will default to a value of 0 -- `callData` — array of call data to include in the subtransactions, where the index corresponds to the subtransaction of the same index in the `to` array. If this array is shorter than the `to` array, all of the following subtransactions will include no call data -- `gasLimit` — array of gas limits for each subtransaction, where the index corresponds to the subtransaction of the same index in the `to` array. Values of 0 are interpreted as "unlimited" and will have all remaining gas of the batch transaction forwarded. If this array is shorter than the `to` array, all of the following subtransactions will have all remaining gas forwarded - -### batchSomeUntilFailure - -Performs multiple calls, where the same index of each array combines into the information required for a single subcall. If a subcall reverts, no following subcalls will be executed, but the successful subcalls remain intact. It does not revert the entire batch transaction. - -- `to` — array of addresses to direct subtransactions to -- `value` — array of native currency values to send -- `callData` — array of call data to include in each subtransaction -- `gasLimit` — array of gas limits for each subtransaction - -### batchAll - -Performs multiple calls atomically. If a subcall reverts, all subcalls will revert. - -- `to` — array of addresses to direct subtransactions to -- `value` — array of native currency values to send -- `callData` — array of call data to include in each subtransaction -- `gasLimit` — array of gas limits for each subtransaction - -## Interact with the Solidity Interface - -### Checking Prerequisites - -To follow along with this tutorial, you will need to have: - -- [MetaMask installed and connected to Tangle Testnet or Tangle Mainnet](#) -- Create or have two accounts on Tangle Testnet to test out the different features in the batch precompile -- At least one of the accounts will need to be funded with test tokens on Tangle Testnet. - -### Example Contract - -The following contract, `SimpleContract.sol`, will be used as an example of batching contract interactions, but in practice, any contract can be interacted with: - -```solidity -// SPDX-License-Identifier: GPL-3.0-only -pragma solidity >=0.8.3; - -contract SimpleContract { - mapping(uint256 => string) public messages; - - function setMessage(uint256 id, string memory message) public { - messages[id] = message; - } -} -``` - -### Remix Set Up - -You can interact with the batch precompile using [Remix](https://remix.ethereum.org). 
To add the interface and the example contract and follow along with this tutorial, you will need to: - -1. Click on the **File explorer** tab in Remix -2. Create a file named **Batch.sol**, and paste in the batch interface shown above -3. Create a file named **SimpleContract.sol**, and paste in the `SimpleContract` provided above - -### Compile the Contract - -Next, you will need to compile both files in Remix: - -1. Open the **Batch.sol** file -2. Click on the **Compile** tab -3. Click on **Compile Batch.sol** - -If the interface was compiled successfully, you'll see a green checkmark next to the **Compile** tab. Then, repeat these steps for **SimpleContract.sol**. - -### Access the Precompile - -Instead of deploying the batch precompile, you will access the interface given the address of the precompiled contract: - -1. Click on the **Deploy and Run** tab (below the **Compile** tab) in Remix. The precompiled contract is already deployed on Tangle -2. Select **Injected Provider - MetaMask** under **ENVIRONMENT**. MetaMask may prompt you to connect your account -3. Ensure the correct account is displayed under **ACCOUNT** -4. Select **Batch - Batch.sol** under **CONTRACT** -5. Copy the batch precompile address for Tangle (0x0000000000000000000000000000000000000808) and paste it into the **At Address** field -6. Click **At Address** - -A new instance of **Batch - Batch.sol** will appear under **Deployed Contracts**. - -### Deploy Example Contract - -On the other hand, `SimpleContract.sol` will be deployed as a new contract. After compiling `SimpleContract.sol`: - -1. Click on the **Deploy and Run** tab -2. Select **Injected Provider - MetaMask** under **ENVIRONMENT** -3. Ensure the correct account is displayed under **ACCOUNT** -4. Select **SimpleContract - SimpleContract.sol** in the **CONTRACT** dropdown -5. Click **Deploy** -6. Confirm the MetaMask transaction - -A new instance of **SimpleContract** will appear under **Deployed Contracts**. - -### Send Native Currency via Precompile - -Sending native currency with the batch precompile involves specifying which addresses to send to and how much to send, all in a single batch call. For this example, you'll use the **batchAll** function to send native currency atomically in Tangle Testnet: - -1. Make sure you have enough test tokens in your connected wallet on Tangle Testnet -2. Expand the **Batch - Batch.sol** precompile instance -3. Expand the **batchAll** function -4. For the **to** input, provide the addresses you want to send tokens to, for example: - `["ADDRESS_1", "ADDRESS_2"]` -5. For the **value** input, provide the amounts to send in Wei, for example: - `["100000000000000000", "200000000000000000"]` - which corresponds to 0.1 and 0.2 tokens respectively -6. For the **callData** and **gasLimit** inputs, provide empty arrays: - `[]` -7. Click **transact** -8. Confirm in MetaMask - -Once the transaction is complete, confirm both recipient addresses have the appropriate token balances in MetaMask or via a block explorer. - -### Find a Contract Interaction's Call Data - -Visual interfaces like Remix and libraries like [Ethers.js](https://github.com/ethers-io/ethers.js) or [Web3.js](https://github.com/ChainSafe/web3.js) encapsulate the call data used to interact with Solidity contracts, but you can also obtain it explicitly to use with the batch precompile. - -Try finding a transaction's call data manually in Remix: - -1. Expand the deployed `SimpleContract.sol` instance -2. Expand the **setMessage** function -3. 
Enter example values, such as `id = 1` and `message = "moonbeam"` -4. Instead of clicking **transact**, click the copy button next to it to copy the call data - -This copied string is the function selector plus encoded arguments for the function call. - -### Function Interaction via Precompile - -To batch contract interactions, function inputs must be encoded as call data within the `callData` array. For an atomic operation where a single subcall failure reverts all subcalls: - -1. Copy the address of your `SimpleContract.sol` instance -2. Expand the **Batch - Batch.sol** instance under **Deployed Contracts** -3. Expand the **batchAll** function -4. For **to**, insert your contract's address, for example: - `["INSERT_SIMPLE_CONTRACT_ADDRESS"]` -5. For **value**, if your function call does not require any native currency, pass an array with `[0]` -6. For **callData**, insert the call data string you obtained earlier in brackets, for example: - `["0x648345c800000000000000000000000000000000000000000000000000000000..."]` -7. For **gasLimit**, insert an empty array `[]` or the gas limit you wish to enforce -8. Click **transact** and confirm in MetaMask - -Afterwards, you can verify the contract state change by calling functions (for example, `messages(1)`) on the deployed `SimpleContract`. - -### Combining Subtransactions - -So far, each operation has been separate, but the real power of batching is to combine transfers and contract interactions into a single transaction. For instance, consider these arrays when using **batchAll**: - -• Three subtransactions: - -1. A native token transfer to some address -2. A call to `SimpleContract` that sets a message -3. Another call to `SimpleContract` that sets another message - -• The `to` array might look like this: - -```text -[ - "0x6Be02d1d3665660d22FF9624b7BE0551ee1Ac91b", - "0xd14b70a55F6cBAc06d4FA49b99be0370D0e1BD39", - "0xd14b70a55F6cBAc06d4FA49b99be0370D0e1BD39" -] -``` - -• The `value` array: - -```text -["1000000000000000000", "0", "0"] -``` - -• The `callData` array (first item is empty, so the native token transfer does nothing beyond sending currency; the next two strings correspond to calls of `setMessage` with different parameters): - -```text -[ - "0x", - "0x648345c8000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000009796f752061726520610000000000000000000000000000000000000000000000", - "0x648345c800000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000e61206d6f6f6e6265616d2070726f000000000000000000000000000000000000" -] -``` - -• And an empty array for `gasLimit`: - -```text -[] -``` - -Entering these arrays under the **batchAll** function will execute all three subtransactions—one token transfer and two contract interactions—in a single transaction. diff --git a/pages/developers/precompiles/ux/call-permit.mdx b/pages/developers/precompiles/ux/call-permit.mdx deleted file mode 100644 index 540cd5bc..00000000 --- a/pages/developers/precompiles/ux/call-permit.mdx +++ /dev/null @@ -1,238 +0,0 @@ ---- -title: "Call Permit Precompile Contract" -description: "Learn how to use the Call Permit Precompile contract on Tangle to sign a permit for any EVM call that can be dispatched by anyone or any smart contract." 
keywords: [solidity, ethereum, call permit, permit, gasless transaction, tangle, precompiled, contracts] ---- - -import GithubFileReaderDisplay from "../../../../components/GithubFileReaderDisplay"; - -# Interacting with the Call Permit Precompile - -## Introduction - -The Call Permit Precompile on Tangle allows a user to sign a permit, an [EIP-712](https://eips.ethereum.org/EIPS/eip-712) signed message, for any EVM call. It can then be dispatched by anyone or any smart contract. The user who signed the permit is effectively “authorizing” another account or contract to execute the call on their behalf. This enables gas-less transactions because the dispatcher pays for fees on behalf of the signer. - -For example, Alice signs a call permit and Bob dispatches it. Bob pays for the transaction fees, so Alice does not need to hold any native tokens to cover gas. However, keep in mind that if the call includes a token transfer, the signer must have a sufficient balance of that token. - -## Precompile Address - -On Tangle, the Call Permit Precompile is located at the well-known address `0x0000000000000000000000000000000000000805`. Below are the addresses you can use depending on the network: - -- Tangle Mainnet: `0x0000000000000000000000000000000000000805` -- Tangle Testnet: `0x0000000000000000000000000000000000000805` - -## The Call Permit Solidity Interface - -Below is the recommended Solidity interface for interacting with the Call Permit Precompile on Tangle. Note that it follows the EIP-712 standard and can be used to dispatch gas-less transactions. - - - -When `dispatch` is called, the precompile checks the signed permit and the current nonce of the signer. If the permit is valid, the call is executed as if the signer itself had made the transaction. After a successful `dispatch`, the signer’s nonce is incremented automatically. - ---- - -## Setup the Example - -This section guides you through a simple usage example. You will: - -1. Deploy a sample contract, `SetMessage.sol`. -2. Generate and sign the permit using one account (for example, Alice). -3. Dispatch the call using another account (for example, Bob). - -### Prerequisites - -To follow this demonstration, you should: - -- Have [MetaMask installed](https://metamask.io/) in your browser. -- Connect MetaMask to Tangle Testnet (or Tangle Mainnet, if you prefer). -- Have at least two accounts on Tangle, one funded for paying fees (Bob) and one to act as the signer (Alice). - -### Example Contract (SetMessage.sol) - -Here is a simple contract used to illustrate the call permit process. It stores a string message: - -```solidity -// SPDX-License-Identifier: GPL-3.0 -pragma solidity 0.8.7; - -contract SetMessage { - string storedMessage; - - function set(string calldata x) public { - storedMessage = x; - } - - function get() public view returns (string memory) { - return storedMessage; - } -} -``` - -### Remix Setup - -A common way to work with the Call Permit Precompile is via [Remix](https://remix.ethereum.org). You can deploy the sample contract and interact with the precompile from the same workspace. Steps: - -1. Open Remix and enable the "File explorer". -2. Create a file named `SetMessage.sol` and paste the code above. -3. Also create a file named `CallPermit.sol` (or any name you choose) and paste the [interface](#the-call-permit-solidity-interface) from this documentation (if needed for reference). -4. Compile both files by selecting each and pressing the "Compile" button. - -### Deploying the Example Contract - -1. 
In Remix, go to the “Deploy & run transactions” panel. -2. Select “Injected Web3” or “Injected Provider - Metamask” from the Environment dropdown (ensuring your MetaMask is connected to Tangle). -3. Deploy `SetMessage.sol`. Confirm the transaction in MetaMask. -4. You should see the deployed contract under “Deployed Contracts”. - -Record or copy the newly deployed `SetMessage` contract address; you will need it when forming the permit data. - -### Accessing the Call Permit Precompile - -Since the Call Permit contract is precompiled and already deployed, you do not deploy it yourself. Instead, you point Remix to the address: - -1. Go to the “Deploy & run transactions” panel in Remix. -2. Leave the Environment set to “Injected Provider - Metamask”. -3. Next to "At Address", paste the well-known precompile address: - `0x0000000000000000000000000000000000000805` -4. Click “At Address” to tell Remix to load the Call Permit contract interface at that address. -5. Remix adds the Call Permit Precompile contract to your “Deployed Contracts” list. - ---- - -## Generate Call Permit Signature - -To dispatch a call permit, one must first sign the EIP-712 message that includes: - -- The signer’s address (`from`) -- The contract you want to call (`to`) -- The `value` (in wei/fungible tokens) -- The `data` you want to send, including function signatures and arguments -- A `gaslimit` to ensure the dispatcher doesn’t choose excessive gas -- The `deadline` for expiration -- The signer’s `nonce` from the Call Permit Precompile - -### Determining the Signer’s Nonce - -In Remix, expand the CallPermit precompile entry under “Deployed Contracts”; then input the signer’s address into the `nonces` function and press the button to read the current nonce. - -### Example Data - -For `SetMessage.sol`, suppose you want to set the message to "hello world". The contract’s function signature for `set(string)` is: - -- 4-byte function selector for `set(string)` -- Encoded string argument - -In hex, the payload can look like this: - -``` -0x4ed3885e -0000000000000000000000000000000000000000000000000000000000000020 -000000000000000000000000000000000000000000000000000000000000000b -68656c6c6f20776f726c64000000000000000000000000000000000000000000 -``` - -We recommend a gas limit of around 100000 for this example. - -### Signing the Permit in the Browser - -You can sign the permit in multiple ways. Below is an example using [JSFiddle](https://jsfiddle.net) and the MetaMask provider directly: - -1. In JSFiddle (or any similar environment), add Ethers.js as a resource. -2. Use this snippet (simplified example): - -```js -// Example snippet to sign the data via MetaMask in the browser - -// IMPORTANT: This is a simplified code snippet for demonstration only. -async function main() { - // Request accounts from MetaMask - const accounts = await window.ethereum.request({ method: "eth_requestAccounts" }); - const from = accounts[0]; - - // Replace these as appropriate - const to = "0x1234567890123456789012345678901234567890"; - const value = 0; // Setting to 0 for this example - const data = "0x4ed3885e..." 
// (truncated) your data from above - const gaslimit = 100000; - const nonce = 0; // The first time you do this, it might be 0 - const deadline = Math.floor(Date.now() / 1000) + 600; // 10 mins from "now" - - const typedData = JSON.stringify({ - types: { - EIP712Domain: [ - { name: "name", type: "string" }, - { name: "version", type: "string" }, - { name: "chainId", type: "uint256" }, - { name: "verifyingContract", type: "address" }, - ], - CallPermit: [ - { name: "from", type: "address" }, - { name: "to", type: "address" }, - { name: "value", type: "uint256" }, - { name: "data", type: "bytes" }, - { name: "gaslimit", type: "uint64" }, - { name: "nonce", type: "uint256" }, - { name: "deadline", type: "uint256" }, - ], - }, - primaryType: "CallPermit", - domain: { - name: "Call Permit Precompile", - version: "1", - chainId: 3799, // Tangle Testnet - verifyingContract: "0x0000000000000000000000000000000000000805", - }, - message: { - from, - to, - value, - data, - gaslimit, - nonce, - deadline, - }, - }); - - // Request the user to sign typed data - const signature = await window.ethereum.request({ - method: "eth_signTypedData_v4", - params: [from, typedData], - }); - - console.log("Signature:", signature); -} - -main().catch(console.error); -``` - -3. Run the snippet. MetaMask prompts you to sign. Approve the message, and you should see the signature in your console (it should look like a hex string, typically “0x” followed by 64 bytes plus the “v” byte). - -You can decode the signature into `v`, `r`, `s` fields using [Ethers.js](https://docs.ethers.org/v6/). You’ll need these fields to call `dispatch`. - -### Signing the Permit in Node.js - -Alternatively, you can use the MetaMask [`@metamask/eth-sig-util`](https://www.npmjs.com/package/@metamask/eth-sig-util) package with a private key in Node.js. Doing so requires you to be mindful about key storage. Once you have the signature, the process is the same: you break it down into `v`, `r`, and `s`. - ---- - -## Interact with the Precompile - -Once you have the call permit signature, you can test `dispatch` on Tangle. - -### Dispatch a Call - -1. In Remix, switch to the account that will pay fees (Bob). -2. Expand the Call Permit Precompile contract under “Deployed Contracts.” -3. Find and expand the `dispatch` function. -4. Fill in the fields with the same `from`, `to`, `value`, `data`, `gaslimit`, and `deadline` that you used for the signature. -5. Paste in `v`, `r`, and `s`. -6. Click “transact.” - -If your permit is valid and everything matches, the transaction should succeed. The call is effectively executed as if “Alice” had done it, while “Bob” pays the fees. - -### Verify the Result - -Return to the `SetMessage` contract you deployed. Call its `get` function to see if the stored message was updated to “hello world”. If so, congratulations! You have successfully dispatched a gas-less transaction on Tangle using the Call Permit Precompile. 
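The same dispatch can also be performed from a script instead of Remix. The sketch below is a minimal example, assuming ethers v6 and a `dispatch` parameter list matching the fields described above (`from`, `to`, `value`, `data`, `gaslimit`, `deadline`, `v`, `r`, `s`); confirm the exact types against the published Call Permit interface, and note that the addresses, call data, deadline, signature, and key shown are placeholders that must match what the signer actually signed.

```js
// A minimal sketch (not part of the original walkthrough): dispatching a
// signed call permit with ethers v6. The dispatch fragment mirrors the
// fields listed above; verify the parameter types against the published
// interface. All INSERT_* values and BOB_PRIVATE_KEY are placeholders.
const { ethers } = require("ethers");

const CALL_PERMIT_PRECOMPILE = "0x0000000000000000000000000000000000000805";
const callPermitAbi = [
  "function dispatch(address from, address to, uint256 value, bytes data, uint64 gaslimit, uint256 deadline, uint8 v, bytes32 r, bytes32 s) returns (bytes)",
];

async function main() {
  const provider = new ethers.JsonRpcProvider("https://testnet-rpc.tangle.tools");
  const bob = new ethers.Wallet(process.env.BOB_PRIVATE_KEY, provider); // the dispatcher pays gas
  const callPermit = new ethers.Contract(CALL_PERMIT_PRECOMPILE, callPermitAbi, bob);

  // These values must be identical to what Alice signed with
  // eth_signTypedData_v4 in the previous section.
  const from = "INSERT_ALICE_ADDRESS";
  const to = "INSERT_SET_MESSAGE_ADDRESS";
  const value = 0;
  const data = "INSERT_CALL_DATA";    // e.g. the set("hello world") payload shown above
  const gaslimit = 100000;
  const deadline = "INSERT_DEADLINE"; // the same deadline used in the signed message

  // Split the 65-byte signature returned by MetaMask into v, r, s.
  const { v, r, s } = ethers.Signature.from("INSERT_SIGNATURE");

  const tx = await callPermit.dispatch(from, to, value, data, gaslimit, deadline, v, r, s);
  await tx.wait();
}

main().catch(console.error);
```

After the transaction confirms, calling `get()` on `SetMessage` should return the signer's message even though the dispatcher paid the fees.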
diff --git a/pages/developers/protocol-architecture.mdx b/pages/developers/protocol-architecture.mdx new file mode 100644 index 00000000..dfcd33df --- /dev/null +++ b/pages/developers/protocol-architecture.mdx @@ -0,0 +1,39 @@ +--- +title: Protocol Architecture +--- + +# Protocol Architecture + +For a system-level view (roles, flows, and code maps), start here: + +- [System Architecture](/developers/system-architecture/overview) +- [API Reference](/developers/api/reference) + +Tangle is the current protocol (EVM-based) composed of a small set of core contracts: + +## Core Contracts + +- `Tangle`: protocol entrypoint for blueprints, operator registration, service lifecycle, jobs, payments, and slashing coordination. +- `MultiAssetDelegation`: staking and delegation system (operator self-stake, deposits, delegation, exits, and slashing application). +- `ServiceFeeDistributor` + `StreamingPaymentManager`: distributes the staker share of service fees (including streamed payments). +- `TangleMetrics` + `InflationPool`: optional metrics-driven TNT incentive budgeting (pre-funded; no minting). +- `RewardVaults`: optional TNT incentives per delegated asset with a deposit cap (funded by `InflationPool`). +- `OperatorStatusRegistry`: heartbeat-based liveness tracking for services (often driven by the operator’s blueprint manager). + +## Migration (TNT) + +TNT distribution at protocol launch is handled via: + +- `TangleMigration`: Merkle + SP1/ZK-gated claim contract for legacy-chain (SS58) allocations. +- Deploy-time carveouts to prevent non-claimable balances being stuck (treasury module accounts; foundation allocation). + +## How To Integrate + +Contract addresses are deployment-dependent. Use the published addresses and RPC for your environment: + +- [Endpoints and Integration](/developers/endpoints) + +For mechanics: + +- [Incentives](/network/incentives-overview) +- [Metrics and Scoring](/network/metrics-and-scoring) diff --git a/pages/developers/system-architecture/overview.mdx b/pages/developers/system-architecture/overview.mdx new file mode 100644 index 00000000..491e46cf --- /dev/null +++ b/pages/developers/system-architecture/overview.mdx @@ -0,0 +1,65 @@ +--- +title: System Architecture +description: A system-level map of Tangle’s on-chain protocol, off-chain runtime, and how each role engages. +--- + +import GithubFileReaderDisplay from "/components/GithubFileReaderDisplay"; + +# System Architecture + +Tangle is an EVM protocol for instantiating and operating **services (“blueprints”)** secured by staked capital. The system is intentionally split into: + +- **On-chain contracts** (service lifecycle, payments, staking, incentives, slashing) +- **Off-chain runtime** (operators running blueprint managers/runners that react to events and submit results) + +## Roles and How They Engage + +- **Blueprint developers** publish blueprint definitions and optionally run a service manager contract. +- **Operators** register for blueprints, run the off-chain runtime, and submit results + heartbeats. +- **Stakers / delegators** delegate assets to operators and (optionally) constrain delegation to specific blueprints. +- **Customers** create services (by request/approve or by signed quotes) and submit jobs. + +## End-to-End Flow (High Level) + +1. Developer **creates a blueprint** (definition + schemas + execution sources). +2. Operators **register** for that blueprint and advertise preferences. +3. 
Customer creates a service via either: + - **Request/approve** (customer specifies operators + security requirements), or + - **RFQ/quotes** (operators sign quotes and the customer creates service from those quotes). +4. Customers submit **jobs**; operators submit **results**. +5. Protocol distributes **fees** (developer/protocol/operator/stakers) and tracks **metrics**. +6. Operators submit **heartbeats**; authorized parties can propose **slashes** with a dispute window. + +## Code Map + +### On-Chain (tnt-core) + +| Component | Responsibility | Code | +| ------------------------ | -------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------- | +| `Tangle` | Facet router (dispatches calls by selector) | https://github.com/tangle-network/tnt-core/blob/main/src/Tangle.sol | +| `Tangle*Facet` modules | Public protocol surface (blueprints/services/jobs/payments/quotes/…) | https://github.com/tangle-network/tnt-core/tree/main/src/facets/tangle | +| `core/*` modules | Shared implementations used by facets | https://github.com/tangle-network/tnt-core/tree/main/src/core | +| `MultiAssetDelegation` | Staking + delegation + exits + slashing application | [MultiAssetDelegation.sol](https://github.com/tangle-network/tnt-core/blob/main/src/staking/MultiAssetDelegation.sol) | +| `ServiceFeeDistributor` | Staker fee distribution (USD-weighted, per-asset commitments) | https://github.com/tangle-network/tnt-core/blob/main/src/rewards/ServiceFeeDistributor.sol | +| `OperatorStatusRegistry` | Heartbeats, QoS signals, and optional metric forwarding | [OperatorStatusRegistry.sol](https://github.com/tangle-network/tnt-core/blob/main/src/staking/OperatorStatusRegistry.sol) | +| `TangleMetrics` | Lightweight activity recorder used by incentives | https://github.com/tangle-network/tnt-core/blob/main/src/rewards/TangleMetrics.sol | +| `TangleMigration` | TNT legacy-chain migration claim (Merkle + SP1/ZK) | https://github.com/tangle-network/tnt-core/blob/v2/packages/migration-claim/src/TangleMigration.sol | + +### Off-Chain ([Blueprint SDK](https://github.com/tangle-network/blueprint/tree/v2)) + +| Component | Responsibility | Code | +| ---------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------------------------- | +| Manager | Loads services from chain, selects runtime sources, runs blueprints | https://github.com/tangle-network/blueprint/tree/v2/crates/manager | +| Tangle EVM contexts | Provides typed clients + operator identity + env | https://github.com/tangle-network/blueprint/blob/v2/crates/contexts/src/tangle_evm.rs | +| Tangle EVM producer/consumer | EVM event ingestion + result submission helpers | https://github.com/tangle-network/blueprint/tree/v2/crates/tangle-evm-extra/src | + +## The Entrypoint Contract (Composition) + +The protocol is exposed as a set of **facets** registered on the `Tangle` router. The router maps function selectors to facet implementations and delegates calls. + + diff --git a/pages/developers/system-architecture/rewards.mdx b/pages/developers/system-architecture/rewards.mdx new file mode 100644 index 00000000..a178e561 --- /dev/null +++ b/pages/developers/system-architecture/rewards.mdx @@ -0,0 +1,101 @@ +--- +title: Rewards & Incentives +description: How service fees are split, how stakers earn, and how optional TNT incentives are budgeted. 
+--- + +import GithubFileReaderDisplay from "/components/GithubFileReaderDisplay"; + +# Rewards & Incentives + +Tangle separates **service fees** (paid by customers) from **incentives** (TNT budgets funded explicitly). + +
    + [Figure: Incentives flow for service fees and TNT budgets. Service fees split through Payments.sol; TNT budgets flow through InflationPool, RewardVaults, and ServiceFeeDistributor.]
    + +Claim paths: staker fees + staker inflation via `ServiceFeeDistributor`, TNT staking incentives via `RewardVaults`, and operator/customer/developer TNT via `InflationPool`. + +Code references: [Payments.sol](https://github.com/tangle-network/tnt-core/blob/main/src/core/Payments.sol), [ServiceFeeDistributor.sol](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/ServiceFeeDistributor.sol), [InflationPool.sol](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/InflationPool.sol), [RewardVaults.sol](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/RewardVaults.sol), [TangleMetrics.sol](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/TangleMetrics.sol) + +## Service Fee Split (Developer / Protocol / Operator / Stakers) + +When a service pays a fee, `Payments.sol` calculates and routes the split: + +- Developer payment (blueprint owner or manager override) +- Protocol payment (treasury) +- Operator rewards (pending claims) +- Staker share (via `ServiceFeeDistributor`, when configured) + + + +## Staker Distribution (USD-weighted) + +`ServiceFeeDistributor` accounts for: + +- Delegation score (principal × lock multiplier) +- Optional USD weighting (oracle-enabled) +- Blueprint selection mode (All vs Fixed) + +## Inflation-Funded Incentives (TNT) + +`InflationPool` is a budgeted, pre-funded source of TNT incentives: + +- **Staking portion** is distributed to `RewardVaults` and paid to delegators (with operator commission). +- **Operator / Customer / Developer portions** accrue as pending TNT balances and are claimed from `InflationPool`. +- **Staker portion** (if `stakersBps > 0`) is distributed by service exposure and routed through `ServiceFeeDistributor`. + +This keeps incentives explicit and avoids hidden inflation. + +## How To Integrate + +If you are building on top of this system, the safe default flows are: + +- Use `Tangle` for service payments and let `Payments.sol` route splits. +- Read staker fee rewards from `ServiceFeeDistributor` (or let users claim directly). +- Read TNT incentives from `RewardVaults`, and optional staker inflation from `ServiceFeeDistributor`. + +## Example: Fee Split And Staker Payout + +Service pays 10 ETH with a 20/20/40/20 split: + +- 2 ETH to developer, 2 ETH to treasury, 4 ETH to operators, 2 ETH to stakers. +- Staker share goes to `ServiceFeeDistributor`, where delegators earn by score and blueprint selection. + +## Code + Tests + +- Split logic: `src/v2/core/Payments.sol` +- Staker distribution: `src/v2/rewards/ServiceFeeDistributor.sol` +- Inflation budgets: `src/v2/rewards/InflationPool.sol` +- Staking incentives: `src/v2/rewards/RewardVaults.sol` +- Tests: + - `test/v2/tangle/Payments.t.sol` + - `test/v2/rewards/ServiceFeeDistributor.t.sol` + - `test/v2/rewards/ServiceFeeDistributorStreaming.t.sol` + - `test/v2/Rewards.t.sol` + - `test/v2/InflationPool.t.sol` + + + +## Optional TNT Incentives + +The “inflation” surface is intentionally **budget-driven**: TNT incentives are funded by explicit budgets rather than assuming continuous minting. 
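As a quick sanity check on the fee split example above, the arithmetic can be reproduced directly. The snippet below is illustrative only: it mirrors the 10 ETH, 20/20/40/20 example expressed in basis points (the `stakersBps`-style parameters mentioned above), and the actual parameters and rounding behavior in `Payments.sol` may differ.

```js
// Illustrative arithmetic only (not protocol code): the 10 ETH service fee
// split 20/20/40/20 between developer, protocol, operators, and stakers.
const fee = 10n * 10n ** 18n; // 10 ETH in wei
const BPS = 10_000n;

const splitBps = { developer: 2_000n, protocol: 2_000n, operators: 4_000n, stakers: 2_000n };

const amounts = Object.fromEntries(
  Object.entries(splitBps).map(([k, bps]) => [k, (fee * bps) / BPS]),
);

// developer: 2 ETH, protocol: 2 ETH, operators: 4 ETH, stakers: 2 ETH.
// The stakers portion is what ServiceFeeDistributor then divides by
// delegation score and blueprint selection.
console.log(amounts);
```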
+ +- [Incentives](/network/incentives-overview) +- [Tokenomics: Incentive Budgets](/network/tokenomics/inflation) diff --git a/pages/developers/tangle-avs.mdx b/pages/developers/tangle-avs.mdx deleted file mode 100644 index 08a7797b..00000000 --- a/pages/developers/tangle-avs.mdx +++ /dev/null @@ -1,125 +0,0 @@ -import GithubFileReaderDisplay from "../../components/GithubFileReaderDisplay"; - -# Getting Started with Tangle Blueprints - -Welcome to the Tangle Blueprint tutorial! This guide will walk you through creating a simple `Hello World` Blueprint for Tangle. By the end of this tutorial, you'll have a basic understanding of how to create, build, and deploy a Tangle Blueprint. - -## What are Tangle Blueprints? - -Tangle Blueprints are specifications for Actively Validated Services (AVS) on the Tangle Network. An AVS is an off-chain service that runs arbitrary computations for a user-specified period of time. Blueprints provide a useful abstraction, allowing developers to create reusable service infrastructures similar to smart contracts. - -## Prerequisites - -Before you begin, ensure you have the following installed: - -- [Rust](https://www.rust-lang.org/tools/install) -- [Forge](https://getfoundry.sh) -- [Tangle](https://github.com/tangle-network/tangle?tab=readme-ov-file#-getting-started-) -- [`cargo-tangle`](./cli/installation.mdx) CLI tool - -## Creating my First Blueprint - -See the [CLI Quickstart](./cli/quickstart.mdx) for instructions on creating a [blueprint](./blueprints/introduction.mdx). - -### Blueprint Workspace Structure - -Tangle Blueprints are structured as Rust workspaces with multiple packages: - -1. **Library Package**: Contains the core logic of your Blueprint, including job definitions. -2. **Binary Package**: Contains the entry point for your Blueprint runner. - -### Key Files - -#### lib/src/lib.rs - -This file contains the core logic of your Blueprint, including job definitions. Jobs are the main computational tasks that your Blueprint will execute. Here's an example of a simple "Hello World" job: - - - -This job takes an optional `who` parameter and returns a greeting. - -For more details on creating jobs, see our [Blueprint Job Documentation](/developers/blueprint-macros/jobs). - -#### bin/src/main.rs - -This file serves as the entry point for your Actively Validated Service (AVS) node. It sets up the runtime environment, initializes the necessary components, and starts the Blueprint Runner. Here's a breakdown of its key responsibilities: - -1. **Environment Setup**: It loads the configuration, initializes the logger, and sets up error handling. -2. **Client Initialization**: It creates a connection to the Tangle Network using the provided RPC endpoint. -3. **Router Configuration**: It sets up the router that directs job calls to the appropriate handlers. -4. **Producer Setup**: It configures producers that listen for events and prepare them for processing. -5. **Consumer Setup**: It configures consumers that handle processed results. -6. **Background Services**: It initializes optional background services required for jobs. -7. **Blueprint Runner**: It starts the Blueprint Runner, which orchestrates all components. - - - -## Blueprint Runner Architecture - -The Blueprint Runner is the core component that orchestrates the execution of your Blueprint. It consists of several key components: - -### Router - -The router is responsible for directing job calls to the appropriate job handlers based on job IDs. 
When a job is called, the router identifies the correct handler and passes the job parameters to it. - -### Producers - -Producers listen for events (such as on-chain events from Tangle Network) and prepare them for processing. They convert raw event data into a format that can be processed by your job handlers. - -### Consumers - -Consumers handle the results of processed jobs. They can perform actions such as sending transactions, updating state, or triggering other processes based on job results. - -### Background Services - -Background services are optional components that run continuously in the background. They can perform tasks such as monitoring, data collection, or periodic operations that are required for your jobs. - -## Building Your Project - -To build your project, run: - -```bash -cargo build -``` - -This command compiles your Rust code and checks for any errors. - -### Deploying Your Blueprint - -See [deploying your blueprint](./cli/quickstart.mdx#deploying-your-blueprint). - -## Next Steps - -Congratulations! You've created, built, and deployed your first Tangle Blueprint. Here are some suggestions for what to do next: - -1. Explore more complex job implementations in your library package. Learn more about [Jobs](/developers/blueprint-macros/jobs). {/* TODO: Use new jobs page */} - -2. Learn about [Context and Context Extensions](/developers/blueprint-macros/context) to manage dependencies and state in your Blueprint. - -3. Customize your Blueprint Runner with additional producers, consumers, and background services to handle more complex scenarios. - -4. If you're interested in building for EigenLayer, check out our guide on [Building an EigenLayer AVS](/developers/eigenlayer-avs). - -5. Implement tests for your Blueprint using `tokio::test`. Learn more about [Testing Blueprints](/developers/blueprint-macros/testing). - -6. Explore the Tangle network's features and how they interact with your Blueprint. Understand [EVM and Native Addresses](/developers/technicals/addresses) and [EVM to Substrate transfers](/developers/technicals/evm-substrate-transfers). - -7. Familiarize yourself with [EVM Precompiles](/developers/technicals/precompiles) available on Tangle Network. - -8. Learn about about advanced options when [Deploying Blueprints](./cli/tangle.mdx#deploying-a-blueprint). - -For more advanced topics and in-depth information, check out our other documentation pages and the [Rust async book](https://rust-lang.github.io/async-book/). - -## Feedback and Support - -If you encounter any issues or have questions, please don't hesitate to open an issue on our [GitHub repository](https://github.com/tangle-network/blueprint-template/issues). We're here to help you succeed in building with Tangle Blueprints! 
diff --git a/pages/developers/technicals/_meta.ts b/pages/developers/technicals/_meta.ts deleted file mode 100644 index 5b1b1191..00000000 --- a/pages/developers/technicals/_meta.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - addresses: "EVM and Native Addresses", - "evm-substrate-transfers": "EVM to Substrate transfers", - "json-rpc-endpoints": "EVM RPC Methods", - "deploy-using-hardhat": "Deploy Contracts with Hardhat", - "transaction-fees": "Calculating Transaction Fees", -}; - -export default meta; diff --git a/pages/developers/technicals/addresses.mdx b/pages/developers/technicals/addresses.mdx deleted file mode 100644 index 50555b58..00000000 --- a/pages/developers/technicals/addresses.mdx +++ /dev/null @@ -1,29 +0,0 @@ -# EVM Account Addresses on Tangle - -If you're interacting with a Frontier-enabled Substrate chain like Tangle Network, there are two account formats to be aware of: EVM accounts (H160) and Tangle native accounts (H256). You can hold Tangle assets in EVM addresses or native addresses and transfer them between the two, but it's important to understand how they are represented in relation to one another. - -To help you navigate between these formats, we've provided a handy **Address Converter** tool in [Resources](/resources/). Simply enter your Ethereum address (H160), and the tool will generate the corresponding Substrate address used on the Tangle Network chain. - -## Address Formats in Frontier-enabled Substrate Chains - -Frontier is a powerful toolset that allows Substrate-based blockchains to offer Ethereum Virtual Machine (EVM) compatibility. This means that developers can deploy and execute Solidity smart contracts on Substrate chains with minimal changes. When working with Frontier, it's essential to understand the different address formats and their relationships. In a Frontier-enabled Substrate chain, there are three primary address formats: - -1. **SS58 addresses**: SS58 is a simple account format designed for Substrate-based chains. It is heavily based on Bitcoin's Base-58-check format with a few alterations. The SS58 address is a base-58 encoded value that identifies a specific account on the Substrate chain. It consists of an address type, the encoded address, and a checksum. In the case of the Tangle Network, the chain ID and custom prefix used is `5845`, which yields the prefix `tg` when applied in conversion. - -2. **Ethereum-style addresses (H160)**: These addresses are 40 hex characters long (plus the "0x" prefix) and follow the Ethereum address format. They are derived from the private key used to sign transactions on the EVM side of the chain. - -3. **Substrate-style addresses (H256)**: These addresses are 256 bits long and are used natively by Substrate. They represent the raw, unencoded form of an account's public key or a hash value in Substrate. - -To bind an Ethereum H160 address with a Substrate H256 address, a truncated hash scheme is used. The first 160 bits (20 bytes) of the H256 address are taken and used as the corresponding H160 address. - -## Interacting with Frontier-enabled Substrate Chains - -When a user interacts with the EVM on a Frontier chain, they use their Ethereum-style address (H160). Behind the scenes, Frontier maps this H160 address to a corresponding Substrate-style address (H256) in the Substrate Balance pallet's storage. This mapping allows the user to hold and manage balances on the Substrate side. 
- -However, it's important to note that the user only has direct control over their H160 address and its associated private key. They cannot directly perform transactions or interact with Substrate-specific features using the mapped H256 address. To fully utilize the Substrate side of the chain, the user needs to have a separate SS58 address with its own private key. - -As a user, it's essential to understand the different address formats and their purposes when interacting with a Frontier-enabled Substrate chain. You'll need to manage your Ethereum-style address (H160) for EVM interactions and your SS58 address for Substrate-specific features. - -For developers building on a Frontier-enabled Substrate chain, it's crucial to be aware of these address formats and their relationships. You may need to provide clear instructions and tools to help users manage their addresses, perform cross-address transfers, and interact with both the EVM and Substrate components seamlessly. - -While the dual-address system may introduce some complexities, it also opens up a world of possibilities for interoperability and leveraging the strengths of both Ethereum and Substrate ecosystems. diff --git a/pages/developers/technicals/deploy-using-hardhat.mdx b/pages/developers/technicals/deploy-using-hardhat.mdx deleted file mode 100644 index 66824721..00000000 --- a/pages/developers/technicals/deploy-using-hardhat.mdx +++ /dev/null @@ -1,123 +0,0 @@ -# Deploy on Tangle using Hardhat - -## Pre-requisites and Assumptions - -This guide to deploying a smart contract on a Substrate-based blockchain network with EVM compatibility (similar to Moonbeam) using Hardhat assumes a basic understanding of Blockchain, Smart Contracts, Solidity, Hardhat and Substrate: You should be familiar with Ethereum as it forms the basis of any EVM-compatible blockchain. - -If any of the above assumptions do not hold true, we recommend taking the time to fill in the gaps in your knowledge and setup before proceeding. This will ensure that you can follow along with the guide effectively. - -## Important configurations - -| Configuration | Value | -| --------------------- | ---------- | -| Block Gas Limit | 60,000,000 | -| Transaction gas limit | 52,000,000 | - -## Setup - -1. **Install Node.js and npm** - Make sure you have Node.js and npm installed. The recommended versions are Node v14.17.6 and npm v6.14.15. - -2. **Install Hardhat** - Create a new directory and initialize an npm project. Then install Hardhat using npm: - -```bash -mkdir -cd -npm init -y -npm install --save-dev hardhat -``` - -3. **Create a new Hardhat project** - Run the following command to create a new Hardhat project: - -```bash -npx hardhat -``` - -Select "Create an empty hardhat.config.js" when prompted. - -## Configure Hardhat for Tangle Network - -1. **Install necessary plugins** - Install `@nomiclabs/hardhat-ethers`, `@nomiclabs/hardhat-waffle`, `ethereum-waffle`, and `ethers` plugins: - -```bash -npm install --save-dev @nomiclabs/hardhat-ethers @nomiclabs/hardhat-waffle ethereum-waffle ethers -``` - -2. **Update hardhat.config.js** - Open hardhat.config.js and replace its content with the following configuration, updating the placeholder fields: - -```javascript -require("@nomiclabs/hardhat-waffle"); - -module.exports = { - solidity: "0.8.0", - networks: { - tangle: { - url: "", - accounts: [`0x${}`], - chainId: , - gasPrice: 10000000000, - } - } -}; -``` - -
    -Tangle Chain Information -You can view the latest details on networks, chainIDs, RPC URLs on the [Network Information and Configurations](/network-information-configuration/) page. -
    - -Replace `` with the RPC URL of the Tangle Network. Replace `` with the private key of the account that will be used to deploy the contracts. `` should be replaced with the ChainId of the Tangle Network. - -## Deploy Contracts - -1. **Compile Contracts** - Assuming your contracts are in the contracts folder, you can compile them by running: - -```bash -npx hardhat compile -``` - -2. **Create a deployment script** - Create a new directory named scripts in your project root, then create a file in this directory, say deploy.js, with the following content: - -```javascript -async function main() { - const [deployer] = await ethers.getSigners(); - - console.log( - "Deploying contracts with the account:", - deployer.address - ); - - console.log("Account balance:", (await deployer.getBalance()).toString()); - - const Contract = await ethers.getContractFactory(""); - const contract = await Contract.deploy(); - - console.log("Contract address:", contract.address); -} - -main() - .then(() => process.exit(0)) - .catch((error) => { - console.error(error); - process.exit(1); - }); -``` - -Replace `` with the name of your contract and `` with the parameters required by your contract's constructor. - -3. **Run the deployment script** - You can now deploy your contract to Tangle Network using Hardhat by running: - -```bash -npx hardhat run --network tangle scripts/deploy.js -``` - -After running this command, Hardhat will execute the deployment script using the account and network configuration provided in `hardhat.config.js`. - -Make sure you update ``, ``, ``, ``, and `` with your actual values. diff --git a/pages/developers/technicals/evm-substrate-transfers.mdx b/pages/developers/technicals/evm-substrate-transfers.mdx deleted file mode 100644 index 45bb052e..00000000 --- a/pages/developers/technicals/evm-substrate-transfers.mdx +++ /dev/null @@ -1,129 +0,0 @@ -import EvmToSubstrateConverter from "../../../components/EvmToSubstrateConverter" -import { Callout } from "nextra/components" - -## Developer Resource - -### Cross-EVM/Substrate Token Transfers - -Handling cross-system token transfers between Substrate and EVM can be complex. Address mappings play a crucial role in facilitating these transfers. -While we provide - -#### Scenarios - -1. **Alice** only has an account on Tangle EVM using the Metamask wallet. -2. **Bob** has an account on Tangle using the Polkadot.js wallet, and another account on Tangle EVM using the Metamask wallet. -3. **Charlie** only has an account on Tangle using the Polkadot.js wallet. - -Assigned values: - -- **Alice's account:** `0xa5fAA47a324754354CB0A305941C8cCc6b5de296` -- **Bob's accounts:** `5FHneW46xGXgs5mUiveU4sbTyGBzmstUspZC92UhjJM694ty` and `0x690B9A9E9aa1C9dB991C7721a92d351Db4FaC990` -- **Charlie's account:** `5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y` - -### Address Mapping Explanation - -Address mappings between Substrate and EVM are one-way transformations that facilitate token transfers between the two systems. - -- **Substrate to EVM:** When a Substrate address is converted to an EVM address, the resulting EVM address can be used to receive tokens on the EVM side. The conversion involves extracting and hashing a part of the Substrate address, providing a unique EVM address corresponding to the original Substrate address. -- **EVM to Substrate:** Once tokens are sent to the EVM address, the recipient can interact with the Substrate network by calling the `evm.withdraw` function. 
This allows the recipient to withdraw tokens from the EVM environment back to the Substrate environment. - -Key Points: - -- The conversion is a one-way mapping from Substrate to EVM. -- The resulting EVM address is a hash of part of the Substrate address. -- Tokens can be received on the EVM side using the EVM address. -- The `evm.withdraw` function facilitates the transfer of tokens back to the Substrate side. - -### Convert Substrate Address to EVM - -To convert a Substrate address to an EVM address, the following script can be used: - -```typescript -import { decodeAddress } from "https://esm.sh/@polkadot/util-crypto"; -import { u8aToHex } from "https://esm.sh/@polkadot/util"; - -const input = Deno.args[0]; -if (!input) { - console.error("usage: deno run substrateToEvm.ts "); - Deno.exit(1); -} -const accountId = decodeAddress(input); -const res = accountId.subarray(0, 20); -const output = u8aToHex(res); -console.log({ input, output }); -// run using: -// $ deno run substrateToEvm.ts -``` - -The script takes a Substrate address as input, decodes it, and then extracts the first 20 bytes of the account ID. These 20 bytes are then converted into a hexadecimal string, resulting in an EVM-compatible address. - -#### Convert EVM Address to Substrate - -Here is an example using the Deno Runtime and @polkadot/util to convert an address from EVM to Substrate: - -You can also use this convenient tool: - - - -```tsx -import { - blake2AsU8a, - encodeAddress, -} from "https://esm.sh/@polkadot/util-crypto"; -import { - hexToU8a, - stringToU8a, - u8aConcat, -} from "https://esm.sh/@polkadot/util"; - -const input = Deno.args[0]; -if (!input) { - console.error("usage: deno run evmToSubstrate.ts "); - Deno.exit(1); -} -const addr = hexToU8a(input); -const data = stringToU8a("evm:"); -const res = blake2AsU8a(u8aConcat(data, addr)); -const output = encodeAddress(res, 42); -console.log({ input, output }); -// run using: -// $ deno run evmToSubstrate.ts -``` - -**Note** -The conversion from an EVM address to a Substrate address is a one-way operation. Due to the hashing process, it is not possible to reverse the process and obtain the original EVM address from the resulting Substrate address. - -#### Case 1: Sending from Substrate to EVM - -Bob wants to send 100 TNT to Alice, but he does not have the 100 TNT on his EVM account in Metamask. Therefore, he uses his Tangle account in the Polkadot.js wallet. - -1. Alice's address is `0xa5fAA47a324754354CB0A305941C8cCc6b5de296`. -2. Bob converts Alice's address to a substrate address using the `evmToSubstrate` function: - -```tsx -evmToSubstrate("0xa5fAA47a324754354CB0A305941C8cCc6b5de296"); -// => 5C9ysBsWKpw3D8MFaEauFgdtMPqboS64YNYHyu1rCynLyKMZ -``` - -3. Bob sends the 100 TNT to `5C9ysBsWKpw3D8MFaEauFgdtMPqboS64YNYHyu1rCynLyKMZ`. -4. Alice receives the 100 TNT in her Metamask wallet. - -#### Case 2: Sending from EVM to Substrate - -Alice wants to send 50 TNT to Charlie. However, Charlie only has a Substrate account that he controls in his Polkadot.js wallet. - -1. Charlie's address is `5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y`. -2. Alice converts Charlie's address to an EVM address using the `substrateToEvm` function. - -```tsx -substrateToEvm("5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y"); -// => 0x90b5ab205c6974c9ea841be688864633dc9ca8a3 -``` - -3. Alice uses her Metamask and sends 50 TNT to - `0x90b5ab205c6974c9ea841be688864633dc9ca8a3`. -4. Charlie's balance on Substrate remains the same! 
- > Because: Charlie needs to withdraw the balance from his EVM account. -5. Charlie goes to Polkadot.js and calls: - `evm.withdraw("0x90b5ab205c6974c9ea841be688864633dc9ca8a3", 50 TNT)`. -6. Charlie sees that he has now received 50 TNT in his account. diff --git a/pages/developers/technicals/json-rpc-endpoints.mdx b/pages/developers/technicals/json-rpc-endpoints.mdx deleted file mode 100644 index 31f0be00..00000000 --- a/pages/developers/technicals/json-rpc-endpoints.mdx +++ /dev/null @@ -1,45 +0,0 @@ -# Substrate and Custom JSON-RPC Methods - -RPCs are exposed as a method on a specific module. This signifies that once available, you can invoke any RPC via `api.rpc..(...params[])`. This is also applicable for accessing Ethereum RPCs using the Polkadot.js API, in the format of `polkadotApi.rpc.eth.*`. - -Certain methods accessible via the Polkadot.js API interface are also available as JSON-RPC endpoints on Tangle Network nodes. This section offers some examples; you can request a list of exposed RPC endpoints by invoking `api.rpc.rpc.methods()` or the `rpc_methods` endpoint indicated below. - -## Supported Ethereum methods - -- **[eth_protocolVersion](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_protocolversion)** — returns `1` by default -- **[eth_syncing](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_syncing)** — returns an object with data about the sync status or `false` -- **[eth_hashrate](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_hashrate)** — returns `"0x0"` by default -- **[eth_coinbase](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_coinbase)** — returns the latest block author. Not necessarily a finalized block -- **[eth_mining](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_mining)** — returns `false` by default -- **[eth_chainId](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_chainid)** — returns the chain ID used for signing at the current block -- **[eth_gasPrice](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_gasprice)** — returns the base fee per unit of gas used. 
This is currently the minimum gas price for each network -- **[eth_accounts](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_accounts)** — returns a list of addresses owned by the client -- **[eth_blockNumber](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_blocknumber)** — returns the highest available block number -- **[eth_getBalance](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getbalance)** — returns the balance of the given address -- **[eth_getStorageAt](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getstorageat)** — returns content of the storage at a given address -- **[eth_getBlockByHash](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getblockbyhash)** — returns information about the block of the given hash including `baseFeePerGas` on post-London blocks -- **[eth_getBlockByNumber](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getblockbynumber)** — returns information about the block specified by block number including `baseFeePerGas` on post-London blocks -- **[eth_getTransactionCount](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_gettransactioncount)** — returns the number of transactions sent from the given address (nonce) -- **[eth_getBlockTransactionCountByHash](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getblocktransactioncountbyhash)** — returns the number of transactions in a block with a given block hash -- **[eth_getBlockTransactionCountByNumber](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getblocktransactioncountbynumber)** — returns the number of transactions in a block with a given block number -- **[eth_getUncleCountByBlockHash](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getunclecountbyblockhash)** — returns `"0x0"` by default -- **[eth_getUncleCountByBlockNumber](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getunclecountbyblocknumber)** — returns `"0x0"` by default -- **[eth_getCode](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getcode)** — returns the code at given address at given block number -- **[eth_sendTransaction](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_sendtransaction)** — creates new message call transaction or a contract creation, if the data field contains code. Returns the transaction hash, or the zero hash if the transaction is not yet available -- **[eth_sendRawTransaction](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_sendrawtransaction)** — creates new message call transaction or a contract creation for signed transactions. Returns the transaction hash, or the zero hash if the transaction is not yet available -- **[eth_call](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_call)** — executes a new message call immediately without creating a transaction on the block chain, returning the value of the executed call -- **[eth_estimateGas](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_estimategas)** — returns an estimate amount of how much gas is necessary for a given transaction to succeed. 
You can optionally specify a `gasPrice` or `maxFeePerGas` and `maxPriorityFeePerGas` -- **[eth_feeHistory](https://docs.alchemy.com/alchemy/apis/ethereum/eth-feehistory)** — returns `baseFeePerGas`, `gasUsedRatio`, `oldestBlock`, and `reward` for a specified range of up to 1024 blocks -- **[eth_getTransactionByHash](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_gettransactionbyhash)** — returns the information about a transaction with a given hash. EIP-1559 transactions have `maxPriorityFeePerGas` and `maxFeePerGas` fields -- **[eth_getTransactionByBlockHashAndIndex](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_gettransactionbyblockhashandindex)** — returns information about a transaction at a given block hash, and a given index position. EIP-1559 transactions have `maxPriorityFeePerGas` and `maxFeePerGas` fields -- **[eth_getTransactionByBlockNumberAndIndex](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_gettransactionbyblocknumberandindex)** — returns information about a transaction at a given block number, and a given index position. EIP-1559 transactions have `maxPriorityFeePerGas` and `maxFeePerGas` fields -- **[eth_getTransactionReceipt](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_gettransactionreceipt)** — returns the transaction receipt of a given transaction hash. After London support was added in runtime 1200, a new field named `effectiveGasPrice` has been added to the receipt, specifying the gas price of the transaction -- **[eth_getUncleByBlockHashAndIndex](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getunclebyblockhashandindex)** — returns `null` by default -- **[eth_getUncleByBlockNumberAndIndex](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getunclebyblocknumberandindex)** — returns `null` by default -- **[eth_getLogs](https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getlogs)** — returns an array of all logs matching a given filter object - -More information will be added to this page. - -### Polkadot.js API Utility Functions - -The Polkadot.js API also incorporates numerous utility libraries for computing frequently used cryptographic primitives and hash functions. You can view the full list at https://www.npmjs.com/package/@polkadot/util-crypto/v/0.32.19. diff --git a/pages/developers/technicals/transaction-fees.mdx b/pages/developers/technicals/transaction-fees.mdx deleted file mode 100644 index e3df8e67..00000000 --- a/pages/developers/technicals/transaction-fees.mdx +++ /dev/null @@ -1,53 +0,0 @@ -# Understanding Transaction Fees on Tangle Network - -Tangle Network is a substrate-based blockchain that uses the Frontier pallet to provide Ethereum Virtual Machine (EVM) support. This allows Tangle to process transactions originating from both the Substrate and Ethereum ecosystems. - -Since Tangle Network incorporates transaction models from two different blockchain architectures, it's important for developers to understand the fee calculation mechanisms for each transaction type. - -## Substrate vs Ethereum Fees - -Substrate-based blockchains like Tangle Network use the concept of "weight" to calculate transaction fees. The heavier the computation and storage requirements of a transaction, the higher the fee. - -Transactions originating from the Ethereum side consume "gas units" instead. Gas represents the computational effort required to execute a transaction. Each operation has a fixed gas cost based on its complexity. 
The total fee is calculated by multiplying gas used by the gas price. - -## Ethereum Transactions on Tangle - -While adopting Ethereum's gas model, transaction fees on Tangle work a bit differently than on Ethereum itself: - -- Tangle implements a fee mechanism resembling EIP-1559 but with some modifications -- Gas used is derived from the Substrate extrinsic weight via a fixed factor -- There is a block limit for storage proofs which can cause "out of gas" errors even if gas remains -- Storage growth on chain state is accounted for with a new mechanism that increases gas costs for state-expanding transactions - -## Calculating Ethereum Transaction Fees - -The formula to calculate Ethereum transaction fees on Tangle is: - -``` -GasPrice = min(BaseFee + MaxPriorityFeePerGas, MaxFeePerGas) -Tx Fee = (GasPrice * Weight) / 25000 -``` - -Where: - -- `BaseFee` is a network-set minimum fee, adjusted based on congestion -- `MaxPriorityFeePerGas` and `MaxFeePerGas` are optionally set by the sender -- Division by 25000 converts weight to gas units - -The relevant parameters can be queried from Tangle Network's RPC endpoints for a given transaction. - -## Substrate Transaction Fees - -Substrate transaction fees on Tangle are more straightforward. The total fee paid is directly reported in the `TransactionFeePaid` event emitted by the `transactionPayment` pallet for each extrinsic. - -This event contains: - -1. The paying account -2. The total fee -3. The tip (an additional fee to incentivize inclusion) - -## In Summary - -Substrate transactions on Tangle use a weight-based fee model while Ethereum transactions consume gas, but with Tangle-specific adjustments to the gas price calculation and limits. - -By understanding these fee mechanisms, developers can estimate costs and optimize their Tangle Network transactions originating from both the Substrate and Ethereum sides. diff --git a/pages/developers/testing-with-tangle.mdx b/pages/developers/testing-with-tangle.mdx index b7265d38..1155b4c8 100644 --- a/pages/developers/testing-with-tangle.mdx +++ b/pages/developers/testing-with-tangle.mdx @@ -2,85 +2,59 @@ ## How to test your blueprint with Tangle -This guide will walk you through the process of setting and running Tangle node locally to test your blueprint with Tangle. +This guide walks through the local testing flow used by the [Blueprint SDK](https://github.com/tangle-network/blueprint/tree/v2). It spins up a seeded Anvil network with the +Tangle v2 contracts from `tnt-core`, so you can test Blueprint Manager flows without running legacy infrastructure. ### Prerequisites -First install and configure `rustup`: +- Rust toolchain (`rustup`) +- Foundry (`anvil`) +- Docker (required for the Anvil test harness) -```bash -# Install -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -# Configure -source ~/.cargo/env -``` - -### Clone the tangle node +### Generate a local operator key ```bash -git clone https://github.com/tangle-network/tangle -cd tangle +cargo tangle key --algo ecdsa --keystore ./local-operator-keys --name anvil-operator +export BLUEPRINT_KEYSTORE_URI="$(pwd)/local-operator-keys" ``` -### Build the node for instant-seal +The `BLUEPRINT_KEYSTORE_URI` value can be reused by the Blueprint Manager and `TangleEvmClient`. -This is different from the production tangle runtime which produces a block every 6 seconds. The instant-seal node produces a block only when a transaction is available. -This allows for faster testing cycles. 
+### Start a seeded Anvil network -```bash -cargo build --release --features manual-seal,txpool,testnet -``` +The [Blueprint SDK](https://github.com/tangle-network/blueprint/tree/v2) ships a harness that boots Anvil with the pre-seeded Tangle v2 contracts: -### Run the node +```rust +use blueprint_anvil_testing_utils::harness_builder_from_env; -```bash -./target/release/tangle --tmp --dev --validator -linfo \ ---alice --rpc-cors all --rpc-methods=unsafe --rpc-external \ ---rpc-port 9944 -levm=debug -lgadget=trace --sealing instant +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let harness = harness_builder_from_env() + .include_anvil_logs(true) + .spawn() + .await?; + println!("HTTP RPC: {}", harness.http_endpoint()); + println!("WS RPC: {}", harness.ws_endpoint()); + tokio::signal::ctrl_c().await?; + Ok(()) +} ``` -If successful, you should see output indicating that the node is running: +The harness loads the `localtestnet-state.json` snapshot shipped in the [Blueprint SDK](https://github.com/tangle-network/blueprint/tree/v2). When the snapshot is missing or +invalid, it replays the Foundry broadcast used by `tnt-core` fixtures. -```bash -./target/release/tangle --tmp --dev --validator -linfo --alice --rpc-cors all --rpc-methods=unsafe --rpc-external --rpc-port 9944 -levm=debug -lgadget=trace --sealing instant - ++++++++++++++++++++++++ - +++++++++++++++++++++++++++ - +++++++++++++++++++++++++++ - +++ ++++++ +++ @%%%%%%%%%%% %%% - ++++++ ++++ +++++ %%%%%%%%%%%% %%%@ - ++++++++++++++++++++++++++ %%%% %%%%@ %%% %%@ @%%%%%%% %%%@ %%%%@ - ++++++++ %%%% @%%%%%%%@ %%%%%%%%% @%%%%%%%%% %%%@ %%%%%%%%% - ++++++++ %%%% %%%%%%%%% %%%% @%%%@ %%%% %%%% %%%@ %%%%%%%%%% - ++++++++++++++++++++++++++ %%%% %%%%%%%%% %%% %%%% %%% @%%% %%%@ @%%%%% %%%%% - ++++++ ++++ ++++++ %%%% %%%%%%%%% %%% %%%% %%%%%%%%%% %%%@ %%%%%%%%%@ - +++ ++++++ +++ %%%% %%%%%%%%% %%% %%%@ %%%%%%%%% %%% %%%%%%%@ - ++++ +++++++++ +++ %%%% %%%% - ++++++++++++++++++++++++++++ %%%%%%%%% - +++++++++++++++++++++++ %%%%% - -2024-10-30 16:00:54.306 INFO main sc_cli::runner: Tangle Node -2024-10-30 16:00:54.306 INFO main sc_cli::runner: ✌️ version 1.2.0-cedde5d83a0 -2024-10-30 16:00:54.306 INFO main sc_cli::runner: ❤️ by Webb Technologies Inc., 2023-2024 -2024-10-30 16:00:54.306 INFO main sc_cli::runner: 📋 Chain specification: Local Testnet -2024-10-30 16:00:54.306 INFO main sc_cli::runner: 🏷 Node name: Alice -2024-10-30 16:00:54.306 INFO main sc_cli::runner: 👤 Role: AUTHORITY -2024-10-30 16:00:54.306 INFO main sc_cli::runner: 💾 Database: RocksDb at /var/folders/ht/41y18g597_9_1035dlw0m3700000gn/T/substrateGSJexb/chains/local_testnet/db/full -2024-10-30 16:00:55.347 INFO main runtime::staking: [0] 💸 generated 5 npos voters, 5 from validators and 0 nominators -2024-10-30 16:00:55.347 INFO main runtime::staking: [0] 💸 generated 5 npos targets -``` - -### How to build testnet runtime (without instant-seal) +### Run your blueprint against local Tangle v2 -To build tangle node, without instant-seal, you can run: +Point the Blueprint Manager at the harness RPC endpoints and your service settings: ```bash -cargo build --release --features txpool,testnet +cargo tangle blueprint run \ + --protocol tangle-evm \ + --http-rpc-url http://127.0.0.1:8545 \ + --ws-rpc-url ws://127.0.0.1:8546 \ + --keystore-path ./local-operator-keys \ + --settings-file ./settings.env ``` -And use the following command to run the node: - -```bash -./target/release/tangle --tmp --dev --validator -linfo \ ---alice --rpc-cors all --rpc-methods=unsafe --rpc-external 
\ ---rpc-port 9944 -levm=debug -lgadget=trace -``` +For the latest harness details and fixtures, see the [Blueprint SDK](https://github.com/tangle-network/blueprint/tree/v2) runbook: +`https://github.com/tangle-network/blueprint/blob/v2/docs/operators/anvil.md`. diff --git a/pages/developers/troubleshooting.mdx b/pages/developers/troubleshooting.mdx index 059edf85..b8f13fa8 100644 --- a/pages/developers/troubleshooting.mdx +++ b/pages/developers/troubleshooting.mdx @@ -2,7 +2,7 @@ import GithubFileReaderDisplay from "../../components/GithubFileReaderDisplay"; # Troubleshooting Guide -This guide helps developers troubleshoot common issues when working with Tangle Network blueprints and AVS development. +This guide helps developers troubleshoot common issues when working with Tangle Network blueprints. ## Common Issues @@ -27,13 +27,11 @@ Caused by: note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace ``` -Verify that your blueprint's jobs and reports are implemented correctly. Occasionally, our SDK may have breaking changes to the error reporting. You will want to consider commenting out metadata generation and then rebuilding to check for errors in the blueprint. - -{/* TODO: Add URL */} +Verify that your blueprint's jobs and reports are implemented correctly. If metadata generation is failing, temporarily comment out the build script section that writes metadata and rebuild to surface compiler errors in your blueprint code. diff --git a/pages/index.mdx b/pages/index.mdx index d261216a..e3ed4095 100644 --- a/pages/index.mdx +++ b/pages/index.mdx @@ -1,6 +1,6 @@ --- -title: Overview, Use Cases and Actions -description: Explore the Tangle Network ecosystem at a glance including use cases including secure multi-party computation, as well as features, and roadmap. +title: Tangle Docs +description: Start here for the agentic workbench, sandbox runtime, and protocol. --- import LandingPage from "../components/LandingPage"; diff --git a/pages/infrastructure/_meta.ts b/pages/infrastructure/_meta.ts new file mode 100644 index 00000000..c8ea9c48 --- /dev/null +++ b/pages/infrastructure/_meta.ts @@ -0,0 +1,10 @@ +import type { Meta } from "nextra"; + +const meta: Meta = { + introduction: "Introduction", + architecture: "Architecture", + orchestration: "Orchestration", + sandboxing: "Sandboxing and Safety", +}; + +export default meta; diff --git a/pages/infrastructure/architecture.mdx b/pages/infrastructure/architecture.mdx new file mode 100644 index 00000000..5816bb11 --- /dev/null +++ b/pages/infrastructure/architecture.mdx @@ -0,0 +1,21 @@ +# Architecture + +The runtime is split into an orchestrator and execution sidecars so workloads stay isolated while coordination stays flexible. + +## Core Components + +- **Orchestrator**: Accepts run requests, validates policy, selects hosts, and manages sidecar lifecycle. +- **Session gateway**: Streams events over SSE, buffers for replay, and handles backpressure and connection state. +- **Execution sidecars**: Run agent sessions inside sandboxes, enforce tool access, and emit message/tool/file events. +- **Host drivers and agents**: Track host health, capacity, and metrics across local or pooled hosts. +- **Autoscaling and host pools (optional)**: Promote standby hosts and trigger provisioning through a webhook. +- **Observability layer**: Exposes metrics and health endpoints and preserves execution metadata. + +This architecture keeps workloads portable while maintaining consistent safety guarantees. 
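+
+To make the session-gateway behavior above concrete, here is a small, purely illustrative sketch of event buffering and replay: events carry monotonically increasing sequence numbers, and a reconnecting client resumes from the last sequence it saw. The runtime's gateway is not a public API; every type and name below is hypothetical.
+
+```rust
+use std::collections::VecDeque;
+
+/// Hypothetical replay buffer: bounded, ordered, resumable by sequence number.
+struct ReplayBuffer {
+    events: VecDeque<(u64, String)>, // (sequence, payload)
+    next_seq: u64,
+    capacity: usize,
+}
+
+impl ReplayBuffer {
+    fn new(capacity: usize) -> Self {
+        Self { events: VecDeque::new(), next_seq: 0, capacity }
+    }
+
+    /// Append a new event, evicting the oldest once the buffer is full.
+    fn push(&mut self, payload: impl Into<String>) -> u64 {
+        let seq = self.next_seq;
+        self.next_seq += 1;
+        if self.events.len() == self.capacity {
+            self.events.pop_front();
+        }
+        self.events.push_back((seq, payload.into()));
+        seq
+    }
+
+    /// Replay everything after `last_seen` for a client that reconnected.
+    fn replay_after(&self, last_seen: u64) -> Vec<&(u64, String)> {
+        self.events.iter().filter(|(seq, _)| *seq > last_seen).collect()
+    }
+}
+
+fn main() {
+    let mut buffer = ReplayBuffer::new(1024);
+    for i in 0..5 {
+        buffer.push(format!("tool_call_{i}"));
+    }
+    // A client that last saw sequence 2 catches up on 3 and 4.
+    for (seq, payload) in buffer.replay_after(2) {
+        println!("replay {seq}: {payload}");
+    }
+}
+```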
+ +## Operational Traits + +- **Multi-provider backends**: Select providers behind consistent policy gates. +- **Capacity-aware placement**: Allocate based on host health and resource limits. +- **Resilient streams**: Event buffering and replay support intermittent connections. +- **Policy-first execution**: Every task is validated before it runs. diff --git a/pages/infrastructure/introduction.mdx b/pages/infrastructure/introduction.mdx new file mode 100644 index 00000000..d961846b --- /dev/null +++ b/pages/infrastructure/introduction.mdx @@ -0,0 +1,26 @@ +# Sandbox Runtime + +The sandbox runtime is the execution layer for autonomous work. It provisions isolated environments, manages session execution, and streams events so workflows can run safely at scale. +Today, workloads are triggered through the workbench and managed platform services. External runtime APIs are not yet public. + +## What It Provides + +- **Isolation and containment** for untrusted or semi-trusted workloads. +- **Policy enforcement** for tools, data access, and budgets. +- **Execution control** with queueing, timeouts, and retry-aware failures. +- **Streaming observability** with real-time events, file updates, and execution metadata. +- **Capacity management** with host health, pooling, and optional autoscaling. + +## Who This Is For + +- **Workbench users** who need secure, repeatable execution. [Start in the workbench](/vibe/introduction). +- **Operators** who host runtimes and earn for reliable execution. [Operator onboarding](/operators/introduction). +- **Platform teams** who manage execution reliability and safety. [Review architecture](/infrastructure/architecture). + +## Start Here (By Role) + +- **Operators**: Begin with [operator onboarding](/operators/introduction). +- **Platform engineers**: Review [architecture](/infrastructure/architecture) and [orchestration](/infrastructure/orchestration). +- **Security teams**: Start with [sandboxing and safety](/infrastructure/sandboxing). + +The runtime is available via partnership or early access. diff --git a/pages/infrastructure/orchestration.mdx b/pages/infrastructure/orchestration.mdx new file mode 100644 index 00000000..42ea3dc4 --- /dev/null +++ b/pages/infrastructure/orchestration.mdx @@ -0,0 +1,21 @@ +# Orchestration + +Orchestration turns workbench intent into sandboxed execution. It coordinates sidecar placement, session lifecycle, and runtime signals across hosts. + +## Execution Lifecycle (Simplified) + +1. The orchestrator validates policies and checks capacity. +2. A sidecar is selected or started on an available host. +3. A session is created and queued executions run in order. +4. Events stream to clients with buffering and replay support. +5. Completion, failure, or cancellation updates metrics and metadata. + +## What Orchestration Covers + +- **Placement and capacity**: Host health, resource-aware limits, and pool membership. +- **Execution control**: Per-session queues, timeouts, and cancellation. +- **Batch and simulation runs**: Large task sets can queue and retry with backoff. +- **Autoscaling (optional)**: Standby hosts can be promoted and webhooks can request new capacity. +- **Observability hooks**: Health endpoints and metrics for fleet visibility. + +This is how the workbench and protocol workloads remain predictable even when the compute layer is distributed. 
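+
+As a rough sketch of the per-session queueing, timeouts, and cancellation described in the lifecycle above (the orchestrator itself is not public, so all names and durations below are hypothetical), executions drain strictly in order and anything still queued after a cancellation is marked as such:
+
+```rust
+use std::collections::VecDeque;
+use std::time::{Duration, Instant};
+
+#[derive(Debug)]
+enum Outcome {
+    Completed,
+    TimedOut,
+    Cancelled,
+}
+
+struct Execution {
+    id: u32,
+    timeout: Duration,
+    // Simulated work duration; a real execution runs inside a sandbox sidecar.
+    simulated_runtime: Duration,
+}
+
+/// Drain a session's queue in order; after `cancel_after` completed runs,
+/// everything still queued is marked cancelled.
+fn run_session(mut queue: VecDeque<Execution>, cancel_after: Option<usize>) -> Vec<(u32, Outcome)> {
+    let mut results = Vec::new();
+    let mut completed = 0;
+    while let Some(exec) = queue.pop_front() {
+        if cancel_after.map_or(false, |n| completed >= n) {
+            results.push((exec.id, Outcome::Cancelled));
+            continue;
+        }
+        let started = Instant::now();
+        // Stand-in for waiting on the sidecar and enforcing the timeout.
+        let outcome = if exec.simulated_runtime > exec.timeout {
+            Outcome::TimedOut
+        } else {
+            Outcome::Completed
+        };
+        completed += 1;
+        results.push((exec.id, outcome));
+        let _elapsed = started.elapsed(); // would feed metrics and execution metadata
+    }
+    results
+}
+
+fn main() {
+    let queue = VecDeque::from(vec![
+        Execution { id: 1, timeout: Duration::from_secs(60), simulated_runtime: Duration::from_secs(5) },
+        Execution { id: 2, timeout: Duration::from_secs(60), simulated_runtime: Duration::from_secs(90) },
+        Execution { id: 3, timeout: Duration::from_secs(60), simulated_runtime: Duration::from_secs(10) },
+    ]);
+    // Cancel after two executions have run: 1 completes, 2 times out, 3 is cancelled.
+    for (id, outcome) in run_session(queue, Some(2)) {
+        println!("execution {id}: {outcome:?}");
+    }
+}
+```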
diff --git a/pages/infrastructure/sandboxing.mdx b/pages/infrastructure/sandboxing.mdx new file mode 100644 index 00000000..2fe98585 --- /dev/null +++ b/pages/infrastructure/sandboxing.mdx @@ -0,0 +1,47 @@ +# Sandboxing and Safety + +Every workload runs inside an isolated sandbox. This protects the host, the operator, and the customer while making agent behavior reviewable. + +## Isolation Technologies + +Operators choose isolation based on security requirements and workload type: + +**Docker containers** provide process, filesystem, and network separation. Containers are lightweight, well-understood, and suitable for most workloads where standard isolation suffices. + +**gVisor** adds an additional isolation layer by intercepting system calls through a user-space kernel. This limits attack surface and is suitable for higher-security workloads where container escapes are a concern. + +**Firecracker micro-VMs** provide hardware-level isolation with millisecond boot times. Suitable for workloads requiring the strongest guarantees, where even kernel-level exploits should not compromise the host. + +Operators declare supported isolation technologies. Customers specify requirements when requesting services. + +## Isolation Guarantees + +Regardless of technology, these guarantees must hold: + +- **Process isolation**: No access to host resources or other sessions. +- **Filesystem isolation**: Private storage with quotas. +- **Network isolation**: Restricted external access per policy. +- **Resource limits**: Bounded CPU, memory, and I/O consumption. + +Operators who fail to maintain isolation guarantees are subject to slashing. + +## Policy Enforcement + +Each sandbox enforces: + +- **Tool allowlists**: Which tools the agent can invoke. +- **Domain allowlists**: Which external endpoints are reachable. +- **Data access policies**: What files and secrets are available. +- **Budget limits**: Token and compute ceilings per session. + +Policies are defined in workbench profiles and enforced at the runtime level. + +## Auditability + +Every session produces: + +- **Event streams**: Real-time logs of agent actions. +- **Execution metadata**: Timing, resource usage, and exit status. +- **File snapshots**: State of the workspace at key points. + +This makes execution reviewable and supports dispute resolution when issues arise. diff --git a/pages/network/_meta.ts b/pages/network/_meta.ts index c1994c93..a7750c06 100644 --- a/pages/network/_meta.ts +++ b/pages/network/_meta.ts @@ -1,39 +1,29 @@ import { Meta } from "nextra"; const meta: Meta = { - overview: "Overview", - tokenomics: "Tokenomics", - "-- incentives": { + "-- core": { type: "separator", - title: "Restaking Incentives", + title: "Protocol Core", }, - "incentives-overview": "Overview", - "incentives-restakers": "For Restakers", - "incentives-operators": "For Operators", - "incentives-developers": "For Developers", - slashing: "Slashing", - "-- restaking infrastructure": { + overview: "Protocol Foundation", + "network-parameters": "Protocol Parameters", + "metrics-and-scoring": "Metrics and Scoring", + "-- economics": { type: "separator", - title: "Restaking Infrastructure", + title: "Economics", }, - differences: "Tangle vs. 
Eigenlayer", + tokenomics: "TNT Token", + "incentives-overview": "Incentives", + "incentives-operators": "Operator Incentives", + "incentives-developers": "Developer Incentives", + "incentives-stakers": "Staker Incentives", "-- launch": { type: "separator", - title: "Network Launch", + title: "Launch and Migration", }, - "claim-airdrop": "Claim the Airdrop", - launch: "About the Launch", - "points-mechanics": "Points Mechanics", - "-- governance": { - type: "separator", - title: "Governance", - }, - governance: "On-chain Governance", - "-- various": { - type: "separator", - title: "Miscellaenous", - }, - "network-parameters": "Network Parameters", + launch: "Launch", + "claim-airdrop": "TNT Migration and Claims", + "points-mechanics": "Participation Credits", }; export default meta; diff --git a/pages/network/claim-airdrop.mdx b/pages/network/claim-airdrop.mdx index cc38eb6d..b9631fdf 100644 --- a/pages/network/claim-airdrop.mdx +++ b/pages/network/claim-airdrop.mdx @@ -1,63 +1,49 @@ -import { Callout } from 'nextra/components' +import { Callout } from "nextra/components"; -# Claiming Your TNT Airdrop +# TNT Migration and Claims -This is step-by-step guide on how to claim your airdrop tokens through our claims interface. +TNT migration moves balances from the legacy Substrate system and direct EVM allocations into the canonical TNT token on EVM. Claims are verified using a Merkle tree and an SP1 zero-knowledge proof, then distributed with a vesting schedule. - Genesis participants will have 1 year to claim their distribution, **the deadline is April 10 2025.** otherwise the amount is sent to the Tangle Network on-chain treasury. + The claim window and unlock parameters are defined by the on-chain migration contract. The claims portal shows the current deadline and vesting configuration. -## Prerequisites +Contract sources (GitHub): -Before you begin, please ensure you have the following: +- https://github.com/tangle-network/tnt-core/blob/v2/packages/migration-claim/src/TangleMigration.sol +- https://github.com/tangle-network/tnt-core/blob/v2/packages/migration-claim/src/lockups/TNTLinearVesting.sol -- **A compatible wallet**: Ensure your digital wallet is compatible with our platform. Most users will claim with a Substrate compatible wallet like Polkadot.js Extension. Some will use their favorite EVM wallet. [See the full Wallet compatibile page on our Docs.](/wallets) +## Who Can Claim -We support major EVM wallets such as : +- **Substrate holders**: Submit a claim using your SR25519 public key and choose an EVM recipient. +- **EVM recipients**: If you are included in the EVM list, you can claim directly to your wallet. -- MetaMask -- Rainbow Wallet -- Walletconnect +## Vesting Schedule (Default) -For Substrate wallets we support: +- **2% unlocked** immediately at claim time. +- **98% vested** through a linear vesting contract. +- **12-month cliff + 24-month linear vesting** (36 months total). -- Polkadot.js Browser Extension -- Talisman Wallet -- SubWallet +Vesting parameters can be updated before the first claim if governance changes them. Always confirm the live settings in the claims portal. -- **Network Connection**: Ensure you're connected to the correct blockchain network, Tangle Network. +## Step 1: Access the Claims UI -## Step 1: Access the Airdrop UI +1. Navigate to the claims portal: [https://app.tangle.tools/claim](https://app.tangle.tools/claim) +2. Connect your wallet (Substrate or EVM, depending on your allocation). -1. 
Navigate to our official claims portal: [https://app.tangle.tools/claim](https://app.tangle.tools/claim) -2. Click on the appropriate "Connect Wallet" button in the interface. +## Step 2: Verify Eligibility -## Step 2: Connect Your Wallet +1. The portal checks eligibility and displays your claimable balance. +2. Choose an EVM recipient address if prompted. -1. A pop-up window will appear, asking you to either sign a message for Polkadot.js Extension, or if using EVM, to select your wallet provider (We support major EVM wallets such as MetaMask, Rainbow Wallet, and Walletconnect.) Choose the one that corresponds to your wallet, -2. To connect your wallet to our platform, you may need to add a custom network to your wallet: +## Step 3: Submit the Claim -## Step 3: Verify Airdrop Eligibility +1. Click **Claim** to submit your request. +2. Claims can be submitted gaslessly via a relayer (the portal will handle this automatically when available). +3. Once confirmed, the unlocked amount transfers to your wallet and the vesting contract is created for the remainder. -1. Once your wallet is connected, the system will automatically check for your eligibility to claim the airdrop. -2. If eligible, you will see a message indicating that you have unclaimed TNT, and the amount you can claim.. +## Additional Notes -## Step 4: Claim Your Airdrop - -1. **Airdrop Recipient Field** With our interface, you can claim from one address but send the tokens to a different Substrate or Tangle EVM address. Enter in your preferred address or leave the Airdrop Recipient field with your claiming account address already present. -1. Click on the "Claim Now" button. -1. A transaction will be initiated. You may need to confirm the transaction in your wallet. -1. Depending on the network traffic, it may take a few minutes for the transaction to be processed. -1. Once confirmed, your tokens will be deposited into your connected wallet. - -## Step 5: Confirmation - -1. You will receive a confirmation message on the UI, confirming the successful claim of your airdrop. -2. You can verify the token receipt by checking your wallet balance or clicking through the displayed link to view the transaction on a block explorer. - -## Additional Tips - -- **Gas Fees?**: For the genesis claiming process, there is no gas fee charged to claimed tokens. -- **Security**: Never share your private keys or wallet password with anyone. -- **Support**: If you encounter any issues or have questions, please contact our support team through the official channels listed on our website. +- **Gasless claims**: A claim relayer can pay gas on your behalf. The portal will guide you through this path. +- **Security**: Never share private keys or seed phrases. +- **Support**: Use official channels listed on the site if you need help. diff --git a/pages/network/differences.mdx b/pages/network/differences.mdx deleted file mode 100644 index 0b4668db..00000000 --- a/pages/network/differences.mdx +++ /dev/null @@ -1,51 +0,0 @@ -import ExpandableImage from "../../components/ExpandableImage"; - -# Tangle's Uniqueness - -Tangle's multi-asset restaking infrastructure is built to be multi-chain and has notable differences between Eigenlayer and other restaking infrastructures. To start, Tangle has the notion of Blueprints, which are reusable specifications for AVSes. Developers on Tangle deploy Blueprints and users request service instances from these Blueprints. 
This fundamental difference allows developers to create and monetize services without having to operate the infrastructure for those services. It also puts a focus on reusable service infrastructures that can be deployed with different levels of security and decentralization. Our focus is to build the decentralized crypto cloud, for instanceable service infrastructure similar to what AWS provides for the average developer. - -## Tangle Blueprints vs. Eigenlayer AVS - -Tangle Blueprints are different from an Eigenlayer AVSes. Firstly, a Tangle Blueprint is a specification for an actively validated service. This means that alone, a Tangle Blueprint doesn't represent a live system. Rather, it represents a template for a service deployment that can be instantiated multiple times. - -Tangle Blueprints represent an additional abstraction over service infrastructures that allow developers to both create and monetize services without having to operate the infrastructure for those services. For example, every developer interacting with Eigenlayer today is also develping and deploying an AVS. On Tangle, a developer can create and deploy a Blueprint and not manage the AVSes that instantiate that Blueprint. This is the benefit of developing and restaking on Tangle. Tangle Blueprints allow developers to build and deploy service infrastructure specifications in a framework as straightforward as deploying a smart contract. - -### A visual distinction between Tangle and other restaking networks. - -Eigenlayer can be summarized as a system w/ Tangle Blueprints, where each Blueprint is instantiated at most once. That is, for each unique Blueprint, there is and will only ever be 1 AVS running that Blueprint's specification. Note, it is entirely possible for Eigenlayer to support a similar architecture by re-deploying the same AVS multiple times under different names, brands, or whatever. However, this is not the default behavior of Eigenlayer nor is it the intended use in public. - -Below we show our diagram for Tangle, with a yellow bar indicating the shared security pool across service instances. Note, the shared security pool only overlaps the first row of AVSes of each blueprint. This is Eigenlayer's default behavior. - -#### Eigenlayer but w/ Tangle Blueprints - - - -Next, we show Tangle's full architecture with multi-asset support and support for reusable Blueprint specifications. Note, the shared security pool overlaps all AVSes of each blueprint. In Tangle, it is also designed to enable AVS instances to self-select the assets they want backing their service. The end-user service requesters can configure an AVS to utilize fewer asset types than are available to them. This is Tangle's default behavior. - -#### Tangle - - - -## Developer Focused - -At Tangle's core is primarily to be developer focused. This is why our AVS framework is built in Rust. It is also why we have as our goal to make Blueprint creation and deployment as straightforward as smart contract development and deployment. Building offchain infrastructure _should be easy_! Our Gadget CLI will be a step-wise improvement in the developer experience of building AVSes for any blockchain ecosystem. - -Tangle's AVS Gadget also supports compiling to WebAssembly, which allows the gadget to be deployed in nearly any computing environment. The Gadget supports running other program binaries and docker images, and these templates will be readily available in our developer documentation. 
The Gadget acts as a blockchain analogue to Kubernetes, allowing developers to build complex service infrastructures that interact with cloud services, databases, web infrastructure, and unique computing environments such as TEEs, GPUs, FPGAs and more. - -Our Blueprint focused architecture favors developers because it allows developers to build reusable services that benefit any ecosystem. A developer could build an Oracle Blueprint that provides arbitrarily requested data feeds. Users could request many service instances from this Blueprint, each for different blockchain applications and ecosystems. If the service instances are useful and valuable, the developer will earn a portion of the fees generated by the service instances. This is the power of Tangle's Blueprint architecture. - -### Incentivized Blueprints - -Tangle at its core will have a mechanism to incentivize Blueprint creation. This is because the more useful and valuable service instances that are created from a Blueprint, the more fees the Blueprint creator will earn. This is a fundamental difference between Tangle and Eigenlayer. In Eigenlayer, the AVS operator earns fees from the service instances they operate and the generated AVS token. In Tangle, the Blueprint creator earns fees from the service instances that are created from their Blueprint, the operators earn fees for operating the instances, and collectively if the token holders vote to incentivize it, both the restakers and developer will earn TNT incentives. - -Inflationary incentives for Blueprints are dictated by the native governance system on Tangle. Token holders vote to add Blueprints to an incentivized set of Blueprints. The more useful a Blueprint is, the more likely it should be to become incentivized in the native governance system. - -## Operator customizability - -In Tangle, operators register for Blueprints rather than for individual AVSes. Registering for Blueprints onboards the operator to the underlying service that may be requested in the future. Operators can require approvals for any service requests, putting them in a position of power to accept and reject operating AVSes for users and applications they do not wish to support. This is a fundamental difference between Tangle and Eigenlayer. - -A useful Service Blueprint also makes operators more money, as it allows operators to run multiple AVSes with the same infrastructure. This is because the Blueprint is a specification for the service infrastructure, and the AVSes are the instances of that service infrastructure. Operators can run multiple AVSes with the same infrastructure, which allows them to earn more fees from the service instances they operate. If the service instances are useful and valuable, the operator will earn a portion of the fees generated by the service instances. - -## Restaking Any Asset - -Tangle is built to support any asset type. This means that any asset can be restaked in Tangle's infrastructure. Assets are added to the restaking infrastructure through governance. This puts power in the community to decide which assets should be supported by the restaking infrastructure. 
diff --git a/pages/network/governance/_meta.ts b/pages/network/governance/_meta.ts deleted file mode 100644 index cf97ae47..00000000 --- a/pages/network/governance/_meta.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - overview: "Overview of On-chain Governance", - "democracy-voting": "Voting in Democracy", - "governance-parameters": "On-chain Governance Parameters", - "proposal-creation": "Create a Proposal", - "governance-procedures": "Other Procedures", - democracy: { - title: "🔗 Democracy On-chain", - href: "https://polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools#/democracy", - }, - treasury: { - title: "🔗 Treasury On-chain", - href: "https://polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools#/treasury", - }, - council: { - title: "🔗 Council On-chain", - href: "https://polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools#/council", - }, -}; - -export default meta; diff --git a/pages/network/governance/democracy-voting.mdx b/pages/network/governance/democracy-voting.mdx deleted file mode 100644 index 0aec7f88..00000000 --- a/pages/network/governance/democracy-voting.mdx +++ /dev/null @@ -1,39 +0,0 @@ -# Voting in Democracy Referenda - -Substrate-based blockchains often have built-in on-chain governance mechanisms, which include voting on referenda. Here's a step-by-step guide on how to vote in democracy referenda on a Substrate blockchain: - -Note: This guide assumes you have already set up a Substrate-based wallet and have some tokens in your account. - -# Governance Interfaces - -Polkadot Apps is the primary way to interact with governance on Tangle Network. - -- Democracy: https://polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools#/democracy -- Treasury: https://polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools#/treasury -- Council: https://polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools#/council - -1. **Access the Polkadot/Substrate User Interface (UI):** - Visit the [Substrate UI](https://polkadot.js.org/apps/). This web interface is used to interact with the Tangle network and other Substrate chains, during our testnet phase you can use [Tangle's alpha interface](https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/accounts) - -2. **Connect to the correct network:** - Ensure you're connected to the Tangle Network, if not, at the top-left of the page, you will see a drop-down menu. Here you can select the Tangle network. - -3. **Access the Democracy module:** - On the left sidebar, under the Governance tab, click on Democracy. This is the on-chain voting system where all the current referenda are listed. - -4. **Choose a Referendum:** - You will see a list of active referenda each represented by a number. Click on a specific referendum to see more details. - -5. **Review the Referendum Details:** - Each referendum has a description and specific details. Review these carefully to understand what you are voting for or against. - -6. **Cast Your Vote:** - Once you've decided how to vote, click on the "Vote" button. You'll be asked to choose between 'Aye' (yes) and 'Nay' (no), and you'll have the option to adjust your vote's "conviction," which multiplies your vote's power at the cost of locking your tokens for a longer period. - -7. **Sign and Submit the Transaction:** - After clicking the "Vote" button, you will need to sign the transaction using your account. Enter your password and click on "Sign and Submit". Your vote will be recorded on the blockchain once the transaction is included in a block. - -8. 
**Wait for the Voting Period to End:** - Each referendum has a voting period. When this period ends, votes are tallied, and the decision is enacted based on the majority vote. - -Remember that **voting in a referendum will lock your tokens until the end of the enactment period (if the proposal passes) or until the end of the voting period (if the proposal does not pass).** The length of these periods can vary, refer to [our parameters.](./governance-parameters.mdx) diff --git a/pages/network/governance/governance-parameters.mdx b/pages/network/governance/governance-parameters.mdx deleted file mode 100644 index 1a3b6db1..00000000 --- a/pages/network/governance/governance-parameters.mdx +++ /dev/null @@ -1,20 +0,0 @@ -# Governance Parameters - -The following durations control windows of action for several governance processes on Tangle Network. These values will likely change as we approach mainnet. - -| Parameter | Duration (minutes) | Duration (days) | -| ----------------------- | -----------------: | --------------: | -| `LaunchPeriod` | 40320 | 28 | -| `VotingPeriod` | 40320 | 28 | -| `FastTrackVotingPeriod` | 4320 | 3 | -| `EnactmentPeriod` | 43200 | 30 | -| `CooloffPeriod` | 40320 | 28 | - -**Descriptions** -`LaunchPeriod`: Represents the duration of the launch period in real-world time. -`VotingPeriod`: Represents the duration of the voting period in real-world time. -`FastTrackVotingPeriod`: Represents the duration of the fast-track voting period in real-world time. -`EnactmentPeriod`: Represents the duration of the enactment period in real-world time. -`CooloffPeriod`: Represents the duration of the cool-off period in real-world time. -`MinimumDeposit`: This parameter defines the minimum balance (measured in some unspecified unit) that must be deposited in some context. The value is 100 times the base UNIT. -`MaxProposals`: This parameter limits the maximum number of proposals that can be active at any given time to 100. diff --git a/pages/network/governance/governance-procedures.mdx b/pages/network/governance/governance-procedures.mdx deleted file mode 100644 index 737fcf60..00000000 --- a/pages/network/governance/governance-procedures.mdx +++ /dev/null @@ -1,45 +0,0 @@ -# Procedures in On-chain Governance - -## Launching a Simple-Majority Referenda - -A Simple Majority proposal is a proposal that necessitates a majority, or more than 51% of votes, to pass, rather than the 'Super Majority' (2/3 of voters) which is the default requirement. This method ensures a predisposition towards approval, preventing the potential override of the general will of many smaller stakeholders by a single party that might vote 6x against a proposal. This strategy aligns power with the broader community, rather than just a few individuals. - -### Process to Execute a Simple Majority Proposal - -The process is slightly intricate at the moment, requiring multiple stages and council approval. The objective is to automate this via modifications to the existing treasury pallet. However, until that is achieved, here's the process. - -1. **Creating a Treasury Proposal** - - Visit the Polkadotjs app, switch to the network and navigate to the treasury tab found in the governance dropdown menu. Here, you'll see the ongoing treasury proposals. - - Click 'Submit Proposal' located on the right side of the interface and enter the proposal's details. You'll be required to bond a percentage of the total requested tokens using the 'Submit with account'. 
Usually, a beneficiary account should have been set up and added to your address book. The standard practice is to use a minimum of 2/3 multi-sig accounts for grants. - - Your treasury proposal will enter the proposal queue after correct submission. It will receive a number - this is the `proposalID`, important for the next step. - -2. **Creating a 'Preimage' and Accompanying 'Hash'** - - Navigate to the Governance dropdown menu, select Democracy, and create a preimage of the proposal, which is the formal key-value description of the on-chain proposal. Choose treasury from the options list, which reveals a few extra options. Choose `approveProposal(proposalID)` and enter the `proposalID` from step 1. - - Copy the resultant `preimage hash`, submit, and sign this stage with your account. - -3. **Council Proposes Simple Majority as a Motion** - - This stage requires a council member, either directly involved as the proposing party, or indirectly as an intermediary to help a community member submit a Simple Majority proposal as a Council motion. - - Navigate to the Developer dropdown menu and select the Extrinsics option. The proposing council member must switch to their Council member account in the first box. - - From the next dropdown menu, select Council and propose(`threshold`, `proposal, `lengthbound`) from the subsequent options. For `threshold`, choose 8, meaning the Simple Majority motion needs a minimum of 8 out of the 13 Council members to pass and execute Treasury proposal as a simple majority referendum. - - In the next box, under proposal: Proposal, select democracy and then next to that externalProposeMajority (proposalHash). - - Paste the preimage hash (the proposal hash) received in the last stage into the box below proposalHash: Hash. - - For the final box, `lengthBound: Compact` enter 42. - - Now hit Submit Transaction. - -4. **Council Approves Simple Majority Motion** - - At least 8/13 council members need to vote Aye to approve this motion and set the treasury proposal on the path to becoming a simple majority referendum. - - Once 8 members have voted Aye, the motion can be closed, either by the original council member or any other council member, including the last person to vote Aye. The motion exists for 13 days. If there are insufficient votes Aye/Nay, it won't execute. If it does pass, the proposal will progress towards becoming a simple majority referendum, appearing in the Governance dropdown menu as an external proposal. diff --git a/pages/network/governance/overview.mdx b/pages/network/governance/overview.mdx deleted file mode 100644 index c2274893..00000000 --- a/pages/network/governance/overview.mdx +++ /dev/null @@ -1,70 +0,0 @@ -# Overview of On-chain Governance - -In many contemporary on-chain governed blockchain networks, the concept of shared power and decision-making is fundamental. These decentralized ecosystems operate under a community governance model, allowing every token holder to have a say in network upgrades and the evolution of its protocol. - -The network's governance structure typically encompasses several key roles: the council and the general token holder population. Each of these roles contributes to the decision-making process, bringing balance to the governance model. - -**The Council**, elected by token holders, typically consists of a smaller group of participants who are committed to the sustainability and future of the network. 
Council members are responsible for proposing referenda, vetoing dangerous or malicious proposals, and representing passive token holders. - -**Token holders**, as members of the network, are part of its governance system. They vote on referenda using their tokens, propose changes to the network, and elect the council members. - -A unique feature of such ecosystems is their adaptive quorum biasing, which adjusts the passing threshold of proposals based on turnout. This ensures a fair representation and participation in the system. - -Furthermore, an integral part of these blockchain networks is their ability to upgrade and evolve over time without necessitating hard forks. Changes to the network protocol, like runtime upgrades, can be proposed, voted on, and enacted in a decentralized manner without interrupting network services. - -In essence, these on-chain governed blockchain networks exemplify how decision-making power can be shared amongst all participants. The ethos of transparency, collective intelligence, and broad participation in these systems pave the way for future blockchain technologies. - -# Tangle Network Governance Guide - -The governance system of Tangle Network is divided into two parts, the public referenda and the council. The public referenda allows any TNT token holder to propose and vote, given they provide a bond. - -## Public Referenda - -Proposals can be made by any token holder. Others can agree with the proposal by seconding it and providing tokens equivalent to the original bond. The most seconded proposal during every launch period is moved to the public referenda table for active voting. Voters can lock their tokens for a longer duration to amplify their vote. - -Detailed information on the governance system can be found [here](https://wiki.polkadot.network/general/governance-apps/). - -## Important Parameters for Democracy Module - -Here are some important parameters when voting using the Democracy module: - -- Launch Period: Frequency of new public referenda launches. -- Voting Period: Frequency of referenda vote tallying. -- Emergency Voting Period: The minimum voting period for a fast-tracked emergency referendum. -- Minimum Deposit: The minimum amount needed as a deposit for a public referendum proposal. -- Enactment Period: The minimum time period for locking funds and the period between a proposal being approved and enacted. -- Cool off Period: The time period when a proposal can't be re-submitted after being vetoed. - -These parameters may change based on governance. Refer to Network Parameters or the Node Runtime for the most authoritative reference. - -# Governance Interfaces - -Polkadot Apps is the primary way to interact with governance on Tangle Network. - -- Democracy: https://polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools#/democracy -- Treasury: https://polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools#/treasury -- Council: https://polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools#/council - -## Proposing an Action - -To propose an action, you need to bond some tokens. The Polkadot Apps' "Democracy" tab allows you to submit a new proposal by entering the preimage hash of the proposal. After submitting your proposal, it appears in the "proposals" column. At this point, it is only visible and can be seconded. The preimage must be submitted to make the proposal actionable. - -## Seconding a Proposal - -Seconding a proposal means you agree with the proposal and back it with a deposit equal to the original one. 
The most seconded proposal is tabled as a referendum to be voted on every launch period. - -## Voting on a Proposal - -To vote on a referendum, navigate to the "Democracy" tab. Any active referendum will appear in the "referenda" column. Click "Vote" to cast a vote for the referendum. Locking your tokens for a longer duration can weigh your vote more strongly. The lock duration doesn't compound upon consecutive votes. - -## Delegate a Vote - -If you're unable to keep up with and vote on upcoming referenda, you can delegate your vote to another account. The delegated tokens add to the vote the delegatee has made. - -## Undelegate a Vote - -You can remove your delegation at any point in the future. Your tokens will be locked for a duration in accordance with the conviction you set at the beginning of the delegation. - -## Proxies - -Proxies vote on behalf of a stash account. Setting a proxy involves submitting a "setProxy" transaction from the "democracy" pallet. You can also remove a proxy or resign as a proxy using the "removeProxy" and "resignProxy" transactions respectively. diff --git a/pages/network/governance/proposal-creation.mdx b/pages/network/governance/proposal-creation.mdx deleted file mode 100644 index bb57b5df..00000000 --- a/pages/network/governance/proposal-creation.mdx +++ /dev/null @@ -1,33 +0,0 @@ -# Proposing an Action on the Tangle Network - -Proposing an referenda on the Tangle Network requires you to bond some TNT tokens. To ensure you have enough tokens to make the minimum deposit, you can check the parameter in the chain state. - -On Tangle Apps, you can use the "Democracy" tab to make a new proposal. To submit a proposal, you'll need to submit what's called the preimage hash. The preimage hash is the hash of the proposal to be enacted. You can easily get the preimage hash by clicking on the "Submit preimage" button and configuring the action you're proposing. - -Copy this preimage hash and save it for the next step. There's no need to click "Submit Preimage" at this point, though you could. We'll go over that in the next section. - -Now you'll click on the "Submit proposal" button, enter the preimage hash into the "preimage hash" field, and enter at least the minimum deposit into the "locked balance" field. Click on the blue "Submit proposal" button, confirm the transaction, and you should see your proposal appear in the "proposals" column on the page. - -Your proposal is now visible to anyone who accesses the Tangle Network, and others can second it or submit a preimage. However, it's hard to tell exactly what this proposal does since it shows the hash of the action. Other TNT holders won't be able to make a judgement about whether they second it or not until someone submits the actual preimage for this proposal. In the next step, you will submit the preimage. - -# Submitting a Preimage - -The act of making a proposal is separate from submitting the preimage for the proposal since the storage cost of submitting a large preimage could be quite high. Allowing the preimage submission to come as a separate transaction means another account could submit the preimage for you if you don't have the funds to do so. It also means that you don't have to pay so many funds right away as you can prove the preimage hash out-of-band. - -However, before the proposal passes, you'll need to submit the preimage, or else the proposal can't be enacted. The guide will now show you how to do this. 
- -Click on the blue "Submit preimage" button and configure it to be the same as what you did before to acquire the preimage hash. This time, instead of copying the hash to another tab, follow through and click "Submit preimage" and confirm the transaction. - -## Submit Preimage - -Once the transaction is included, you should see the UI update with the information for your already submitted proposal. - -# Seconding a Proposal - -Seconding a proposal means that you're agreeing with the proposal and backing it with an equal amount of deposit as was originally locked. By seconding a proposal, you'll move it higher up the rank of proposals. The most seconded proposal - in value, not the number of supporters - will be tabled as a referendum to be voted on every launch period. - -To second a proposal, navigate to the proposal you want to second and click on the "Second" button. - -You'll be prompted with the full details of the proposal (if the preimage has been submitted!), and you can then broadcast the transaction by clicking the blue "Second" button. - -Once successful, you'll see your second appear in the dropdown in the proposal details. diff --git a/pages/network/incentives-developers.mdx b/pages/network/incentives-developers.mdx index f2c7e9f8..5bfd9b0c 100644 --- a/pages/network/incentives-developers.mdx +++ b/pages/network/incentives-developers.mdx @@ -1,47 +1,32 @@ # Developer Incentives -Tangle Network provides developers with automated revenue streams through blueprint deployment and usage. When users instance your blueprints, smart contracts automatically distribute fees according to a fixed model: 50% to you as the developer, 30% to operators and restakers via Boosted TNT, and 20% to the protocol treasury. +Blueprint developers earn revenue when customers instantiate and run their services. Incentives come from: -## Core Revenue Mechanisms +1. **Service fee revenue share** on every instance of your blueprint. +2. **Optional TNT rewards** (pre-funded, if governance enables `InflationPool`). -### Direct Blueprint Revenue +## Service Fees -- Earn 50% of all instance fees automatically -- Control your revenue share through EVM-based distribution logic -- Implement custom tokenization and revenue-sharing models -- Configure automated payment collection and distribution +For each service payment, the protocol sends the **developer portion** to the blueprint owner by default. A blueprint’s service manager can return a different payout address (for example, to route revenue into a multisig, splitter, or DAO treasury). -### Whitelisting Benefits +The default protocol split is **20% developer / 20% protocol / 40% operators / 20% stakers** (governance configurable). -Blueprints demonstrating high utility can access additional revenue through whitelisting: +## Optional TNT Rewards -- Receive protocol inflation rewards -- Gain enhanced network visibility -- Qualification based on metrics like active instances, transaction volume, and security capital +If the protocol is running `InflationPool` incentives, developers can earn additional TNT based on on-chain activity metrics (e.g., blueprints created, services created, jobs executed, and fees generated). These rewards are claimable from `InflationPool`. 
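As a quick illustration of the default split documented above, the following TypeScript sketch computes each portion of a single service payment. The basis-point values mirror the documented 20/20/40/20 default but are hard-coded assumptions here; the live values come from the on-chain configuration in `Payments.sol` and can be changed by governance.

```typescript
// Sketch only: the default fee split from the docs, expressed in basis points.
// Live values are read from the on-chain configuration and may differ.
const SPLIT_BPS = { developer: 2000, protocol: 2000, operators: 4000, stakers: 2000 };
const BPS_DENOMINATOR = 10_000n;

// Split a payment (in the payment token's smallest unit) into its four portions.
function splitServicePayment(amount: bigint) {
  const portion = (bps: number) => (amount * BigInt(bps)) / BPS_DENOMINATOR;
  return {
    // The developer portion goes to the blueprint owner, or to a payout address
    // returned by the blueprint's service manager.
    developer: portion(SPLIT_BPS.developer),
    protocol: portion(SPLIT_BPS.protocol),
    operators: portion(SPLIT_BPS.operators),
    stakers: portion(SPLIT_BPS.stakers),
  };
}

// Example: a 1,000 USDC payment (6 decimals) yields 200 USDC for the developer.
console.log(splitServicePayment(1_000_000_000n));
```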
-## Technical Integration +## Where This Lives in Code -Your blueprint automatically inherits Tangle's security infrastructure: +- `InflationPool`: https://github.com/tangle-network/tnt-core/blob/main/src/rewards/InflationPool.sol +- `TangleMetrics`: https://github.com/tangle-network/tnt-core/blob/main/src/rewards/TangleMetrics.sol +- `Payments` (fee split + payouts): https://github.com/tangle-network/tnt-core/blob/main/src/core/Payments.sol -- TNT-based restaking mechanism -- Native LST support -- Configurable security parameters -- Smart contract-based revenue distribution +For a readable breakdown and links to contracts, see [Rewards Architecture](/developers/system-architecture/rewards). -## Economic Dynamics +## Design Tips -Successful blueprints benefit from natural market forces: +- Be explicit about slashing conditions and the evidence you expect to be submitted on-chain. +- Use security requirements and operator commitments to express what “secure enough” means for your service. +- Provide observability where possible (heartbeats and optional QoS metrics) to help operators and customers assess performance. -- Higher usage → increased instance fees -- Utility drives restaking capital allocation -- Operator registration follows valuable blueprints -- Self-reinforcing cycle of adoption and security - -## Implementation - -1. Deploy your blueprint (open or closed source) -2. Set revenue distribution parameters -3. Monitor usage and metrics -4. Optionally pursue whitelisting for additional benefits - -The system is designed to let you focus on building valuable software while the network handles monetization, security, and distribution infrastructure. +See [Metrics and Scoring](/network/metrics-and-scoring). diff --git a/pages/network/incentives-operators.mdx b/pages/network/incentives-operators.mdx index 92482d75..8a29df90 100644 --- a/pages/network/incentives-operators.mdx +++ b/pages/network/incentives-operators.mdx @@ -1,36 +1,39 @@ # Operator Incentives -Tangle Network operators earn revenue by running Blueprint instances for customers. The platform provides multiple revenue streams through an automated distribution system that rewards reliable infrastructure operation and service delivery. +Operators earn revenue from service usage and optional TNT budgets. This page outlines the primary revenue paths in the v2 protocol. -## Revenue Streams +## Revenue Sources -### Core Service Fees +1. **Service fees (default split)** -- Earn 30% of all Blueprint instance fees (shared with restakers) -- Automatic distribution through smart contracts -- Additional direct customer payments for compute and storage + - Fees paid by customers are split across developers, the protocol, operators, and stakers. + - The default split is **20% developer / 20% protocol / 40% operators / 20% stakers** (governance configurable). -### Validator Synergies +2. **Optional TNT incentives (pre-funded)** -Operators can increase earnings by also running network validators: + - If governance funds `InflationPool`, operators can earn TNT based on activity metrics. -- Enhanced protocol rewards -- Higher visibility to delegators +3. **Optional operator commission (delegation incentives)** + - If an operator enables commission, they earn a share of delegator incentives from `RewardVaults`. -## Technical Requirements +## How Service Fees Flow -Successful operation requires: +- Service fees are paid in the chosen payment token (native or ERC-20). 
+- Operator rewards are weighted by service exposure and routed through on-chain accounting. +- Staker fees are routed per operator to `ServiceFeeDistributor` for delegator payouts. -- Reliable infrastructure and monitoring -- High uptime maintenance -- Performance benchmark compliance -- Secure asset management +See [Incentives](/network/incentives-overview) for the full fee flow. -## Economic Model +## Where This Lives in Code -The system creates natural incentives for quality service: +- `ServiceFeeDistributor`: https://github.com/tangle-network/tnt-core/blob/main/src/rewards/ServiceFeeDistributor.sol +- `InflationPool`: https://github.com/tangle-network/tnt-core/blob/main/src/rewards/InflationPool.sol +- `RewardVaults`: https://github.com/tangle-network/tnt-core/blob/main/src/rewards/RewardVaults.sol -- Better performance → more customer instances -- Reliable operation → increased restaking delegation -- Strong reputation → premium service opportunities -- Operational excellence → enhanced protocol rewards +For a readable breakdown and links to contracts, see [Rewards Architecture](/developers/system-architecture/rewards). + +## Operator Success Factors + +- **Reliability**: uptime and correct execution impact service demand. +- **Performance**: meeting blueprint and QoS expectations drives repeat usage. +- **Transparency**: clear policies and monitoring improve operator selection. diff --git a/pages/network/incentives-overview.mdx b/pages/network/incentives-overview.mdx index c83199ae..bf8f40eb 100644 --- a/pages/network/incentives-overview.mdx +++ b/pages/network/incentives-overview.mdx @@ -1,33 +1,67 @@ -# Incentives Overview +# Incentives -## Platform Overview +Tangle’s incentives come from two sources: -Tangle Network is a platform designed for software monetization where developers can deploy software and benefit from its long-term utility. The platform operates as a marketplace where developers create Blueprints that customers can instance, generating ongoing revenue streams through usage fees and rewards. +1. **Service fees** paid by customers when they create and run blueprint services. +2. **TNT incentive budgets** funded to the protocol (no automatic minting). -When a Blueprint is instanced, the generated fees are automatically distributed between the Blueprint developer (50%), the Tangle protocol (20%), and the operators and restakers securing the service (30%). This distribution model ensures all participants are properly incentivized while maintaining platform sustainability. +
+ [Figure: Incentives flow for service fees and TNT budgets. Service fees split through Payments.sol; TNT budgets flow through InflationPool, RewardVaults, and ServiceFeeDistributor.]
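Building on the flow shown above, the sketch below follows the staker slice of one payment: 20% of the payment is set aside for stakers, and each delegator's share is weighted by delegated amount and lock multiplier. It is a simplification that assumes a single operator and ignores blueprint selection, per-asset commitments, and USD normalization, all of which the on-chain `ServiceFeeDistributor` also takes into account.

```typescript
// Simplified model of the staker slice of a payment. On-chain, Payments.sol routes
// this slice per operator into ServiceFeeDistributor, which then pays delegators.
interface Delegation {
  delegator: string;
  amount: number;         // delegated amount (illustrative units)
  lockMultiplier: number; // 1.0 with no lock; higher for longer lock periods
}

// 20% of a payment goes to stakers under the default split.
const stakerPortion = (payment: number) => (payment * 2000) / 10_000;

// Split one operator's staker fees across its delegators by weight.
// (Assumes a single operator; real routing is weighted by per-operator exposure.)
function delegatorPayouts(operatorStakerFees: number, delegations: Delegation[]) {
  const weight = (d: Delegation) => d.amount * d.lockMultiplier;
  const total = delegations.reduce((sum, d) => sum + weight(d), 0);
  return delegations.map((d) => ({
    delegator: d.delegator,
    payout: (operatorStakerFees * weight(d)) / total,
  }));
}

// Example: a 1,000-token payment puts 200 tokens into the staker slice.
console.log(delegatorPayouts(stakerPortion(1000), [
  { delegator: "maya", amount: 1000, lockMultiplier: 2 }, // ~133.3
  { delegator: "lee", amount: 1000, lockMultiplier: 1 },  // ~66.7
]));
```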
    -## Core Economic Mechanisms +Claim paths: staker fees + staker inflation via `ServiceFeeDistributor`, TNT staking incentives via `RewardVaults`, and operator/customer/developer TNT via `InflationPool`. -The network's economic model is built on three primary mechanisms: transaction fees, proof of stake rewards, and restaking incentives. +Code references: [Payments.sol](https://github.com/tangle-network/tnt-core/blob/main/src/core/Payments.sol), [ServiceFeeDistributor.sol](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/ServiceFeeDistributor.sol), [InflationPool.sol](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/InflationPool.sol), [RewardVaults.sol](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/RewardVaults.sol), [TangleMetrics.sol](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/TangleMetrics.sol) -### Transaction Fees +## Service Fees -TNT serves as the base asset for all network transactions, including EVM execution, Blueprint creation, and instance deployment. While transaction fees are required for network operation, the protocol may subsidize certain activities during initial phases to encourage adoption. +Customers pay for blueprint services using the chain’s native token or an ERC-20 payment token (including TNT). Fees are split between: -### Proof of Stake Security +- **Developer** (blueprint owner, or a payout address returned by the blueprint’s service manager) +- **Protocol treasury** +- **Operators** (weighted by that service’s per-operator exposure) +- **Stakers** (delegators who staked with the chosen operators) -The network operates on a nominated proof-of-stake consensus mechanism where validators and nominators work together to secure the network. Validators earn native rewards for block production and consensus tasks, while setting their own commission rates. Nominators can stake TNT to support validators and receive pro-rated rewards, with the current network providing approximately 4-5% APY combined. +The default split is **20% developer / 20% protocol / 40% operators / 20% stakers**, and can be updated by governance. -### Restaking Infrastructure +Staker shares are routed per-operator to the on-chain `ServiceFeeDistributor`, which distributes fees to delegators based on: -Tangle's restaking system creates an efficient marketplace between developers, operators, and restakers. Restakers can earn base rewards by depositing TNT and boost their earnings through longer lock periods. Operators run Blueprint instances and earn fees from service execution, while maintaining high performance standards. Blueprint developers benefit from custom reward distributions and long-term adoption of their services. +- Delegated amount (and optional lock multiplier) +- Blueprint selection (`All` vs `Fixed`) +- Optional per-asset security requirements and operator commitments (and optional USD normalization via a price oracle) -### Blueprint Economics +## TNT Incentives (Pre-Funded) -The protocol enforces a transparent fee distribution model between developers, operators, restakers, and the underlying Tangle protocol. +TNT incentives are distributed from a pre-funded on-chain pool: -- **50%** goes directly to you as the blueprint developer -- **30%** is allocated to operators and restakers through the Boosted TNT restaking mechanism -- **20%** flows to the protocol treasury for ecosystem development +- `InflationPool` holds TNT allocated by governance/treasury and distributes it in epochs. 
+- The staking portion funds `RewardVaults`, which pays TNT incentives for delegated assets (with a deposit cap per asset). +- Other portions become claimable TNT balances for operators, customers, and developers. +- If `stakersBps > 0`, a staker TNT budget is distributed by service exposure and routed through `ServiceFeeDistributor`. -TNT remains the primary restaking asset required for all Blueprint instances. Developers can incorporate additional approved assets for enhanced security. Through programmable distribution mechanisms, developers can customize fee allocations while maintaining protocol alignment. +## Metrics and Scoring + +The protocol can optionally record activity into a metrics contract (`TangleMetrics`) using best-effort hooks (failures do not block core protocol actions). Those metrics drive merit-based distributions in `InflationPool`. + +- See [Metrics and Scoring](/network/metrics-and-scoring) for details. + +## Why This Layout Works + +- **One staker fee path**: Service fees always flow through `ServiceFeeDistributor`, so payout math is consistent across tokens and services. +- **Budget clarity**: Incentives only exist if governance funds them; there is no hidden inflation. +- **Predictable integration**: Developers can reason about exactly where funds go and how to claim them. + +## Source Contracts (GitHub) + +- [`Payments.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/core/Payments.sol) +- [`ServiceFeeDistributor.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/ServiceFeeDistributor.sol) +- [`InflationPool.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/InflationPool.sol) +- [`RewardVaults.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/RewardVaults.sol) +- [`TangleMetrics.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/TangleMetrics.sol) diff --git a/pages/network/incentives-restakers.mdx b/pages/network/incentives-restakers.mdx deleted file mode 100644 index 7bbd4acf..00000000 --- a/pages/network/incentives-restakers.mdx +++ /dev/null @@ -1,150 +0,0 @@ -# Restaking Incentives for Restakers - -## Overview - -Tangle Network incentivizes restakers through TNT rewards for depositing specific assets as well as with service revenue and fee rewards. Deposit rewards are distributed in an ongoing manner and are immediately claimable. Service revenue is distributed in fixed time intervals according to governance. - -The protocol determines which assets are incentivized for deposits and their maximum deposit capacities through governance decisions. Non-incentivized assets are still valid for deposits and can be leveraged to instance Tangle Blueprints. - -### Score vs Points - -For the purposes of avoiding confusion we use the terms points and score independently. Points refer to any future airdrop and scores refer to onchain values that we leverage for distributing onchain rewards, whether from deposits or services. Points are NOT equal to score values! Hopefully we don't mistake them in the docs. - -## Deposit Mechanics - -Depositing incentivized assets on Tangle earns your TNT tokens and points in upcoming airdrops. There are a list of assets onchain that are incentivized. For these assets, there exists a **vault** for them specifying the capacity and APY for deposits. - -### Deposit Capacity - -The restaking system implements maximum deposit capacities for each incentivized asset to ensure secure and controlled TVL growth. 
Key aspects: - -- Fixed maximum amount per asset -- APY distributed proportionally to deposits/capacity ratio -- Unallocated APY is never minted -- APY defines reward allocation at full capacity - -### Protocol Implementation - -The `pallet-multi-asset-delegation` system manages: - -- Deposit and withdrawal logic -- Delegation to operators -- Asset staking mechanisms -- APY and capacity configurations -- System upgrades via governance - -Users can view available assets and their parameters through the Tangle Restaking dApp. - -### Withdrawal Process - -- Standard withdrawal delay: ~7 days (W sessions) -- Deposits continue earning incentives during withdrawal period -- Assets must be unstaked before withdrawal - -## Scoring and Rewards - -When Tangle Blueprints are instanced, a new service is created. Customers instance Tangle Blueprints by selecting the operators and types of restaked assets they desire for security. They pay for the service in advance and throughout operation for task based services. - -The payments and fees are distributed according to a scoring mechanism that is normalized against the USD value of the service instance's assets. - -TNT is the default restaking asset and must be included as restaked security collateral for all service instances. We treat TNT as a special asset when scoring. TNT earns a score of 1 value per 1 TNT allocated to secure the service, whereas other assets earn a score of 1 value per $1 allocated to secure the service. - -### Lock Multipliers - -Users can enhance their score through time-locks. This locks their deposited assets for additional months, preventing any withdrawals or unstaking until the lock has ended: - -- Longer locks lead to higher score multipliers -- Locked tokens cannot be withdrawn until expiry -- Multiplier directly impacts deposit reward share - -### Reward Calculation for Deposits - -The reward formula $R$ for a user at any point is: - -$R = APY \times \frac{S_u}{S_t} \times \frac{D_t}{C}$ - -Where: - -- $APY$ = Asset's annual percentage yield -- $S_u$ = User's score (including lock multipliers) -- $S_t$ = Total score across all users -- $D_t$ = Total deposits for the asset -- $C$ = Maximum deposit capacity - -### Reward Calculation for Service Rewards - -The protocol allocates service revenue and fee rewards using a dual-tier scoring mechanism: - -For TNT (Native Token): - -- Each TNT restaked earns 1 base point - -For Other Approved Assets: - -- Each $1 USD value restaked earns 1 base point - -The reward formula $R$ for a restaker securing a service instance is calculated as: - -$R = Rewards \times \frac{S_u}{S_t}$ - -Where: - -- $Rewards$ = Rewards and fees of the service instance -- $A_u$ = User's restaking score for the exposed assets -- $A_t$ = Total score across all exposed assets of the service instance - -## Example Scenario: Calculating Restaking Rewards - -Let's walk through practical examples to understand how both deposit rewards and service revenue rewards are calculated. - -### Deposit Rewards Example - -Let's say Alice deposits 1000 TNT with a 12-month lock: - -Initial Parameters: - -- Base APY: 5% -- Lock multiplier: 2x for 2 months -- Total deposits: 100,000 TNT -- Maximum capacity: 1,000,000 TNT - -Her rewards calculation: - -1. Score calculation: - - - Base score: 1000 (from 1000 TNT) - - With 2x multiplier: 2000 score - - Total system score: 150,000 - -2. 
Annual deposit rewards: - ``` - 5% × (2000/150,000) × (100,000/1,000,000) × 1000 TNT = 0.67 TNT - ``` - -### Service Revenue Example - -Now let's calculate Alice's service revenue rewards: - -Initial Parameters: - -- Service instance monthly fees: 1000 USDC -- Service revenue allocation: 30% to restakers -- Alice's restaked assets: - - 1000 TNT (1000 points) - - 2000 USDC (2000 points) -- Total service restaking points: 10000 (5000 TNT + 5000 USDC worth) - -Her service revenue calculation: - -1. Monthly revenue share: - - ``` - 1000 USDC × 30% = 300 USDC to restakers - ``` - -2. Alice's share: - ``` - 300 USDC × (3000/10000) = 90 USDC per month - ``` - -Alice would earn approximately 90 USDC per month from service revenue, in addition to her deposit rewards of 0.67 TNT annually. Her service revenue share is calculated based on her total restaking score (3000 points from 1000 TNT + 2000 USDC) divided by the total service restaking score of 10000 points. Note that while lock period multipliers affect deposit rewards, they do not impact service revenue calculations. The service revenue is paid out in whatever token the customers use to pay for the service - in this example USDC. diff --git a/pages/network/incentives-stakers.mdx b/pages/network/incentives-stakers.mdx new file mode 100644 index 00000000..e2df3524 --- /dev/null +++ b/pages/network/incentives-stakers.mdx @@ -0,0 +1,66 @@ +# Staker Incentives + +Stakers (delegators) earn two types of rewards on Tangle: + +1. **Service fee revenue** from blueprint services they help secure (paid in the service’s payment token). +2. **Optional TNT incentives** for delegating assets (pre-funded by governance). + +## How You Participate + +- Deposit supported assets into the on-chain `MultiAssetDelegation` staking contract. +- Delegate to an operator and choose a blueprint selection mode: + - **`All`**: you are exposed to all blueprints the operator participates in. + - **`Fixed`**: you choose which blueprint IDs you accept exposure to. +- Optionally apply a lock multiplier (1–6 months) to boost reward share. + +## TNT Deposit Incentives (`RewardVaults`) + +TNT incentives are paid from `RewardVaults`: + +- One vault per staking asset (native, TNT, etc.). +- Governance sets a **deposit cap** and whether the vault is active. +- Rewards are paid in **TNT** from a pre-funded `InflationPool` (no minting). +- Delegator share is based on principal × lock multiplier and the operator’s commission setting. + +## Service Fee Revenue (`ServiceFeeDistributor`) + +When customers pay for a service, the protocol splits fees and routes the **staker portion** per operator to `ServiceFeeDistributor`. From there, fees are distributed to delegators who staked with that operator based on: + +- Delegated amount (and optional lock multiplier) +- Blueprint selection (`All` vs `Fixed`) +- Optional per-asset commitments and USD weighting (if a price oracle is configured) +- Optional streaming over a service’s TTL (for streamed payments) + +## Exposure-Based Protocol Rewards (`InflationPool`) + +If enabled, `InflationPool` can allocate a staker budget in TNT based on **service exposure**. Exposure is computed at distribution time using `ServiceFeeDistributor` and optional USD pricing. + +- The keeper calls `InflationPool.distributeEpochWithServices(serviceIds)`. +- TNT is routed into `ServiceFeeDistributor` and claimed there (same flow as service fees). + +## Risks + +- Slashing reduces the withdrawable value of operator positions using share/exchange-rate accounting. 
+- Use `Fixed` blueprint selection if you want to scope exposure to specific blueprints. + +Review delegator risks before choosing exposure and lock settings. + +## Source Contracts (GitHub) + +- [`MultiAssetDelegation.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/staking/MultiAssetDelegation.sol) +- [`RewardVaults.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/RewardVaults.sol) +- [`ServiceFeeDistributor.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/ServiceFeeDistributor.sol) +- [`InflationPool.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/InflationPool.sol) + +## User Stories + +**Maya the staker** + +- She delegates 1,000 wstETH to a trusted operator in `Fixed` mode for two blueprints she understands. +- She earns USDC fees from services on those blueprints and optional TNT incentives from the staking vault. +- When the operator stops serving a blueprint, she removes it from her fixed list to reduce exposure. + +**Lee the integrator** + +- He builds a dashboard that shows stakers their estimated service-fee rewards. +- He reads scores and pending rewards from `ServiceFeeDistributor`, and vault info from `RewardVaults`. diff --git a/pages/network/launch.mdx b/pages/network/launch.mdx index 35c6fb20..24c2906d 100644 --- a/pages/network/launch.mdx +++ b/pages/network/launch.mdx @@ -1,62 +1,34 @@ -import { Callout } from 'nextra/components' +import { Callout } from "nextra/components"; -# Tangle Network Mainnet Launch +# Launch -## Launch Date - -- **Mainnet Activation**: Scheduled for April 10, 2024, Tangle Network will make its entrance as a fully operational NPoS network, virtually permissionless, decentralized and community governed from the start. **Validators will be able to participate in validation at 2:00:00 UTC on April 10 2024.** +Tangle is launching as a protocol stack on EVM: a coordination layer for agents and services, plus a migration path for TNT. Operators provide compute, and the protocol contracts handle coordination, payments, and incentives. - You can claim your airdrop at https://app.tangle.tools/claim. See our [guide at Docs:Claim Airdrop](./claim-airdrop) Note that participants will have 1 year to claim their distribution, **the deadline is April 10 2025.** otherwise the amount is sent to the Tangle Network on-chain treasury. + If you are claiming TNT from the migration or airdrop allocations, see the [TNT Migration and Claims](/network/claim-airdrop) guide. -## NPoS Launch and Operations - -- nPoS Framework: Tangle Network has adopted a Nominated Proof of Stake model as its foundational security mechanism. This allows TNT holders to engage directly in the network's security by nominating validators they trust to be responsible for block production and transaction validation. -- Validator Participation: Validators are essential to the network's integrity and performance. Validators are encouraged to stake TNT tokens and signal their commitment to maintaining network protocols. - -## Governance Activation - -- Initial Governance: Governance modules are now activated, granting TNT holders the power to participate in decision-making processes through proposals, referenda, and council elections, ensuring a democratic approach to the network's development. -- Tangle's Sudo Key Role: Initially, Webb, a core development team, holds a Sudo key to ensure smooth operations and governance transitions. 
This temporary measure allows for efficient implementation of critical updates and decisions during the early stages of the network. -- Sudo Key Deactivation: A timeline is in place for deactivating the Sudo key, transitioning to a fully decentralized governance model. This significant step is expected to occur within the first few months, with input from the token holders to affirm the network's readiness. - -## Core Functionality - -- Balances and Transfers: TNT token balances are active, with token transfer capabilities generally enabled to ensure liquidity and participation in the network's economy. If you receive tokens through the airdrop or other allocation processes, up to 95% of these may be subject to a lockup. See the Allocation page for details. -- Community Proposals for Additional Features: The community is empowered to propose and vote on activating additional core functionalities. This encompasses enhanced transaction types, new governance mechanisms, and network upgrades, among others. - -# Mainnet Genesis Participation - -1. Update to the latest Tangle node release: - -- Visit the releases page on GitHub: https://github.com/tangle-network/tangle/releases -- Download and install the latest version of the Tangle node software. +## What Launch Means Today -2. Obtain the mainnet genesis script: +- **Protocol core is deployed** as a set of EVM contracts (TNT, Blueprints, operators, payments). +- **Migration tooling is live** for Substrate → EVM claims and direct EVM allocations. +- **Incentives and credits are optional** modules that can be enabled by governance. -- Download the mainnet genesis script from the following URL: https://github.com/tangle-network/tangle/blob/main/chainspecs/mainnet/tangle-mainnet.json -- Save the script in the appropriate directory on your validator node. +Protocol contracts (GitHub): https://github.com/tangle-network/tnt-core/tree/main/src -3. Configure your node for mainnet: +## Token Distribution and Lockups -- Update your node's configuration to use the mainnet genesis script. -- Ensure your node is configured to connect to the mainnet network. +The protocol supports two distribution paths: -4. Wait for the mainnet activation: +- **Substrate → EVM migration** via the SP1-based claim system. +- **Direct EVM distributions** via batched transfers with configurable lockups. -- The Tangle Network mainnet is scheduled to activate at 02:00:00 UTC on April 10, 2024. -- Keep your node running and connected to the network until the activation time. +Both paths support a split between unlocked and vested allocations. See the claims guide for default parameters. -5. Participate in validation: +## Governance Status -- Once the mainnet is activated, your node will automatically start participating in block production and transaction validation. -- Ensure your node has sufficient TNT tokens staked to meet the minimum staking requirements for validators. +Governance contracts exist, but production activation depends on the rollout plan and multisig handoff. The protocol is designed to move from admin-controlled deployment to community governance as it matures. -### Important Network Settings +## Operator Onboarding -- Native Asset Symbol: TNT -- Native Asset Decimals: 18 -- Chain ID: `5845` -- Public RPC URL: `https://rpc.tangle.tools` -- Public WSS URL: `wss://rpc.tangle.tools` +Operators provide the compute layer and run sandboxed workloads. See the [Operate](/operators/introduction) section for setup and operational guidance. 
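To make the unlocked/vested split concrete, here is a minimal TypeScript sketch of a claimable-balance calculation for an allocation that is part unlocked and part linearly vested. The linear schedule, field names, and the 5% unlocked share are illustrative assumptions only; the actual terms are defined by the distribution contracts for each allocation.

```typescript
// Hypothetical allocation shape: part unlocked at claim time, part vested linearly.
interface Allocation {
  total: bigint;           // total TNT allocated (18 decimals)
  unlockedBps: number;     // share available immediately, in basis points
  vestingStart: number;    // unix timestamp when vesting begins
  vestingDuration: number; // seconds over which the vested share unlocks linearly
}

function claimableAt(a: Allocation, now: number): bigint {
  const unlocked = (a.total * BigInt(a.unlockedBps)) / 10_000n;
  const vested = a.total - unlocked;
  if (now <= a.vestingStart) return unlocked;
  const elapsed = Math.min(now - a.vestingStart, a.vestingDuration);
  return unlocked + (vested * BigInt(elapsed)) / BigInt(a.vestingDuration);
}

// Example: 1,000 TNT, 5% unlocked, 1-year linear vesting, queried halfway through.
const alloc: Allocation = {
  total: 1_000n * 10n ** 18n,
  unlockedBps: 500,
  vestingStart: 1_750_000_000,
  vestingDuration: 365 * 24 * 60 * 60,
};
console.log(claimableAt(alloc, alloc.vestingStart + alloc.vestingDuration / 2));
// ~50 TNT unlocked + ~475 TNT vested at the halfway point
```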
diff --git a/pages/network/metrics-and-scoring.mdx b/pages/network/metrics-and-scoring.mdx new file mode 100644 index 00000000..dc0bb6cc --- /dev/null +++ b/pages/network/metrics-and-scoring.mdx @@ -0,0 +1,56 @@ +--- +title: Metrics and Scoring +description: How Tangle records protocol activity and uses it for incentive distribution. +--- + +# Metrics and Scoring + +Tangle’s incentive programs can be driven by on-chain activity metrics. Metrics are optional: the protocol is designed so that core actions still work even if metrics recording is disabled or temporarily unavailable. + +## Core Components + +- **`TangleMetrics`**: an on-chain activity recorder implementing `IMetricsRecorder`. +- **Metrics hooks**: best-effort calls from core contracts (wrapped in `try/catch`) that emit or aggregate activity data. +- **`InflationPool`**: a pre-funded TNT budget that can distribute TNT based on recorded metrics. +- **`ServiceFeeDistributor`**: distributes the staker share of service fees and exposes USD-weighted exposure views for staker inflation. + +## What Gets Recorded + +Depending on what is configured on-chain, the protocol can record: + +- **Blueprint activity**: blueprint creation and operator registrations. +- **Service activity**: service creation/termination, job calls, job completion success rates. +- **Payments**: total fees paid by customers. +- **Operator liveness**: heartbeats for active services. +- **Slashing**: executed slash events (and the slashed amount). + +## Staker “Exposure” Scoring + +For staker rewards funded by `InflationPool`, exposure is **computed at distribution time**: + +- `InflationPool.distributeEpochWithServices` queries `ServiceFeeDistributor.getOperatorServiceUsdExposure`. +- Exposure uses the same security requirements and commitments that gate service-fee payouts. +- If a price oracle is configured, the exposure is normalized in USD terms; otherwise it falls back to raw amounts. + +This avoids storing per-delegator exposure metrics on-chain and keeps staker inflation aligned with the same exposure model used for fees. + +## Heartbeats and QoS + +Operators (often via their blueprint manager) can submit service heartbeats to `OperatorStatusRegistry`. Heartbeats: + +- Provide an on-chain signal of liveness for a given `(serviceId, operator)`. +- Can mark operators offline for a service when heartbeats are missed. +- Are often used as part of off-chain monitoring, and may be referenced when proposing a slash. + +Heartbeats do not automatically slash an operator by themselves; slashing requires an authorized on-chain proposal and execution. + +## Important Note on Budgets + +Merit-based rewards only exist if `InflationPool` is funded with TNT. If it has a zero balance, there is nothing to distribute, even if metrics are recorded. 
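As a back-of-the-envelope view of exposure-weighted distribution, the sketch below splits one epoch's staker TNT budget across operators in proportion to USD exposure. It is a simplification of what `InflationPool.distributeEpochWithServices` and `ServiceFeeDistributor` do on-chain; the operator names and amounts are illustrative.

```typescript
// Simplified epoch distribution: split a staker TNT budget pro rata by USD exposure.
// On-chain, InflationPool queries ServiceFeeDistributor for per-operator exposure
// and routes the resulting TNT through the same claim path as service fees.
interface OperatorExposure {
  operator: string;
  usdExposure: number; // exposure for this epoch's services, in USD terms
}

function distributeStakerBudget(budgetTnt: number, exposures: OperatorExposure[]) {
  const totalExposure = exposures.reduce((sum, e) => sum + e.usdExposure, 0);
  if (totalExposure === 0) return []; // nothing to distribute against
  return exposures.map((e) => ({
    operator: e.operator,
    tnt: (budgetTnt * e.usdExposure) / totalExposure,
  }));
}

// Example: a 10,000 TNT epoch budget across two operators.
console.log(distributeStakerBudget(10_000, [
  { operator: "op-a", usdExposure: 750_000 }, // receives 7,500 TNT
  { operator: "op-b", usdExposure: 250_000 }, // receives 2,500 TNT
]));
```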
+ +## Source Contracts (GitHub) + +- [`TangleMetrics.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/TangleMetrics.sol) +- [`InflationPool.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/InflationPool.sol) +- [`ServiceFeeDistributor.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/ServiceFeeDistributor.sol) +- [`OperatorStatusRegistry.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/staking/OperatorStatusRegistry.sol) diff --git a/pages/network/network-parameters.mdx b/pages/network/network-parameters.mdx index 586d54ae..9b5a19b2 100644 --- a/pages/network/network-parameters.mdx +++ b/pages/network/network-parameters.mdx @@ -1,32 +1,79 @@ -# Network Parameters +# Protocol Parameters -This page outlines the key parameters and attributes of the Tangle network. Please note that some of these values may be subject to change via on-chain governance. For the most up-to-date and accurate values, it is recommended to check the constants directly by inspecting the [chain state](https://polkadot.js.org/apps/#/chainstate/constants) and/or [storage](https://polkadot.js.org/apps/#/chainstate). +This page summarizes protocol-level defaults for the Tangle v2 (EVM) deployment. Values are configured per environment and may change via governance or admin controls. For live values, read the contracts on the target network. -### Fee Structure +## TNT Core Values -- Dust (small, uneconomical to track amounts) is burned. -- Fees are split as follows: - - 80% goes to the treasury - - 20% goes to the block author (validator) -- 100% of tips go to block authors, similar to gas fees on other networks. +| Parameter | Value | +| --------------------- | --------------------------------- | +| Name | Tangle Network Token | +| Symbol | TNT | +| Decimals | 18 | +| Max supply (hard cap) | 109,255,636.91921292788561091 TNT | -### Epoch and Era Durations +Source: https://github.com/tangle-network/tnt-core/blob/main/src/governance/TangleToken.sol -| Tangle | Time | -| ------- | --------- | -| Slot | 6 seconds | -| Epoch | 4 hours | -| Session | 4 hours | -| Era | 24 hours | +## TNT Contract Addresses -### Block Time +| Environment | Token Contract | Explorer | +| ----------- | -------------------------------------------- | ---------------------------------------------------------------------------------------- | +| Mainnet | `TBD` | `TBD` | +| Testnet | `0xa9ffe787eea7f385dac8481cd8bdc3d9194aeb5a` | https://testnet-explorer.tangle.tools/address/0xa9ffe787eea7f385dac8481cd8bdc3d9194aeb5a | +| Local | `TBD` | `TBD` | -The Tangle network targets a 6-second block time. +Testnet source: https://github.com/tangle-network/tnt-core/blob/v2/deployments/base-sepolia/latest.json -### Governance +## Core Contract Addresses -- The total minimum deposit for a governance proposal is `PREIMAGE_DEPOSIT + DEMO_MINIMUM_DEPOSIT = 100 + 1000 + (preimage byte fee) > 1100`. 
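If you want to sanity-check the token values and testnet address above from a script, a minimal viem read looks like the sketch below. It assumes the testnet deployment is on Base Sepolia (as suggested by the `deployments/base-sepolia` path) and that the chain's default public RPC is acceptable; adjust the chain and transport for your environment.

```typescript
import { createPublicClient, http, erc20Abi } from "viem";
import { baseSepolia } from "viem/chains";

// Read basic TNT metadata from the testnet token address listed above.
const client = createPublicClient({ chain: baseSepolia, transport: http() });
const tnt = "0xa9ffe787eea7f385dac8481cd8bdc3d9194aeb5a" as const;

async function main() {
  const [symbol, decimals] = await Promise.all([
    client.readContract({ address: tnt, abi: erc20Abi, functionName: "symbol" }),
    client.readContract({ address: tnt, abi: erc20Abi, functionName: "decimals" }),
  ]);
  console.log({ symbol, decimals }); // expected: { symbol: "TNT", decimals: 18 }
}

main();
```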
+| Contract | Mainnet | Testnet | Local | +| ------------------------ | ------- | -------------------------------------------- | ----- | +| `Tangle` | `TBD` | `0x1be58d12620ecc8ba9d780feec2596510d75a933` | `TBD` | +| `MultiAssetDelegation` | `TBD` | `0x787dd1de4099ff8c68bfac11b82e4aed52c7f1e1` | `TBD` | +| `OperatorStatusRegistry` | `TBD` | `0x20258c5e4cba66d4819a06045ff00d15775e64fb` | `TBD` | +| `TangleMetrics` | `TBD` | `0x2057f94d04e4d667c4d5e60d23d2963358c00970` | `TBD` | +| `RewardVaults` | `TBD` | `0x2963a51fec3e2cf51b19b848942d91296448a353` | `TBD` | +| `InflationPool` | `TBD` | `0xe620f87540724a0cebdee9796dd8580e02dd4911` | `TBD` | +| `Credits` | `TBD` | `0x758226e04478541fcdac605e1f235e2956259a10` | `TBD` | -### Precision +## Protocol Defaults (v2) -TNT has 18 decimal places. +Defaults are defined in `TangleStorage` and `ProtocolConfig` and can be overridden by governance/admin roles. + +### Fee and Security Defaults + +| Parameter | Value | +| --------------------------- | ---------------------------------------------------------- | +| Service fee split (default) | 20% developer / 20% protocol / 40% operators / 20% stakers | +| Default TNT min exposure | 10% (1000 bps) | +| TNT payment discount | 0 bps | +| Aggregation threshold | 67% (6700 bps) | + +### Timing Defaults + +| Parameter | Value | +| ----------------------- | -------------------- | +| Round duration | 6 hours | +| Rounds per epoch | 28 (7 days) | +| Delegator unstake delay | 28 rounds (7 days) | +| Operator exit delay | 56 rounds (14 days) | +| Dispute window | 14 rounds (3.5 days) | +| Reward grace period | 4 rounds (24 hours) | +| Min service commitment | 1 day | +| Exit queue duration | 7 days | +| Min service TTL | 1 hour | +| Max service TTL | 365 days | +| Request expiry grace | 1 hour | +| Max quote age | 1 hour | + +Sources: + +- https://github.com/tangle-network/tnt-core/blob/main/src/TangleStorage.sol +- https://github.com/tangle-network/tnt-core/blob/main/src/config/ProtocolConfig.sol + +## Host Chain Parameters + +Tangle can deploy on multiple EVM chains. Chain-level parameters (gas token, chain ID, block time, finality) are determined by the host chain and may differ by environment. See [Endpoints and Integration](/developers/endpoints) for live network details. + +## Metrics and Scoring + +See [Metrics and Scoring](/network/metrics-and-scoring) for how activity is recorded and used in incentive distribution. diff --git a/pages/network/overview.mdx b/pages/network/overview.mdx index a5111300..6a577426 100644 --- a/pages/network/overview.mdx +++ b/pages/network/overview.mdx @@ -2,54 +2,67 @@ import ExpandableImage from "../../components/ExpandableImage"; -## Introduction +# Protocol Foundation -Tangle Network is a blockchain designed to provide secure, decentralized infrastructure services. Developers can build and deploy composable services called Blueprints. Operators provide validator services to secure Blueprint instances requested by users, and together they form a decentralized cloud infrastructure provider where incentives and rewards are distributed based on usage and contribution to the network. +Tangle is the shared operating layer for autonomous work. The protocol coordinates operator-run services and routes payments. Developers publish Blueprints, users instantiate them on demand, and operators run them for a fee. -**Build Composable Service:** -Blueprints are composable services that can be instantiated by users. 
They are built using our [blueprint](https://github.com/tangle-network/blueprint) framework, which also integrates with Eigenlayer and other restaking networks. +## Today vs Future -**Earn as a Service Operator** -Operators can earn rewards by operating Blueprint instances requested by users. Operators can register for Blueprints and maintain control over the services they operate, selecting the assets they want to expose for securing the Blueprint. +Today the protocol ships with managed onboarding and a curated operator set. Over time it evolves into an open marketplace where operators host runtime services and earn based on reliability and usage. -**Maximize Asset Utilization** -Tangle enables shared security across Blueprints, allowing operators to secure multiple Blueprints with a single set of assets and restakers to earn rewards for their stakes proportional to the value secured. +## Build Composable Services + +Blueprints are composable services that can be instantiated by users. They are built using the [Blueprint SDK](https://github.com/tangle-network/blueprint/tree/v2) and can integrate with external security providers when needed. + +## Earn as a Service Operator + +Operators can earn rewards by operating Blueprint instances requested by users. Operators register for Blueprints and maintain control over the services they run, selecting the resources they want to expose for securing the Blueprint. + +## Maximize Resource Utilization + +Tangle enables shared security across Blueprints, allowing operators to secure multiple Blueprints with a single pool of resources and participants to earn rewards proportional to the value secured. + +## Who This Is For + +- **Builders** who want to package services as Blueprints and ship them on demand. [Build with Blueprints](/developers/blueprints/introduction) +- **Operators** who want to run services reliably and earn service fees. [Operator onboarding](/operators/introduction) +- **Stakers** who want to back operators and earn from usage. [Economic security](/staking/introduction) ## How Services Work -Tangle Network employs a modular architecture to enable the creation and deployment of complex cryptographic services called Blueprints: +Tangle Network employs a modular architecture to enable the creation and deployment of reusable services called Blueprints: #### Blueprints -A Blueprint is a specification that defines a service, similar to an actively validated service (AVS). However, Blueprints themselves are not live service instances. Developers create Blueprints by specifying a "gadget" binary, the jobs involved, a set of smart contracts for registration and requesting instances, and additional metadata. Users can instance AVSes from Blueprints, similar to EC2 instances, by configuring the operator set and paying the necessary fees. [Read more about Blueprints](../developers/blueprints/introduction.mdx) +A Blueprint is a specification that defines a service. Blueprints themselves are not live service instances. Developers create Blueprints by specifying a service binary (artifact), the jobs involved, a set of smart contracts for registration and requesting instances, and additional metadata. Users instantiate services by configuring the operator set and paying the necessary fees. [Read more about Blueprints](/developers/blueprints/introduction) -#### Restaking on Blueprints +#### Economic Security on Blueprints -Asset issuers restake their assets on operators who run services based on Blueprints. 
Asset issuers can restake any fungible asset and can restake these assets on a single operator at a time. Operators register for services and agree through the smart contract logic to run Blueprint service instances when requested. [Read more about restaking](../restake/introduction.mdx) +Economic security backs operators who run services based on Blueprints. Operators register for services and agree through the smart contract logic to run Blueprint service instances when requested. [Read more about economic security](../staking/introduction.mdx) #### Requesting Service Instances -Users deploy live service instances using a Blueprint's request functionality. The requester specifies any criteria like the number of operators or other operator attributes. A specific subset of restaked operators is then selected to run that service instance. +Users deploy live service instances using a Blueprint's request functionality. The requester specifies criteria like the number of operators or other operator attributes. A specific subset of qualified operators is then selected to run that service instance. We aim to support fine-grained controls over requesting instances, such as specifying the number of operators, the operator set, and other operator attributes. This allows users to customize their service instances based on their requirements. #### Incentives -Operators perform the service and earn inflationary rewards for executing Blueprint service instances. Importantly, developers who create popular Blueprints also earn a share of these rewards, incentivizing valuable Blueprint creation. +Operators earn **service fees** for running Blueprint instances and may earn optional **TNT incentives** if governance funds them. Developers earn a share of service fees and can also receive optional TNT incentives based on activity metrics. This separation into Blueprints and instances allows services to be defined once but instantiated multiple times by different users with varying operator requirements. Blueprint developers benefit from their work being utilized widely across ecosystems and applications. ## Use Cases -Tangle Network is designed to function as a highly-specialized crypto cloud platform. It is meant to support a wide range of complex cryptographic services and applications built by developers in a reusable and instanceable fashion. Some key service areas include: +Tangle is a protocol for reusable cryptographic and AI services. It supports complex workloads that need verifiable execution and reliable operator incentives. Key service areas include: - **Privacy Infrastructure**: Tangle provides a foundation for enabling privacy-preserving solutions through technologies like multi-party computation (MPC) and zero-knowledge proofs. - **Oracles**: Build decentralized oracle services that can securely feed off-chain data to blockchains using Tangle's threshold signatures and MPC primitives. -- **AI/ML**: Offer secure and scalable AI inference, fine-tuning, and proving through actively validated services on Tangle's decentralized network. +- **AI/ML**: Offer secure and scalable AI inference, fine-tuning, and evaluation services through operator-hosted runtimes. - **Custody Solutions**: Leverage Tangle's cryptographic services to create distributed custody solutions with robust security guarantees. 
@@ -59,4 +72,4 @@ Tangle Network is designed to function as a highly-specialized crypto cloud plat - **Novel Cryptographic Applications**: Tangle's modular architecture and rich cryptographic library enable developers to build innovative applications requiring advanced cryptographic primitives. -The core vision of Tangle is to provide a comprehensive infrastructure for developers to create and monetize a wide array of cutting-edge services secured by the network's novel restaking mechanism. Developers are incentivized to contribute valuable service blueprints, which can then be instantiated and utilized across ecosystems by operators staking assets from various networks. +The core vision of Tangle is to provide infrastructure for developers to create and monetize a wide array of services with reliable execution and clear incentives. Developers are rewarded for valuable Blueprints, and operators are rewarded for dependable service delivery. diff --git a/pages/network/points-mechanics.mdx b/pages/network/points-mechanics.mdx index 4e789a1c..4b27abf7 100644 --- a/pages/network/points-mechanics.mdx +++ b/pages/network/points-mechanics.mdx @@ -1,66 +1,41 @@ -# Tangle Network Points System +# Participation Credits -The Tangle Network introduces a points system, allowing participants to accumulate experience points, also referred to as XP, by engaging in various activities across the ecosystem. This system is designed to recognize contributions and encourage active involvement. +Tangle uses a credits system to recognize contributions and make participation auditable. Credits are computed off-chain, published as a Merkle root, and claimed on-chain through a dedicated contract. -Points earned will directly influence your eligibility for future airdrops, and we will leverage the democracy system to push forward retroactive airdrops for different types of participation from staking and restaking to operating and validating to developing and instancing Blueprints. +Credits are not token transfers. They are a verifiable record of participation that can be used for programs, access, or distributions over time. -## Participation Mechanics: How to Earn Points +## How Credits Work -Your points are earned through various participation mechanics across Tangle Network. These points reflect your contributions and will determine your rank on the upcoming leaderboard. +1. Eligible activity is indexed off-chain. +2. A Merkle root for the epoch is published on-chain. +3. Users claim credits for that epoch via a single transaction. -**1\. Restaking Participation** +This approach keeps the system lightweight while preserving verifiability. -- Deposits: Earn points by depositing whitelisted assets into the restaking system. -- Delegations: Delegate whitelisted assets to operators to earn points. +## Example Credit Sources -**2\. Network Participation** +- **Operator activity**: running workloads, uptime, and reliability. +- **Blueprint usage**: services that are instantiated and used by customers. +- **Ecosystem growth**: integrations and usage that expand the network. -- Staking and Nominating: Stake TNT tokens and nominate validators. -- Liquid Staking: Liquid stake TNT to earn points. +The exact credit sources and weights can evolve. Always refer to the current program details for what counts in each epoch. -**3\. Network Roles** +## Claiming Credits -- Operators: Register as an operator to earn points. Operate services to earn points. -- Validators: Run a validator to earn points. 
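To make the epoch flow above concrete, here is a minimal off-chain sketch using OpenZeppelin's merkle-tree library. The `(address, uint256)` leaf encoding and the sample values are assumptions for illustration only; the actual `Credits` contract defines its own schema (see the source link below).

```ts
// Illustrative sketch of publishing an epoch root and deriving a claim proof.
// The leaf encoding (address, amount) is assumed for illustration; the Credits
// contract's real schema is defined in its source.
import { StandardMerkleTree } from "@openzeppelin/merkle-tree";

const epochCredits: [address: string, amount: string][] = [
  ["0x1111111111111111111111111111111111111111", "1500"],
  ["0x2222222222222222222222222222222222222222", "900"],
];

// Off-chain indexer: build the tree and publish tree.root on-chain for the epoch.
const tree = StandardMerkleTree.of(epochCredits, ["address", "uint256"]);
console.log("epoch root:", tree.root);

// Claimant: look up the leaf and generate the proof submitted in a single claim transaction.
for (const [i, [who, amount]] of tree.entries()) {
  if (who === "0x1111111111111111111111111111111111111111") {
    console.log("claim", amount, "with proof", tree.getProof(i));
  }
}
```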
+Credits are claimed through the `Credits` contract once a new epoch root is published. The claim flow is intentionally simple and can be automated by the UI. -**4\. Partner Integrations** +Contract source (GitHub): https://github.com/tangle-network/tnt-core/blob/v2/packages/credits/src/Credits.sol -- Partners bringing assets into the network through integrations (e.g., Router Protocol, Hourglass) will earn points when users deposit assets for supporting ecosystem growth. +## Contract Addresses -**5\. Development and Instancing** +We will publish the contract addresses as each environment is deployed. -- Developers can earn points by innovating. Eligibility for inclusion is determined by: +| Environment | Credits Contract | Explorer | +| ----------- | -------------------------------------------- | ---------------------------------------------------------------------------------------- | +| Mainnet | `TBD` | `TBD` | +| Testnet | `0x758226e04478541fcdac605e1f235e2956259a10` | https://testnet-explorer.tangle.tools/address/0x758226e04478541fcdac605e1f235e2956259a10 | +| Local | `TBD` | `TBD` | - - 100% completed innovations, no consideration otherwise. - - Fully open source on Github. - - Informative README. - - Post on Common forum [https://common.xyz/tangle](https://common.xyz/tangle) with blog style post. +Testnet source: https://github.com/tangle-network/tnt-core/blob/v2/deployments/base-sepolia/latest.json -- Developers who complete publicly issued bounties from any public Tangle resource will earn points. -- Developers who build innovative Blueprints through hackathons or general interest. -- Developers who deploy Blueprints to testnet and eventually mainnet. -- Developers who get their Blueprints registered for, operated on, and instanced. -- Customers who instance Blueprints. - -### To Maximize Your Points: - -- **Diversify Activities:** Engage in multiple areas—restaking, nominating, running operators or validators, and social engagement—to maximize your point accumulation. -- **Stay Consistent:** Participate for as long as possible to ensure continuous accrual of points. - -### How Points Work - -- All eligible activities are recorded, and corresponding points are awarded. -- Points continue to accumulate for ongoing actions such as staking or delegating. -- Rankings will be reflected on the Tangle Leaderboard once it goes live. -- Points for different actions may be considered for different tranches of future airdrops. - -## Tangle Leaderboard (Coming Soon) - -The **Tangle Leaderboard** will rank participants based on their total points earned. This ranking system is designed to showcase top contributors and foster healthy competition within the ecosystem. - -#### What to Expect - -1. Participants will be ranked in descending order based on their cumulative points. -2. Your rank will depend on the points you earn, which are influenced by the variety of activities you participate in and the duration of your engagement in each activity. - -Stay tuned for updates regarding when the leaderboard goes live\! +If you are looking for token distribution or migration details, see [TNT Migration and Claims](/network/claim-airdrop). diff --git a/pages/network/slashing.mdx b/pages/network/slashing.mdx deleted file mode 100644 index 3b5c5109..00000000 --- a/pages/network/slashing.mdx +++ /dev/null @@ -1,39 +0,0 @@ -# Slashing Mechanisms - -## Overview - -Slashing is a critical security mechanism in the Tangle Network that penalizes operators and restakers for misbehavior in service operations. 
When a slashing event occurs, both the operator and exposed restakers lose a configurable percentage of their staked assets. Slashing events are recorded publicly and permanently impact an operator's profile. - -## TNT Slashing Model - -### Service Instance Slashing - -Blueprint developers encode specific slashing conditions directly into their services during development. These conditions are made public during Blueprint deployment, ensuring full transparency for operators who choose to register for these services. This transparency allows operators to make informed decisions about which services they want to support. - -### Operator Controls - -While operators can register to run Blueprint services, they retain important controls over their operations. Most notably, they maintain the ability to reject Blueprint Service Instance (BSI) requests from paying customers. This control mechanism helps prevent potential spam attacks and resource overload scenarios, ensuring operators can maintain high quality of service. - -### Restaker Protections - -The system provides restakers with fine-grained control over their asset exposure. Restakers can precisely configure which Blueprints they want exposure to on a per-operator basis. This granular control extends to specific Blueprint Service Instances, allowing restakers to carefully manage their risk exposure across different services and operators. - -## Slashing Implementation - -### Core Logic - -The slashing mechanism operates through a multi-step process that begins in the services pallet. When a slashing condition is triggered, the pallet initiates a slash call with the operator's address, slashing percentage, and Blueprint ID. The multi-asset delegation pallet then processes this request by: - -1. Identifying all delegators associated with the operator -2. Filtering for delegators exposed to the specific Blueprint -3. Applying the specified slash percentage to both restaker and operator deposits - -This process accounts for scenarios where operators may also act as their own restakers, ensuring fair treatment across all participants. - -### Asset Distribution - -By default, slashed assets are transferred to the Treasury's control. However, Blueprint developers can implement custom hooks to direct slashed assets to specific destinations. The system carefully prevents automatic opt-in of restakers to new Blueprints that operators register for, protecting against potential collusion between operators and Blueprint developers. - -### Slashing Execution - -The service pallet implements a sophisticated queuing system for slash events. Each event enters a queue for a predetermined SLASHING_QUEUE_LIFETIME period. During this time, authorized parties (DISPUTE_ORIGIN) can dispute the slashing event. If no disputes are raised during the queue lifetime, the slashing event executes automatically. This waiting period ensures fair treatment while maintaining system security. 
diff --git a/pages/network/tokenomics/_meta.ts b/pages/network/tokenomics/_meta.ts index 13c8eeb8..caf69ff0 100644 --- a/pages/network/tokenomics/_meta.ts +++ b/pages/network/tokenomics/_meta.ts @@ -1,7 +1,7 @@ import { Meta } from "nextra"; const meta: Meta = { - usage: "Token Overview", + usage: "Token Utility", allocation: "Allocation", inflation: "Inflation", }; diff --git a/pages/network/tokenomics/allocation.mdx b/pages/network/tokenomics/allocation.mdx index 8ca38da6..6b0272cb 100644 --- a/pages/network/tokenomics/allocation.mdx +++ b/pages/network/tokenomics/allocation.mdx @@ -1,133 +1,58 @@ --- -title: Genesis Allocations and Token Allocation Details +title: Token Allocation tags: - Token - Tokenomics --- -import AllocationTable from 'components/AllocationTable.tsx'; -import { Callout } from 'nextra/components' -import { Bleed } from 'nextra-theme-docs' +import { Callout } from "nextra/components"; -### Overview: Genesis Allocations +# Token Allocation -Observing successful networks as a benchmark, Tangle Network has tailored its genesis allocation to meet its unique needs and goals. +This page summarizes TNT allocation and vesting for the v2 protocol. The authoritative source is the on-chain +distribution contracts and migration snapshot files; the table below is a readable summary derived from those sources. -![Allocation Pie Chart](/images/allocation-pie-chart.png) - -### Vesting Schedules - - - Genesis participants will have 1 year to claim their distribution, **the deadline is April 10 2025.** otherwise the amount is sent to the Tangle Network on-chain treasury. + + Amounts are derived from the v2 migration snapshot and carveout files. See the source links in this page for + exact values (wei) and the full Merkle tree. -Tangle Network implements two distinct vesting schedules designed to promote long-term commitment while ensuring a balanced token distribution: - -**A-Vesting (4-Year Schedule):** - -- Total Duration: 4 years (48 months) -- Cliff Period: 12 months -- Initial Release: No tokens are released during the cliff period -- Post-Cliff Distribution: - - At cliff end (12 months): 25% of total allocation released - - Remaining 75%: Vested monthly over 36 months in equal increments -- Monthly Release Rate: ~2.778% of total allocation (post-cliff) - -**B-Vesting (Airdrop Schedule):** - -- Total Duration: 2 years (24 months) -- Cliff Period: 1 month -- Initial Release: Predefined percentage available at launch -- Post-Cliff Distribution: Remaining tokens released monthly over 23 months -- Designed specifically for airdrop participants to balance immediate utility with long-term alignment - -Both schedules follow the principle of Immediate Vesting Post-Cliff with Retroactive Accumulation, ensuring a predictable and fair distribution mechanism. - -![Liquid Tokens Over Time](/images/liquid-tokens-over-time-chart.png) - -### Detailed Allocation Overview - -The Tangle Network's allocation model is structured around three core pillars: - -1. **Contributors** - - - Supports core teams, core-team investors, and advisors - - Essential for sustained network development and strategic guidance - - Subject to A-Vesting schedule for long-term alignment - -2. **Airdrops** - - - Rewards validators, early supporters, and active participants - - Follows B-Vesting schedule to encourage community engagement - - Designed to foster a robust and participatory ecosystem - -3. 
**Governance-Managed** - - Allocated for: - - Community-driven development initiatives - - Network success programs - - Strategic liquidity provisions - - Key partnerships and ecosystem growth - - Managed through transparent on-chain governance - - - - - -# Token Allocation and Vesting Schedule Formulas - -This section outlines the formulas used to calculate various aspects of our token allocation and vesting schedules. - -### Initial Liquid Tokens - -```plaintext -=Total_Tokens_Allocated * Immediate_Liquidity_Percentage -``` - -**Description:** Calculates the number of tokens that are immediately liquid and available at launch, based on the total tokens allocated to an entity and the percentage designated as immediately liquid. - -### Cliff-Release Tokens (for entities with a retroactive vesting cliff) +## Allocation Summary -```plaintext -=Total_Tokens_Allocated * (Cliff_Duration / Total_Vesting_Period) -``` +| Category | Purpose | Amount (TNT) | Share | Vesting | +| -------------------------- | ------------------------------------ | ---------------- | ------ | ------------------------------------ | +| Substrate migration claims | SS58 snapshot claims via Merkle tree | 51,244,581.8122 | 46.90% | 2% unlocked + 12m cliff + 24m linear | +| EVM allocations | Direct EVM recipient list | 1,125,776.5192 | 1.03% | 2% unlocked + 12m cliff + 24m linear | +| Treasury carveout | Non-claimable module balances | 36,844,468.7611 | 33.72% | Liquid at deployment | +| Foundation carveout | Foundation allocation | 15,040,809.8267 | 13.77% | Liquid at deployment | +| Liquidity ops carveout | Liquidity operations budget | 5,000,000 | 4.58% | Liquid at deployment | +| **Total (MAX_SUPPLY)** | Hard cap from `TangleToken` | 109,255,636.9192 | 100% | - | -**Description:** For allocations with a retroactive vesting cliff, this calculates the number of tokens released at the end of the cliff period, based on the total allocation and the proportion of the vesting period represented by the cliff. +## Vesting and Lockups -### Monthly Vesting Rate (for entities with post-cliff monthly vesting) +Allocations can mix unlocked and vested balances. Each allocation defines: -```plaintext -=(Total_Tokens_Allocated - Initial_Liquid_Tokens - Cliff_Release_Tokens) / (Vesting_Period - Cliff_Duration) -``` +- **Unlock percentage** at claim or distribution time. +- **Cliff duration** before vesting starts. +- **Linear vesting period** once the cliff ends. -**Description:** Determines the monthly rate at which tokens vest after the cliff period, considering the total tokens allocated minus any initially liquid tokens and tokens released at the cliff, divided by the remaining months of the vesting period. +For legacy allocations and migration details, see [TNT Migration and Claims](/network/claim-airdrop). -### Special Considerations +## Source of Truth (GitHub) -For entities without a vesting plan (e.g., Treasury, Foundation) -**Entire allocation is considered liquid at launch,** though it is only utilized through governance and so not 'liquid' in the traditional tokenomic sense. 
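As a worked example of the "2% unlocked + 12-month cliff + 24-month linear" schedule summarized above, the helper below computes the claimable amount at a point in time. It is a readable restatement for intuition, assuming a 30-day month; the distribution contracts remain the source of truth.

```ts
// Worked example of the documented "2% unlocked + 12m cliff + 24m linear" schedule.
// A 30-day month is assumed for simplicity; the on-chain contracts are authoritative.
const MONTH = 30 * 24 * 60 * 60; // seconds

function vestedAmount(
  total: bigint,       // full allocation in wei
  start: number,       // distribution start (unix seconds)
  now: number,         // current time (unix seconds)
  unlockedBps = 200n,  // 2% unlocked up front
  cliffMonths = 12,
  linearMonths = 24,
): bigint {
  const unlocked = (total * unlockedBps) / 10_000n;
  const vesting = total - unlocked;
  const elapsed = now - start;
  if (elapsed < cliffMonths * MONTH) return unlocked;
  const sinceCliff = Math.min(elapsed - cliffMonths * MONTH, linearMonths * MONTH);
  return unlocked + (vesting * BigInt(sinceCliff)) / BigInt(linearMonths * MONTH);
}

// 1,000 TNT allocation, 18 months in: 20 TNT unlocked + 6/24 of the remaining 980 TNT = 265 TNT.
console.log(vestedAmount(1_000n * 10n ** 18n, 0, 18 * MONTH));
```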
+- Token cap: https://github.com/tangle-network/tnt-core/blob/main/src/governance/TangleToken.sol +- Substrate Merkle snapshot: https://github.com/tangle-network/tnt-core/blob/v2/packages/migration-claim/merkle-tree.json +- EVM recipient list: https://github.com/tangle-network/tnt-core/blob/v2/packages/migration-claim/evm-claims.json +- Treasury carveout: https://github.com/tangle-network/tnt-core/blob/v2/packages/migration-claim/treasury-carveout.json +- Foundation carveout: https://github.com/tangle-network/tnt-core/blob/v2/packages/migration-claim/foundation-carveout.json +- Liquidity ops carveout: https://github.com/tangle-network/tnt-core/blob/v2/packages/migration-claim/liquidity-ops-carveout.json -```plaintext -=Total_Tokens_Allocated -``` +## Verification Sources -## Definitions +| Environment | Distribution Contract | Explorer | +| ----------- | --------------------- | -------- | +| Mainnet | `TBD` | `TBD` | +| Testnet | `TBD` | `TBD` | +| Local | `TBD` | `TBD` | -1. **Allocation Category**: A grouping used to categorize the distribution of tokens or shares within a project or organization, typically indicating the purpose or recipient of the allocation. -2. **Entity Name**: The name of the individual or organization receiving the allocation of tokens or shares. -3. **Allocated Share (%)**: The percentage of the total token supply allocated to a specific entity or category. -4. **Vesting Plan**: A structured timeline outlining how allocated tokens or shares become available to the recipient over a period, usually to incentivize long-term commitment or performance. -5. **Cliff (Months)**: The initial period after which a portion of the allocated tokens or shares becomes accessible to the recipient, often used as a safeguard against early departures or underperformance. -6. **Vesting Period (Months)**: The total duration over which allocated tokens or shares gradually become available to the recipient according to the vesting schedule. -7. **Immediate Liquidity (%)**: The percentage of allocated tokens or shares that are immediately accessible or liquid upon allocation, without being subject to vesting restrictions. -8. **Initial Liquid Tokens**: The number of tokens or shares initially available for immediate use or transfer upon allocation. -9. **Cliff-Release Tokens**: The number of tokens or shares released after the cliff period, becoming accessible to the recipient according to the vesting schedule. -10. **Monthly Vesting Rate**: The rate at which tokens or shares vest on a monthly basis after the cliff period, determining the pace of distribution to the recipient. -11. **Total Tokens Allocated**: The overall sum of tokens or shares allocated to a specific entity or category, representing the total amount of ownership or participation assigned. -12. **Contributors**: Individuals or entities actively involved in contributing to the project's development, growth, or success. -13. **Governance-Managed**: Tokens allocated for governance purposes and managed by a designated entity or organization within the project, typically used for decision-making or protocol governance. -14. **Airdrops**: Distribution of tokens to a specific group of recipients, often as a promotional or community-building activity, without requiring direct financial investment. -15. **Leaderboard Participants**: Participants who engage with the project's leaderboard, often in competitions or challenges, and receive tokens as rewards or incentives. -16. 
**DOT Validators Snapshot**: Participants included in a specific snapshot of DOT (Polkadot) validators and rewarded with tokens accordingly. -17. **EDG Genesis Participants**: Participants involved in the project's genesis event or initial launch phase and eligible for token rewards. -18. **EDG 2023 Snapshot**: Participants included in a snapshot of block 18070680 (July-31-2023 06:28:12 AM +-5 UTC) and eligible for token rewards based on their inclusion in the snapshot. -19. **Total Supply**: The overall quantity of tokens or shares in existence within the project or organization, representing the maximum potential ownership or participation. +The summary will be kept in sync with these contracts. diff --git a/pages/network/tokenomics/inflation.mdx b/pages/network/tokenomics/inflation.mdx index 1745605d..34a4e542 100644 --- a/pages/network/tokenomics/inflation.mdx +++ b/pages/network/tokenomics/inflation.mdx @@ -1,58 +1,27 @@ -import ExpandableImage from "../../../components/ExpandableImage"; +# TNT Supply and Incentives -# Tangle Network's Inflation Model +## No Automatic Inflation in v2 -## Overview +Tangle v2 does **not** mint TNT continuously for block rewards or staking. Incentives are **budgeted and pre-funded** by governance, which means there is no automatic inflation at the protocol level. -Tangle Network utilizes a Nominated Proof of Stake (NPoS) system to secure its network and incentivize participation. The creation (minting) of new Tangle Network Tokens (TNT) serves as the primary mechanism for rewarding validators and nominators, which in turn introduces inflation into the system. This document outlines the key aspects of how rewards are distributed and how inflation is managed within Tangle Network. +## Budgeted Incentives (Pre-Funded) -## NPoS Payments and Inflation +If governance allocates TNT to incentives, funds are deposited into `InflationPool` and distributed by epoch: -- **Purpose**: Rewards are distributed to validators and nominators for their roles in block production and network security. -- **Inflation**: The minting of new TNT for rewards is the main source of inflation within Tangle Network. -- **Exclusions**: This overview **does not** account for penalties (slashings), rewards for reporting misconduct, or transaction fee rewards, which are covered separately. +- **Stakers**: TNT incentives can be paid via `RewardVaults` (per asset) and optional staker exposure budgets. +- **Operators / Developers / Customers**: TNT rewards can be allocated based on activity metrics. -## Inflation Model Simplified +See [Incentives](/network/incentives-overview) and [Metrics and Scoring](/network/metrics-and-scoring) for the full model. -- **Staking Rate:** +Source contracts: -($x$) +- https://github.com/tangle-network/tnt-core/blob/main/src/rewards/InflationPool.sol +- https://github.com/tangle-network/tnt-core/blob/main/src/rewards/RewardVaults.sol -Represents the proportion of total TNT supply that is staked in the NPoS system. +## Service Fees Are Separate -- **Ideal Staking Rate** +Service fees are paid by customers in the payment token (native or ERC-20) and are **not** inflation. Fees are split across developers, the protocol, operators, and stakers. -($\chi_\text{ideal}$) +## Governance Control -The target staking rate Tangle Network aims to achieve for optimal security and liquidity balance. 
- -- **Yearly Interest Rate:** - -($i(x)$) - -The rate at which rewards are paid out relative to the amount staked, adjusted based on the staking rate to incentivize desired staking levels. - -### Key Concepts: - -- **Incentives**: The system adjusts rewards to encourage a staking rate close to $\chi_{ideal}$, reducing rewards as staking exceeds this target to prevent liquidity issues. -- **Inflation Rate ($I$)**: Calculated based on several factors, including rewards for NPoS participation, treasury funding, penalties, and transaction fees. The goal is to balance inflation with network security and operational needs. -- **Adjustable Parameters**: The model includes variables like the ideal interest rate ($i_{ideal}$) and inflation limits that can be tuned to manage the network's economic dynamics effectively. - - - -## Reward Distribution Mechanism - -- Validators and nominators receive rewards for their contributions to block production and network security, with rewards calculated based on several factors including the total points earned for various actions within the network. -- **Payment Details**: Rewards are allocated based on a point system, where different network contributions earn different points. The total payout is then distributed proportionally to the points earned by each participant. - -## Inflation Control and Staking Incentives - -- The inflation model is designed to encourage a balanced staking rate by adjusting rewards based on the current staking rate relative to the ideal target. -- **Ideal Staking Rate Adjustment**: Factors such as network growth and operational needs may lead to adjustments in the ideal staking rate to maintain network security and efficiency. - -## Simplifying Complexities - -- While the underlying mechanics are complex, the essence is to incentivize behaviors that secure the network and ensure its smooth operation, balancing between rewarding participation and controlling inflation. -- **Governance Role**: The community and governance processes play a crucial role in adjusting parameters within the inflation model to respond to evolving network needs and conditions. - -This simplified overview aims to provide a clearer understanding of how Tangle Network manages inflation and rewards within its NPoS system, making the information accessible to a broader audience without diminishing the intricacies of the underlying mechanisms. +If the protocol ever enables minting or changes the supply policy, this page will be updated to reflect the new model. diff --git a/pages/network/tokenomics/usage.mdx b/pages/network/tokenomics/usage.mdx index b33a8e63..155b3fa8 100644 --- a/pages/network/tokenomics/usage.mdx +++ b/pages/network/tokenomics/usage.mdx @@ -1,34 +1,27 @@ -# TNT Token Information and Utility +# TNT Token -The Tangle Network's native token is TNT, used as the gas token, payment token, and base restaking asset for Tangle Blueprints and restaked services. +TNT is Tangle’s **governance token** (an ERC‑20 on EVM chains). -| Network | Network Type | Native Asset Symbol | Native Asset Decimals | -| -------------- | ------------ | ------------------- | --------------------- | -| Tangle Network | Mainnet | TNT | 18 | -| Tangle Testnet | Testnet | tTNT | 18 | +TNT is **not** the gas token. Gas fees are paid in the underlying chain’s native asset (e.g., ETH on Base). 
## Usage Cases -### Consensus Mechanism and nPoS: +### Governance -The Tangle Network's nominated proof-of-stake (nPoS) consensus mechanism requires TNT to be staked by validators to participate in validating blocks. Additionally, nominators can nominate their TNT to individual validators to participate in the rewards to that validator and increase that validator's tokens-at-stake. +TNT holders participate in protocol governance (e.g., parameter updates, treasury actions, upgrades) using on-chain voting. -### Network Economics +### Protocol Alignment -Tangle Network dynamically mints or burns TNT tokens to reward consensus protocol participants like validators and nominators, partially fund its treasury, manage inflation, and ensure the network's economic stability. +TNT is the unit of alignment for the Tangle protocol: it is used to coordinate incentives across stakers, operators, blueprint developers, and users of services. -### Restaking Economics +### Staking Economics -TNT serves as the currency for transactions between job submitters and validators within the Tangle Network's restaking infrastructure. This facilitates seamless, secure, and efficient execution of complex and on-demand computational services. +TNT can be used within the staking ecosystem as an operator **self-stake** asset and as a delegatable staking asset (and may also be accepted for service payments depending on the deployed configuration). -### Slashing Mechanisms: +### Slashing Mechanisms -To safeguard against malicious activities, TNT tokens are integral to Tangle's slashing protocols across consensus and the restaking system. These measures are designed to deter attacks and incentivize adherence to network protocols. - -### Governance Empowerment: - -TNT holders wield governance power, enabling them to partake in pivotal network decisions through referenda. This democratic approach ensures that the Tangle Network evolves in alignment with its community's interests. +TNT can be used as collateral within the staking system, where slashing is applied for service-level violations. These measures deter malicious behavior and align operators with protocol requirements. --- -A considerable portion of TNT will be actively engaged in the network, either staked by validators and nominators for network security or utilized in the execution of Tangle Blueprints. This not only ensures the network's resilience and security but also drives engagement and utility within the Tangle ecosystem. +A considerable portion of TNT will be actively engaged in the protocol (e.g., governance, operator self-stake, incentives), and in the execution of Tangle Blueprints. This helps align economic security with real usage of services. 
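Usage ties back to distribution through the service-fee split. The sketch below applies the default split documented in Protocol Parameters (20% developer / 20% protocol / 40% operators / 20% stakers); actual percentages are configurable per environment, so treat these as defaults rather than fixed values.

```ts
// Apply the default service-fee split from Protocol Parameters to a payment amount.
// Percentages are defaults and can be changed per environment via ProtocolConfig.
const DEFAULT_SPLIT_BPS = { developer: 2_000n, protocol: 2_000n, operators: 4_000n, stakers: 2_000n };

function splitServiceFee(feeWei: bigint, bps = DEFAULT_SPLIT_BPS) {
  const part = (share: bigint) => (feeWei * share) / 10_000n;
  return {
    developer: part(bps.developer),
    protocol: part(bps.protocol),
    operators: part(bps.operators),
    stakers: part(bps.stakers),
  };
}

// Example: a 1.0 (in wei) service payment splits 0.2 / 0.2 / 0.4 / 0.2.
console.log(splitServiceFee(10n ** 18n));
```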
diff --git a/pages/operators/_meta.ts b/pages/operators/_meta.ts index 7093f577..d8a3acf8 100644 --- a/pages/operators/_meta.ts +++ b/pages/operators/_meta.ts @@ -6,23 +6,16 @@ const meta: Meta = { title: "Introduction", }, introduction: "Get Started", - "node-basics": "Node Basics", - validator: "Running a Validator", - monitoring: "Node Monitoring", - "-- Tangle Blueprint Operators": { + "-- blueprint operators": { type: "separator", title: "Blueprint Operators", }, manager: "Blueprint Manager", - operator: "Running an operator", + runbook: "Operator Runbook", + operator: "Running an Operator", pricing: "Pricing", benchmarking: "Blueprint Benchmarking", "quality-of-service": "Quality of Service", - "-- Eigenlayer AVS Operators": { - type: "separator", - title: "Eigenlayer AVS Operators", - }, - "tangle-avs": "Tangle AVS", }; export default meta; diff --git a/pages/operators/benchmarking.mdx b/pages/operators/benchmarking.mdx index f1011a63..d7405826 100644 --- a/pages/operators/benchmarking.mdx +++ b/pages/operators/benchmarking.mdx @@ -4,39 +4,38 @@ title: Understanding Benchmarking # Understanding Benchmarking -As a Tangle Network operator, you should understand how the network benchmarks your system to determine pricing for blueprints. This guide explains the automated benchmarking process and how it affects the quotes generated for your node. +As a Tangle Network operator, you should understand how the network benchmarks your system to determine pricing for blueprints. This guide explains the automated benchmarking process and how it affects the quotes generated for your operator runtime. ## What is Blueprint Benchmarking? Blueprint benchmarking is an automated process that measures your system's capabilities to determine: -- The cost to run specific blueprints on your node +- The cost to run specific blueprints on your operator host - The resource allocation for different blueprint types -- Your node's competitive position in the operator marketplace +- Your operator's competitive position in the operator marketplace -When users request quotes from your node, Tangle's pricing engine uses these benchmark results to calculate fair prices based on your hardware profile. +When users request quotes from your operator, Tangle's pricing engine uses these benchmark results to calculate fair prices based on your hardware profile. ## How Benchmarking Works The benchmarking process happens automatically in two key phases: -### 1. During Operator Registration +### 1. On Service Activation (Pricing Engine) -When you register as an operator, the Tangle Network automatically runs baseline benchmarks on your system: +When the pricing engine sees a `ServiceActivated` event for a blueprint, it runs a benchmark to refresh the local +hardware profile for that blueprint: -- Your node's hardware capabilities are measured -- A baseline profile is created for your specific hardware -- This profile is stored by blueprint ID in the network -- The profile is used whenever quotes are requested from your node +- The pricing engine listens to `ITangle` events via the Tangle EVM RPC. +- A benchmark task runs and caches CPU/memory/storage/network measurements. +- The cached profile is keyed by blueprint ID and used for future quote requests. -### 2. During Blueprint Execution +### 2. 
During Blueprint Runtime (QoS + Metrics) -While blueprints run on your node, the system automatically monitors resource usage: +While blueprints run on your operator host, runtime metrics can be collected for observability: -- Measurements are taken before blueprint execution starts -- Ongoing monitoring occurs during blueprint runtime -- Final measurements capture the state after completion -- This data can be used for future pricing calculations +- The Blueprint Manager can emit QoS metrics and heartbeats. +- Dashboards may display resource usage and job-level stats. +- These metrics are for visibility; the pricing engine still relies on its local benchmark cache. ## Resources That Are Automatically Measured @@ -60,11 +59,11 @@ memory = { count = 16384, price_per_unit = "0.00005" } storage = { count = 1024000, price_per_unit = "0.00002" } ``` -## How Benchmarking Affects Your Node's Quotes +## How Benchmarking Affects Your Operator's Quotes The benchmark results directly influence how the pricing engine generates quotes: -1. When a user requests a blueprint quote from your node, the system retrieves your benchmark profile +1. When a user requests a blueprint quote from your operator, the pricing engine retrieves the cached benchmark profile 2. It applies the blueprint's resource requirements to your profile 3. It calculates the quote using this formula: @@ -74,7 +73,7 @@ Quote = Base Resource Cost × Time Multiplier × Security Commitment Factor Where: -- **Base Resource Cost**: Derived from your node's benchmarking results +- **Base Resource Cost**: Derived from your operator benchmarking results - **Time Multiplier**: Adjusts cost based on how long the service will run - **Security Commitment Factor**: Based on the asset security commitments required by the request @@ -94,15 +93,16 @@ The final quote would then be adjusted based on network conditions and security ## Viewing Benchmark Information -Depending upon the service or blueprint being run, you may have information or metrics available to view. This will be accessible where you submitted your request and job, but it will depend upon the blueprint in question. +Benchmark results are stored in the pricing engine’s local cache and surfaced via logs/metrics. QoS dashboards are +blueprint-specific and depend on what the operator exposes. ## Frequently Asked Questions **Q: Do I need to manually run benchmarks?** -A: No, the benchmarking process is fully automated. It runs during registration and periodically thereafter. +A: No, the pricing engine runs benchmarks automatically when services activate and refreshes its cache as needed. **Q: Can I improve my benchmark scores?** -A: While you can't directly modify the benchmarking process, upgrading your hardware or optimizing your system can indirectly improve your node's performance. +A: While you can't directly modify the benchmarking process, upgrading your hardware or optimizing your system can indirectly improve your operator performance. **Q: How often are benchmarks updated?** A: Benchmarks are initially created during registration and may be updated periodically or when significant system changes are detected. 
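Putting the quote formula above into code makes the relationship between the benchmark profile and the final quote easier to see. The resource entries mirror the pricing-config sample shown earlier; the time multiplier and security factor values are placeholders, not the pricing engine's exact inputs.

```ts
// Illustrative quote calculation:
// Quote = Base Resource Cost × Time Multiplier × Security Commitment Factor.
// The multiplier values below are placeholders; the pricing engine derives its own.
type ResourceLine = { count: number; pricePerUnit: number };

function baseResourceCost(resources: Record<string, ResourceLine>): number {
  return Object.values(resources).reduce((sum, r) => sum + r.count * r.pricePerUnit, 0);
}

function quote(resources: Record<string, ResourceLine>, timeMultiplier: number, securityFactor: number): number {
  return baseResourceCost(resources) * timeMultiplier * securityFactor;
}

// Resource lines mirror the sample pricing config shown above.
const benchmarkProfile: Record<string, ResourceLine> = {
  memory: { count: 16_384, pricePerUnit: 0.00005 },
  storage: { count: 1_024_000, pricePerUnit: 0.00002 },
};

console.log(quote(benchmarkProfile, 24 /* duration multiplier */, 1.2 /* security commitment */));
```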
@@ -115,7 +115,7 @@ A: Yes, users may choose operators based partly on performance metrics derived f To learn more about operating on the Tangle Network, you may want to review: - [Pricing Strategies](/operators/pricing) -- [Node Configuration](/operators/node-basics) -- [Monitoring Your Node](/operators/monitoring) +- [Blueprint Manager](/operators/manager/introduction) +- [Quality of Service](/operators/quality-of-service) -Understanding the benchmarking process helps you better appreciate how the Tangle Network determines pricing for blueprints running on your node. +Understanding the benchmarking process helps you better appreciate how the Tangle Network determines pricing for blueprints running on your operator. diff --git a/pages/operators/introduction.mdx b/pages/operators/introduction.mdx index cd2c6de2..15e06649 100644 --- a/pages/operators/introduction.mdx +++ b/pages/operators/introduction.mdx @@ -1,5 +1,54 @@ -import OperatorIntroCards from "../../components/OperatorIntro.tsx" - # Operating on Tangle - +Operators run Blueprint services and earn fees for reliable execution. This section covers everything you need to deploy and maintain operator infrastructure. + +## Quick Start + +1. **Set up the Blueprint Manager**: [Manager Setup](/operators/manager/setup) +2. **Configure your keystore**: [Key Management](/developers/cli/keys) +3. **Register on-chain**: [Join as Operator](/operators/operator/join_operator/join) +4. **Monitor your services**: [Quality of Service](/operators/quality-of-service) + +## What Operators Do + +- Run the Blueprint Manager to execute service instances. +- Submit heartbeats to prove liveness. +- Respond to job calls and submit results on-chain. +- Maintain uptime to avoid QoS degradation and potential slashing. + +## Choosing Isolation + +Operators select how to isolate workloads: + +| Method | Security | Performance | Use Case | +| ------------------- | -------- | ----------- | ----------------------- | +| Native (no sandbox) | Low | Highest | Trusted blueprints only | +| Cloud-hypervisor VM | High | Good | Production default | +| Kata Containers | High | Good | Kubernetes deployments | + +See [Requirements](/operators/manager/requirements) for setup details. + +## Earning and Risks + +Operators earn from: + +- **Service fees**: Paid by customers, split with developers and stakers. +- **Optional TNT incentives**: When governance funds incentive pools. + +Operators risk slashing for: + +- Missing heartbeats beyond the grace period. +- Submitting invalid or late results. +- Violating isolation guarantees. + +## Sandbox Runtime Hosting (Early Access) + +Operators can also host the sandbox runtime that powers autonomous work. It is designed with capacity-aware scheduling, optional host pooling and autoscaling, and Prometheus-style metrics for fleet visibility. + +Learn more in [Runtime Architecture](/infrastructure/architecture). + +## Next Steps + +- [Operator Runbook](/operators/runbook): Daily operations checklist. +- [Pricing](/operators/pricing/overview): How to set competitive rates. +- [Benchmarking](/operators/benchmarking): Test blueprint performance. 
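When planning maintenance, exits, or heartbeat automation, it helps to translate the round-denominated defaults from Protocol Parameters into wall-clock time. The helper below assumes the documented 6-hour default round duration; environments may override these values.

```ts
// Convert round-denominated protocol defaults to wall-clock time, assuming the
// documented 6-hour default round duration. Environments may override these values.
const ROUND_HOURS = 6;

const roundDefaults = {
  delegatorUnstakeDelay: 28, // documented as 7 days
  operatorExitDelay: 56,     // documented as 14 days
  disputeWindow: 14,         // documented as 3.5 days
  rewardGracePeriod: 4,      // documented as 24 hours
};

for (const [name, rounds] of Object.entries(roundDefaults)) {
  console.log(`${name}: ${rounds} rounds = ${(rounds * ROUND_HOURS) / 24} days`);
}
```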
diff --git a/pages/operators/manager/_meta.ts b/pages/operators/manager/_meta.ts index bb01124a..0253d06d 100644 --- a/pages/operators/manager/_meta.ts +++ b/pages/operators/manager/_meta.ts @@ -3,6 +3,9 @@ import { Meta } from "nextra"; const meta: Meta = { introduction: "Introduction", requirements: "Requirements", + setup: "Setup", + sizing: "Sizing", + security: "Sandboxing and Security", }; export default meta; diff --git a/pages/operators/manager/introduction.mdx b/pages/operators/manager/introduction.mdx index 101a5b30..2f069d86 100644 --- a/pages/operators/manager/introduction.mdx +++ b/pages/operators/manager/introduction.mdx @@ -1,3 +1,24 @@ # Blueprint Manager -TODO +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/manager + +The Blueprint Manager is the operator-side runtime that turns on-chain services into running off-chain infrastructure. + +At a high level, the Blueprint Manager: + +- Watches the chain for service lifecycle events (service activation, job calls, service termination). +- Fetches and verifies blueprint artifacts (native binaries, containers, WASM, etc.). +- Runs service instances in the target execution environment. +- Submits liveness heartbeats and optional QoS metrics on-chain. +- Handles streamed fee drips (when services use streaming payments). + +This page focuses on the operational role of the Blueprint Manager. For integration details, see: + +- [Blueprint Manager (Developer View)](/developers/blueprints/manager) +- [Quality of Service Monitoring](/operators/quality-of-service) + +For operator setup and runtime choices, continue with: + +- [Setup](/operators/manager/setup) +- [Runtime Requirements](/operators/manager/requirements) +- [Sizing](/operators/manager/sizing) diff --git a/pages/operators/manager/requirements.mdx b/pages/operators/manager/requirements.mdx index ff40a190..c9bc48b1 100644 --- a/pages/operators/manager/requirements.mdx +++ b/pages/operators/manager/requirements.mdx @@ -1,5 +1,7 @@ # Runtime Requirements for the Blueprint Manager +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/manager + Blueprints can be executed in multiple ways (see [Sources](/developers/deployment/sources/introduction)), with each requiring certain dependencies, and possibly hardware. @@ -27,6 +29,8 @@ No extra dependencies, the blueprint will run as a normal host process. - This can be done by running `setcap cap_net_admin+eip /path/to/blueprint-manager` - **_or_** simply running the `blueprint-manager` as root (**not recommended**) +For secure production deployments, see [Sandboxing and Security](/operators/manager/security). + ## Container Sources The requirements for running blueprints with [Container Sources](/developers/deployment/sources/container) are: @@ -35,22 +39,16 @@ The requirements for running blueprints with [Container Sources](/developers/dep - [Docker] - The [Kata Containers] runtime -## TEE Sources (WIP, **Linux Only**) - -The requirements for running blueprints with [TEE Sources](/developers/deployment/sources/tee) are: - -- [dstack VMM] -- TODO? 
- ## WASM Sources (WIP) The requirements for running blueprints with [WASM Sources](/developers/deployment/sources/wasm) are: -- TODO +- A WASM runtime compatible with the blueprint source (e.g., [Wasmtime]) +- Any additional system dependencies required by your blueprint’s host bindings (if applicable) [GitHub CLI]: https://cli.github.com/ [cloud-hypervisor]: https://www.cloudhypervisor.org/ [Kubernetes]: https://kubernetes.io/ [Docker]: https://www.docker.com/get-started/ [Kata Containers]: https://katacontainers.io/ -[dstack VMM]: https://github.com/Dstack-TEE/dstack/tree/master?tab=readme-ov-file#-getting-started +[Wasmtime]: https://wasmtime.dev/ diff --git a/pages/operators/manager/security.mdx b/pages/operators/manager/security.mdx new file mode 100644 index 00000000..c5343172 --- /dev/null +++ b/pages/operators/manager/security.mdx @@ -0,0 +1,55 @@ +# Sandboxing and Security + +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/manager + +Running the Blueprint Manager in production should prioritize isolation and key safety. The recommendations below assume Tangle's EVM protocol. + +## Recommended: VM sandbox for native blueprints (Linux) + +The native VM sandbox uses `cloud-hypervisor` to isolate blueprint binaries. + +1. Install `cloud-hypervisor` and make sure it is in `PATH`. +2. Grant the manager `CAP_NET_ADMIN` so it can manage VM networking: + +```bash +sudo setcap cap_net_admin+eip /path/to/blueprint-manager +``` + +3. Run with VM preferences enabled: + +```bash +cargo tangle blueprint run \ + --protocol tangle-evm \ + --http-rpc-url "$RPC_URL" \ + --ws-rpc-url "$WS_RPC_URL" \ + --keystore-path ./keystore \ + --settings-file ./settings.env \ + --spawn-method vm +``` + +The manager handles kernel and disk image downloads automatically. + +## Containers and Kata + +If you deploy container-based blueprints, use a hardened runtime such as Kata Containers and follow Kubernetes best practices for least privilege. + +## Key and data safety + +- Store keystores on encrypted storage. +- Restrict filesystem permissions to the operator user. +- Avoid running the manager as root unless required for sandboxing. + +## Keystore management + +Create a dedicated ECDSA key for the operator and keep the keystore path consistent across services: + +```bash +cargo tangle key --algo ecdsa --keystore ./keystore --name operator +export BLUEPRINT_KEYSTORE_URI="$(pwd)/keystore" +``` + +The manager uses `--keystore-path`, and QoS uses `BLUEPRINT_KEYSTORE_URI` for heartbeat signing. Keep the keystore path stable and isolated per environment. + +## Dry-run safety + +For validation or benchmarking, `cargo tangle blueprint service spawn --dry-run` runs a service runtime without submitting default on-chain transactions (registration, results, heartbeats). Custom job logic can still submit transactions if it does so explicitly. This is not a production substitute for the manager. diff --git a/pages/operators/manager/setup.mdx b/pages/operators/manager/setup.mdx new file mode 100644 index 00000000..9327272f --- /dev/null +++ b/pages/operators/manager/setup.mdx @@ -0,0 +1,63 @@ +# Blueprint Manager Setup + +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/manager + +This page covers the operator flow for configuring and running the Blueprint Manager against Tangle's EVM protocol. 
+ +## 1) Create a settings file + +The manager reads protocol addresses (and optional service scope) from `settings.env`: + +```bash +cat > settings.env <<'EOF' +BLUEPRINT_ID=123 +TANGLE_CONTRACT=0x... +RESTAKING_CONTRACT=0x... +STATUS_REGISTRY_CONTRACT=0x... +# SERVICE_ID=456 # optional; omit to follow all activations +EOF +``` + +The settings key is `RESTAKING_CONTRACT` for compatibility; it points to the staking contract. + +## 2) Start the manager + +```bash +cargo tangle blueprint run \ + --protocol tangle-evm \ + --http-rpc-url "$RPC_URL" \ + --ws-rpc-url "$WS_RPC_URL" \ + --keystore-path ./keystore \ + --settings-file ./settings.env +``` + +This process should run continuously. Use a process supervisor (systemd, docker, or Kubernetes) for restarts and health checks. + +## 3) Choose runtime preferences + +You can control how services are executed: + +- `--spawn-method` selects the preferred runtime (`native`, `vm`, `container`). +- `--vm` or `--no-vm` force or disable the VM sandbox. +- `--preferred-source` lets you override the blueprint's preferred source type. +- `--save-runtime-prefs` persists `PREFERRED_SOURCE` and `USE_VM` into `settings.env`. + +Example: + +```bash +cargo tangle blueprint run \ + --protocol tangle-evm \ + --http-rpc-url "$RPC_URL" \ + --ws-rpc-url "$WS_RPC_URL" \ + --keystore-path ./keystore \ + --settings-file ./settings.env \ + --spawn-method vm \ + --save-runtime-prefs +``` + +## 4) Data and cache directories + +- `--data-dir` controls the per-service working directory (defaults to `./data`). +- The manager maintains a cache for downloaded artifacts (defaults to `./cache`). + +Plan capacity based on the number of services you expect to host. See [Sizing and Capacity](/operators/manager/sizing). diff --git a/pages/operators/manager/sizing.mdx b/pages/operators/manager/sizing.mdx new file mode 100644 index 00000000..9a2bec6e --- /dev/null +++ b/pages/operators/manager/sizing.mdx @@ -0,0 +1,31 @@ +# Sizing and Capacity Planning + +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/manager + +Blueprints vary widely in resource needs. Use the guidance below as a starting point and adjust based on the specific blueprint workloads you operate. + +## Suggested tiers + +| Tier | Use case | vCPU | RAM | Storage | Notes | +| --------------- | ------------------------------------ | ---- | --------- | ------------ | ---------------------------- | +| Dev / Test | Local validation, dry runs | 2-4 | 8-16 GB | 50+ GB SSD | Single service, minimal load | +| Standard | Single blueprint, steady traffic | 8 | 32 GB | 200+ GB SSD | Good baseline for production | +| High Throughput | Multiple services or heavy workloads | 16+ | 64-128 GB | 500+ GB NVMe | Reserve headroom for spikes | + +## Storage planning + +- Allocate space for `data_dir` (per-service state) and the manager cache. +- Prefer SSD or NVMe for fast artifact download and startup time. +- Budget extra space for logs, metrics, and any blueprint-specific datasets. + +## Network planning + +- Stable HTTP and WebSocket RPC endpoints are required. +- Low latency improves job pickup time and QoS reporting. +- Ensure inbound connectivity for your `OPERATOR_RPC_ADDRESS`. + +## Scaling strategies + +- Start with one manager instance per host. +- Scale horizontally by running multiple operators on separate hosts and keystores. +- Prefer the VM sandbox for untrusted or high-risk blueprints (see [Sandboxing and Security](/operators/manager/security)). 
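A rough back-of-the-envelope estimate can turn the storage guidance above into a concrete number. The per-service sizes below are assumptions for illustration; measure your actual blueprints and adjust.

```ts
// Rough disk estimate for an operator host: manager cache (artifacts) + per-service
// data_dir state + a budget for logs/metrics, with headroom. All sizes are assumptions.
type Workload = { services: number; artifactGb: number; stateGbPerService: number };

function diskEstimateGb(w: Workload, logsAndMetricsGb = 20, headroom = 1.5): number {
  const cacheGb = w.services * w.artifactGb;
  const dataGb = w.services * w.stateGbPerService;
  return Math.ceil((cacheGb + dataGb + logsAndMetricsGb) * headroom);
}

// Example: 5 services, ~2 GB of artifacts and ~10 GB of state each -> provision ~120 GB.
console.log(diskEstimateGb({ services: 5, artifactGb: 2, stateGbPerService: 10 }));
```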
diff --git a/pages/operators/monitoring/_meta.ts b/pages/operators/monitoring/_meta.ts deleted file mode 100644 index 561e328d..00000000 --- a/pages/operators/monitoring/_meta.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - quickstart: "Quickstart", - prometheus: "Prometheus", - "alert-manager": "AlertManager", - grafana: "Grafana Dashboard", - loki: "Loki Log Manager", -}; - -export default meta; diff --git a/pages/operators/monitoring/alert-manager.mdx b/pages/operators/monitoring/alert-manager.mdx deleted file mode 100644 index cdd4c3c6..00000000 --- a/pages/operators/monitoring/alert-manager.mdx +++ /dev/null @@ -1,342 +0,0 @@ ---- -title: Alert Manager Setup -description: Create alerts to notify the team when issues arise. ---- - -import { Tabs, Tab } from "../../../components/Tabs"; -import Callout from "../../../components/Callout"; - -# Alert Manager Setup - -The following is a guide outlining the steps to setup AlertManager to send alerts when a Tangle node or DKG is being disrupted. If you do not have Tangle node setup yet, please -review the **Tangle Node Quickstart** setup guide [here](../node-basics/quickstart.mdx). - -In this guide we will configure the following modules to send alerts from a running Tangle node. - -- **Alert Manager** listens to Prometheus metrics and pushes an alert as soon as a threshold is crossed (CPU % usage for example). - -## What is Alert Manager? - -The Alertmanager handles alerts sent by client applications such as the Prometheus server. It takes care of deduplicating, grouping, -and routing them to the correct receiver integration such as email, PagerDuty, or OpsGenie. It also takes care of silencing and -inhibition of alerts. To learn more about Alertmanager, please -visit the official docs site [here](https://prometheus.io/docs/alerting/latest/alertmanager/). - -### Getting Started - -Start by downloading the latest releases of the [AlertManager](https://prometheus.io/docs/alerting/latest/alertmanager). - - - This guide assumes the user has root access to the machine running the Tangle node, and following the below steps inside that machine. As well as, - the user has already configured Prometheus on this machine. - - -**1. Download Alertmanager** - - - - - AMD version: - ```sh filename="AMD" copy - wget https://github.com/prometheus/alertmanager/releases/download/v0.24.0/alertmanager-0.24.0.darwin-amd64.tar.gz - ``` - ARM version: - ```sh filename="ARM" copy - wget https://github.com/prometheus/alertmanager/releases/download/v0.24.0/alertmanager-0.24.0.darwin-arm64.tar.gz - ``` - - - - - AMD version: - ```sh filename="AMD" copy - wget https://github.com/prometheus/alertmanager/releases/download/v0.24.0/alertmanager-0.24.0.linux-amd64.tar.gz - ``` - ARM version: - ```sh filename="ARM" copy - wget https://github.com/prometheus/alertmanager/releases/download/v0.24.0/alertmanager-0.24.0.linux-arm64.tar.gz && - ``` - - For other linux distrubutions visit the [Prometheus releases](https://github.com/prometheus/prometheus/releases). - - - - - AMD version: - ```sh filename="AMD" copy - wget https://github.com/prometheus/alertmanager/releases/download/v0.24.0/alertmanager-0.24.0.windows-amd64.tar.gz - ``` - ARM version: - ```sh filename="ARM" copy - wget https://github.com/prometheus/alertmanager/releases/download/v0.24.0/alertmanager-0.24.0.windows-arm64.tar.gz - ``` - - - - -**2. 
Extract the Downloaded Files:** - -Run the following command: - -```sh filename="tar" copy -tar xvf alertmanager-*.tar.gz -``` - -**3. Copy the Extracted Files into `/usr/local/bin`:** - - - **Note:** The example below makes use of the `linux-amd64` installations, please update to make use of the target system you have installed. - - -Copy the `alertmanager` binary and `amtool`: - -```sh filename="cp" copy -sudo cp ./alertmanager-*.linux-amd64/alertmanager /usr/local/bin/ && -sudo cp ./alertmanager-*.linux-amd64/amtool /usr/local/bin/ -``` - -**4. Create Dedicated Users:** - -Now we want to create dedicated users for the Alertmanager module we have installed: - -```sh filename="useradd" copy -sudo useradd --no-create-home --shell /usr/sbin/nologin alertmanager -``` - -**5. Create Directories for `Alertmanager`:** - -```sh filename="mkdir" copy -sudo mkdir /etc/alertmanager && -sudo mkdir /var/lib/alertmanager -``` - -**6. Change the Ownership for all Directories:** - -We need to give our user permissions to access these directories: - -**alertManager**: - -```sh filename="chown" copy -sudo chown alertmanager:alertmanager /etc/alertmanager/ -R && -sudo chown alertmanager:alertmanager /var/lib/alertmanager/ -R && -sudo chown alertmanager:alertmanager /usr/local/bin/alertmanager && -sudo chown alertmanager:alertmanager /usr/local/bin/amtool -``` - -**7. Finally, let's clean up these directories:** - -```sh filename="rm" copy -rm -rf ./alertmanager* -``` - -Great! You have now installed and setup your environment. The next series of steps will be configuring the service. - -## Configuration - -For implementation examples, [refer to our GitHub.](https://github.com/tangle-network/tangle/blob/7e1b017f7e8b05578192dd577b358e8a8acee9f7/deployment/README.md#L4). - -### Prometheus - -The first thing we need to do is add `rules.yml` file to our Prometheus configuration: - -Let's create the `rules.yml` file that will give the rules for Alert manager: - -```sh filename="nano" copy -sudo touch /etc/prometheus/rules.yml -sudo nano /etc/prometheus/rules.yml -``` - -We are going to create 2 basic rules that will trigger an alert in case the instance is down or the CPU usage crosses 80%. -You can create all kinds of rules that can triggered, [refer to our full list.](hhttps://github.com/tangle-network/tangle/blob/7e1b017f7e8b05578192dd577b358e8a8acee9f7/deployment/prometheus/rules.yml). - -Add the following lines and save the file: - -```sh filename="group" copy -groups: - - name: alert_rules - rules: - - alert: InstanceDown - expr: up == 0 - for: 5m - labels: - severity: critical - annotations: - summary: "Instance $labels.instance down" - description: "[{{ $labels.instance }}] of job [{{ $labels.job }}] has been down for more than 1 minute." - - - alert: HostHighCpuLoad - expr: 100 - (avg by(instance)(rate(node_cpu_seconds_total{mode="idle"}[2m])) * 100) > 80 - for: 0m - labels: - severity: warning - annotations: - summary: Host high CPU load (instance bLd Kusama) - description: "CPU load is > 80%\n VALUE = {{ $value }}\n LABELS: {{ $labels }}" -``` - -The criteria for triggering an alert are set in the `expr:` part. You can customize these triggers as you see fit. 
- -Then, check the rules file: - -```yaml filename="promtool rules" copy -promtool check rules /etc/prometheus/rules.yml -``` - -And finally, check the Prometheus config file: - -```yaml filename="promtool check" copy -promtool check config /etc/prometheus/prometheus.yml -``` - -### Gmail setup - -We can use a Gmail address to send the alert emails. For that, we will need to generate an app password from our Gmail account. - -Note: we recommend you here to use a dedicated email address for your alerts. [Review Google's own guide for -proper set-up](https://support.google.com/mail/answer/185833?hl=en). - -### Slack notifications - -We can also utilize Slack notifications to send the alerts through. For that we need to a specific Slack channel to send the notifications to, and -to install Incoming WebHooks Slack application. - -To do so, navigate to: - -1. Administration > Manage Apps. -2. Search for "Incoming Webhooks" -3. Install into your Slack workspace. - -### Alertmanager - -The Alert manager config file is used to set the external service that will be called when an alert is triggered. Here, we are going to use the Gmail and Slack notification created previously. - -Let's create the file: - -```sh filename="nano" copy -sudo touch /etc/alertmanager/alertmanager.yml -sudo nano /etc/alertmanager/alertmanager.yml -``` - -And add the Gmail configuration to it and save the file: - -```sh filename="Gmail config" copy -global: - resolve_timeout: 1m - -route: - receiver: 'gmail-notifications' - -receivers: -- name: 'gmail-notifications' - email_configs: - - to: 'EMAIL-ADDRESS' - from: 'EMAIL-ADDRESS' - smarthost: 'smtp.gmail.com:587' - auth_username: 'EMAIL-ADDRESS' - auth_identity: 'EMAIL-ADDRESS' - auth_password: 'EMAIL-ADDRESS' - send_resolved: true - - -# ******************************************************************************************************************************************** -# Alert Manager for Slack Notifications * -# ******************************************************************************************************************************************** - - global: - resolve_timeout: 1m - slack_api_url: 'INSERT SLACK API URL' - - route: - receiver: 'slack-notifications' - - receivers: - - name: 'slack-notifications' - slack_configs: - - channel: 'channel-name' - send_resolved: true - icon_url: https://avatars3.githubusercontent.com/u/3380462 - title: |- - [{{ .Status | toUpper }}{{ if eq .Status "firing" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .CommonLabels.alertname }} for {{ .CommonLabels.job }} - {{- if gt (len .CommonLabels) (len .GroupLabels) -}} - {{" "}}( - {{- with .CommonLabels.Remove .GroupLabels.Names }} - {{- range $index, $label := .SortedPairs -}} - {{ if $index }}, {{ end }} - {{- $label.Name }}="{{ $label.Value -}}" - {{- end }} - {{- end -}} - ) - {{- end }} - text: >- - {{ range .Alerts -}} - *Alert:* {{ .Annotations.title }}{{ if .Labels.severity }} - `{{ .Labels.severity }}`{{ end }} - *Description:* {{ .Annotations.description }} - *Details:* - {{ range .Labels.SortedPairs }} • *{{ .Name }}:* `{{ .Value }}` - {{ end }} - {{ end }} -``` - -Of course, you have to change the email addresses and the auth_password with the one generated from Google previously. 
- -## Service Setup - -### Alert manager - -Create and open the Alert manager service file: - -```sh filename="create service" copy -sudo tee /etc/systemd/system/alertmanager.service > /dev/null << EOF -[Unit] - Description=AlertManager Server Service - Wants=network-online.target - After=network-online.target - -[Service] - User=alertmanager - Group=alertmanager - Type=simple - ExecStart=/usr/local/bin/alertmanager \ - --config.file /etc/alertmanager/alertmanager.yml \ - --storage.path /var/lib/alertmanager \ - --web.external-url=http://localhost:9093 \ - --cluster.advertise-address='0.0.0.0:9093' - -[Install] -WantedBy=multi-user.target -EOF -``` - -## Starting the Services - -Launch a daemon reload to take the services into account in systemd: - -```sh filename="daemon-reload" copy -sudo systemctl daemon-reload -``` - -Next, we will want to start the alertManager service: - -**alertManager**: - -```sh filename="start service" copy -sudo systemctl start alertmanager.service -``` - -And check that they are working fine: - -**alertManager**:: - -```sh filename="status" copy -sudo systemctl status alertmanager.service -``` - -If everything is working adequately, activate the services! - -**alertManager**: - -```sh filename="enable" copy -sudo systemctl enable alertmanager.service -``` - -Amazing! We have now successfully added alert monitoring for our Tangle node! diff --git a/pages/operators/monitoring/grafana.mdx b/pages/operators/monitoring/grafana.mdx deleted file mode 100644 index 126b90e9..00000000 --- a/pages/operators/monitoring/grafana.mdx +++ /dev/null @@ -1,193 +0,0 @@ ---- -title: Grafana Dashboard Setup -description: Create visual dashboards for the metrics captured by Prometheus. ---- - -import { Tabs, Tab } from "../../../components/Tabs"; -import Callout from "../../../components/Callout"; - -# Grafana Setup - -The following is a guide outlining the steps to setup Grafana Dashboard to visualize metric data for a Tangle node. If you do not have Tangle node setup yet, please -review the **Tangle Node Quickstart** setup guide [here](../node-basics/quickstart.mdx). - -In this guide we will configure the following modules to visualize metric data from a running Tangle node. - -- **Grafana** is the visual dashboard tool that we access from the outside (through SSH tunnel to keep the node secure). - -## What are Grafana Dashboards? - -A dashboard is a set of one or more panels organized and arranged into one or more rows. Grafana ships with a variety of panels making it easy to -construct the right queries, and customize the visualization so that you can create the perfect dashboard for your need. Each panel can interact -with data from any configured Grafana data source. To learn more about Grafana Dashboards, please -visit the official docs site [here](https://grafana.com/docs/grafana/latest/dashboards/). - -### Getting Started - -Let's first start by downloading the latest releases of the above mentioned modules (Grafana). - - - This guide assumes the user has root access to the machine running the Tangle node, and following the below steps inside that machine. As well as, - the user has already configured Prometheus on this machine. - - -**1. 
Download Grafana** - - - - - ```sh filename="brew" copy - brew update - brew install grafana - ``` - - - - - ```sh filename="linux" copy - sudo apt-get install -y apt-transport-https - sudo apt-get install -y software-properties-common wget - wget -q -O - https://packages.grafana.com/gpg.key | sudo apt-key add - - ``` - - For other linux distrubutions please visit official release page [here](https://grafana.com/grafana/download?edition=oss&platform=linux). - - - - -**2. Add Grafana repository to APT sources:** - - - This guide assumes the user is installing and configuring Grafana for a linux machine. For Macos instructions - please visit the offical docs [here](https://grafana.com/docs/grafana/v9.0/setup-grafana/installation/mac/). - - -```sh filename="add-apt" copy -sudo add-apt-repository "deb https://packages.grafana.com/oss/deb stable main" -``` - -**3. Refresh your APT cache to update your package lists:** - -```sh filename="apt update" copy -sudo apt update -``` - -**4. Next, make sure Grafana will be installed from the Grafana repository:** - -```sh filename="apt-cache" copy -apt-cache policy grafana -``` - -The output of the previous command tells you the version of Grafana that you are about to install, and where you will retrieve the package from. Verify that the installation candidate at the top of the list will come from the official Grafana repository at `https://packages.grafana.com/oss/deb`. - -```sh filename="output" -Output of apt-cache policy grafana -grafana: - Installed: (none) - Candidate: 6.3.3 - Version table: - 6.3.3 500 - 500 https://packages.grafana.com/oss/deb stable/main amd64 Packages -... -``` - -**5. You can now proceed with the installation:** - -```sh filename="install grafana" copy -sudo apt install grafana -``` - -**6. Install the Alert manager plugin for Grafana:** - -```sh filename="grafana-cli" copy -sudo grafana-cli plugins install camptocamp-prometheus-alertmanager-datasource -``` - -## Service Setup - -### Grafana - -The Grafana's service is automatically created during extraction of the deb package, you do not need to create it manually. - -Launch a daemon reload to take the services into account in systemd: - -```sh filename="daemon-reload" copy -sudo systemctl daemon-reload -``` - -**Start the Grafana service:** - -```sh filename="start service" copy -sudo systemctl start grafana-server -``` - -And check that they are working fine, one by one: - -```sh filename="status" copy -systemctl status grafana-server -``` - -If everything is working adequately, activate the services! - -```sh filename="enable" copy -sudo systemctl enable grafana-server -``` - -## Run Grafana dashboard - -Now we are going to setup the dashboard to visiualize the metrics we are capturing. - -From the browser on your local machine, navigate to `http://localhost:3000/login`. You should be greeted with -a login screen. You can login with the default credentials, `admin/admin`. Be sure to update your password afterwards. - - - This guide assumes the user has configured Prometheus, AlertManager, and Loki as a data source. - - -**Next, we need to add Prometheus as a data source.** - -1. Open the Settings menu -2. Select **Data Sources** -3. Select **Add Data Source** -4. Select Prometheus -5. Input the URL field with http://localhost:9090 -6. Click Save & Test - -**Next, we need to add AlertManager as a data source.** - -1. Open the Settings menu -2. Select **Data Sources** -3. Select **Add Data Source** -4. Select AlertManager -5. 
Input the URL field with http://localhost:9093 -6. Click Save & Test - -**Next, we need to add Loki as a data source.** - -1. Open the Settings menu -2. Select **Data Sources** -3. Select **Add Data Source** -4. Select Loki -5. Input the URL field with http://localhost:3100 -6. Click Save & Test - -We have our data sources connected, now its time to import the dashboard we want to use. You may -create your own or import others, but the purposes of this guide we will use the Polkadot Essentials dashboard created -by bLD nodes! - -**To import a dashboard:** - -1. Select the + button -2. Select **Import** -3. Input the dashboard number, **13840** -4. Select Prometheus and AlertManager as data sources from the dropdown menu -5. Click Load - -**In the dashboard selection, make sure you select:** - -- **Chain Metrics**: substrate -- **Chain Instance Host**: localhost:9615 to point the chain data scrapper -- **Chain Process Name**: the name of your node binary - -Congratulations!! You have now configured Grafana to visualize the metrics we are capturing. You now -have monitoring setup for your node! diff --git a/pages/operators/monitoring/loki.mdx b/pages/operators/monitoring/loki.mdx deleted file mode 100644 index 8c9024e3..00000000 --- a/pages/operators/monitoring/loki.mdx +++ /dev/null @@ -1,334 +0,0 @@ ---- -title: Loki Log Management -description: A service dedidated to aggregate and query system logs. ---- - -import { Tabs, Tab } from "../../../components/Tabs"; -import Callout from "../../../components/Callout"; - -# Loki Log Management - -The following is a guide outlining the steps to setup Loki for log management of a Tangle node. If you do not have Tangle node setup yet, please -review the **Tangle Node Quickstart** setup guide [here](../node-basics/quickstart.mdx). - -In this guide we will configure the following modules to scrape metrics from the running Tangle node. - -- **Loki** provides log aggregation system and metrics. [Download](https://grafana.com/docs/loki/latest/setup/install/) -- **Promtail** is the agent responsible for gathering logs, and sending them to Loki.[Download](https://grafana.com/docs/loki/latest/send-data/promtail/installation/) - -Let's first start by downloading the latest releases of the above mentioned modules (Loki, Promtail download pages). - - - This guide assumes the user has root access to the machine running the Tangle node, and following the below steps inside that machine. - - -**1. Download Loki** - - - - - AMD version: - ```sh filename="AMD" copy - curl -O -L "https://github.com/grafana/loki/releases/download/v2.7.0/loki-darwin-amd64.zip" - ``` - ARM version: - ```sh filename="ARM" copy - curl -O -L "https://github.com/grafana/loki/releases/download/v2.7.0/loki-darwin-arm64.zip" - ``` - - - - - AMD version: - ```sh filename="AMD" copy - curl -O -L "https://github.com/grafana/loki/releases/download/v2.7.0/loki-linux-amd64.zip" - ``` - ARM version: - ```sh filename="ARM" copy - curl -O -L "https://github.com/grafana/loki/releases/download/v2.7.0/loki-linux-arm64.zip" - ``` - - For other linux distrubutions, [visit the official Loki release page](https://github.com/grafana/loki/releases). - - - - - AMD version: - ```sh filename="AMD" copy - curl -O -L "https://github.com/grafana/loki/releases/download/v2.7.0/loki-windows-amd64.exe.zip" - ``` - - - - -**2. 
Download Promtail** - - - - - AMD version: - ```sh filename="AMD" copy - curl -O -L "https://github.com/grafana/loki/releases/download/v2.7.0/promtail-darwin-amd64.zip" - ``` - ARM version: - ```sh filename="ARM" copy - curl -O -L "https://github.com/grafana/loki/releases/download/v2.7.0/promtail-darwin-arm64.zip" - ``` - - - - - AMD version: - ```sh filename="AMD" copy - curl -O -L "https://github.com/grafana/loki/releases/download/v2.7.0/promtail-linux-amd64.zip" - ``` - ARM version: - ```sh filename="ARM" copy - curl -O -L "https://github.com/grafana/loki/releases/download/v2.7.0/promtail-linux-arm64.zip" - ``` - - - - - AMD version: - ```sh filename="AMD" copy - curl -O -L "https://github.com/grafana/loki/releases/download/v2.7.0/promtail-windows-amd64.exe.zip" - ``` - - - - -**3. Extract the Downloaded Files:** - -```sh filename="unzip" copy -unzip "loki-linux-amd64.zip" && -unzip "promtail-linux-amd64.zip" -``` - -**4. Copy the Extracted Files into `/usr/local/bin`:** - -```sh filename="cp" copy -sudo cp loki-linux-amd64 /usr/local/bin/ && -sudo cp promtail-linux-amd64 /usr/local/bin/ -``` - -**5. Create Dedicated Users:** - -Now we want to create dedicated users for each of the modules we have installed: - -```sh filename="useradd" copy -sudo useradd --no-create-home --shell /usr/sbin/nologin loki && -sudo useradd --no-create-home --shell /usr/sbin/nologin promtail -``` - -**6. Create Directories for `loki`, and `promtail`:** - -```sh filename="mkdir" copy -sudo mkdir /etc/loki && -sudo mkdir /etc/promtail -``` - -**7. Change the Ownership for all Directories:** - -We need to give our user permissions to access these directories: - -```sh filename="chown" copy -sudo chown loki:loki /usr/local/bin/loki-linux-amd64 && -sudo chown promtail:promtail /usr/local/bin/promtail-linux-amd64 -``` - -**9. Finally, let's clean up these directories:** - -```sh filename="rm" copy -rm -rf ./loki-linux-amd64* && -rm -rf ./promtail-linux-amd64* -``` - -The next series of steps will be configuring each service. - -## Configuration - -For implementation examples, [refer to our GitHub.](https://github.com/tangle-network/tangle/blob/7e1b017f7e8b05578192dd577b358e8a8acee9f7/deployment/README.md#L4). - -### Loki - -Loki's configuration details what ports to listen to, how to store the logs, and other configuration options. -There are many other config options for Loki, [read more.](https://grafana.com/docs/loki/latest/configuration/) - -Let's create the file: - -```sh filename="nano" copy -sudo touch /etc/loki/config.yml -sudo nano /etc/loki/config.yml -``` - -```yaml filename="config.yaml" copy -auth_enabled: false - -server: - http_listen_port: 3100 - grpc_listen_port: 9096 - -ingester: - lifecycler: - address: 127.0.0.1 - ring: - kvstore: - store: inmemory - replication_factor: 1 - final_sleep: 0s - chunk_idle_period: 5m - chunk_retain_period: 30s - max_transfer_retries: 0 - -schema_config: - configs: - - from: 2020-10-24 - store: boltdb-shipper - object_store: filesystem - schema: v11 - index: - prefix: index_ - period: 168h - - -storage_config: - boltdb: - directory: /data/loki/index - - filesystem: - directory: /data/loki/chunks - -limits_config: - enforce_metric_name: false - reject_old_samples: true - reject_old_samples_max_age: 168h - -chunk_store_config: - max_look_back_period: 0s - -table_manager: - retention_deletes_enabled: false - retention_period: 0 -``` - -### Promtail - -The Promtail configuration details what logs to send to Loki. 
In the below configuration we are indicating -to send the logs to Loki from the `/var/log/dkg` directory. This directory can be changed based on what logs you -want to pick up. There are many other config options for Promtail, refer to the [Promtail documentation](https://grafana.com/docs/loki/latest/send-data/promtail/configuration/#configure-promtail) - -Let's create the file: - -```sh filename="nano" copy -sudo touch /etc/promtail/config.yml -sudo nano /etc/promtail/config.yml -``` - -```yaml filename="config.yaml" copy -server: - http_listen_port: 9080 - grpc_listen_port: 0 - -positions: - filename: /data/loki/positions.yaml - -clients: - - url: http://localhost:3100/loki/api/v1/push - -scrape_configs: -- job_name: system - static_configs: - - targets: - - localhost - labels: - job: varlogs - __path__: /var/log/dkg/*log -``` - -## Service Setup - -### Loki - -Create and open the Loki service file: - -```sh filename="loki.service" copy -sudo tee /etc/systemd/system/loki.service > /dev/null << EOF -[Unit] - Description=Loki Service - Wants=network-online.target - After=network-online.target - -[Service] - User=loki - Group=loki - Type=simple - ExecStart=/usr/local/bin/loki-linux-amd64 -config.file /etc/loki/config.yml - -[Install] -WantedBy=multi-user.target -EOF -``` - -### Promtail - -Create and open the Promtail service file: - -```sh filename="promtail.service" copy -sudo tee /etc/systemd/system/promtail.service > /dev/null << EOF -[Unit] - Description=Promtail Service - Wants=network-online.target - After=network-online.target - -[Service] - User=promtail - Group=promtail - Type=simple - ExecStart=/usr/local/bin/promtail-linux-amd64 -config.file /etc/promtail/config.yml - -[Install] -WantedBy=multi-user.target -EOF -``` - -Great! You have now configured all the services needed to run Loki. - -## Starting the Services - -Launch a daemon reload to take the services into account in systemd: - -```sh filename="daemon-reload" copy -sudo systemctl daemon-reload -``` - -Next, we will want to start each service: - -```sh filename="start service" copy -sudo systemctl start loki.service && -sudo systemctl start promtail.service -``` - -And check that they are working fine, one by one: - -**loki**: - -```sh filename="status" copy -systemctl status loki.service -``` - -**promtail**: - -```sh filename="status" copy -systemctl status promtail.service -``` - -If everything is working adequately, activate the services! - -```sh filename="enable" copy -sudo systemctl enable loki.service && -sudo systemctl enable promtail.service -``` - -Amazing! You have now successfully configured Loki for log management. Check out the Grafana -documentation to create a Loki log dashboard! diff --git a/pages/operators/monitoring/prometheus.mdx b/pages/operators/monitoring/prometheus.mdx deleted file mode 100644 index b9b6befc..00000000 --- a/pages/operators/monitoring/prometheus.mdx +++ /dev/null @@ -1,435 +0,0 @@ ---- -title: Prometheus Setup -description: Setup Prometheus for scraping node metrics and more. ---- - -import { Tabs, Tab } from "../../../components/Tabs"; -import Callout from "../../../components/Callout"; - -# Prometheus Setup - -The following is a guide outlining the steps to setup Prometheus to monitor a Tangle node. If you do not have Tangle node setup yet, please -review the **Tangle Node Quickstart** setup guide [here](../node-basics/quickstart.mdx). 
It is important to note that -this guide's purpose is to help you get started with monitoring your Tangle node, not to advise on how to setup a node securely. Please -take additional security and privacy measures into consideration. - -In this guide we will configure the following modules to scrape metrics from the running Tangle node. - -- **Prometheus** is the central module; it pulls metrics from different sources to provide them to the Grafana dashboard and Alert Manager. -- **Node exporter** provides hardware metrics of the dashboard. -- **Process exporter** provides processes metrics for the dashboard (optional). - -## What is Prometheus? - -Prometheus is an open-source systems monitoring and alerting toolkit originally built at SoundCloud. Since its inception in 2012, -many companies and organizations have adopted Prometheus, and the project has a very active developer and user community. -It is now a standalone open source project and maintained independently of any company. To learn more about Prometheus, please -visit the official docs site [here](https://prometheus.io/docs/introduction/overview/). - -### Getting Started - -Let's first start by downloading the latest releases of the above mentioned modules (Prometheus, Process exporter, and Node exporter). - - - This guide assumes the user has root access to the machine running the Tangle node, and following the below steps inside that machine. - - -**1. Download Prometheus** - - - - - AMD version: - ```sh filename="AMD" copy - wget https://github.com/prometheus/prometheus/releases/download/v2.40.3/prometheus-2.40.3.darwin-amd64.tar.gz - ``` - ARM version: - ```sh filename="ARM" copy - wget https://github.com/prometheus/prometheus/releases/download/v2.40.3/prometheus-2.40.3.darwin-arm64.tar.gz - ``` - - - - - AMD version: - ```sh filename="AMD" copy - wget https://github.com/prometheus/prometheus/releases/download/v2.40.3/prometheus-2.40.3.linux-amd64.tar.gz - ``` - ARM version: - ```sh filename="ARM" copy - wget https://github.com/prometheus/prometheus/releases/download/v2.40.3/prometheus-2.40.3.linux-arm64.tar.gz - ``` - - For other linux distrubutions please visit official release page [here](https://github.com/prometheus/prometheus/releases). - - - - - AMD version: - ```sh filename="AMD" copy - wget https://github.com/prometheus/prometheus/releases/download/v2.40.3/prometheus-2.40.3.windows-amd64.tar.gz - ``` - ARM version: - ```sh filename="ARM" copy - wget https://github.com/prometheus/prometheus/releases/download/v2.40.3/prometheus-2.40.3.windows-arm64.tar.gz - ``` - - - - -**2. Download Node Exporter** - - - - - AMD version: - ```sh filename="AMD" copy - wget https://github.com/prometheus/node_exporter/releases/download/v1.40.0/node_exporter-1.4.0.darwin-amd64.tar.gz - ``` - ARM version: - ```sh filename="ARM" copy - wget https://github.com/prometheus/node_exporter/releases/download/v1.40.0/node_exporter-1.4.0.darwin-arm64.tar.gz - ``` - - - - - AMD version: - ```sh filename="AMD" copy - wget https://github.com/prometheus/node_exporter/releases/download/v1.40.0/node_exporter-1.4.0.linux-amd64.tar.gz - ``` - ARM version: - ```sh filename="ARM" copy - wget https://github.com/prometheus/node_exporter/releases/download/v1.40.0/node_exporter-1.4.0.linux-arm64.tar.gz - ``` - - For other linux distrubutions please visit official release page [here](https://github.com/prometheus/node_exporter/releases). - - - - -**3. 
Download Process Exporter** - - - - - AMD version: - ```sh filename="AMD" copy - wget https://github.com/ncabatoff/process-exporter/releases/download/v0.7.10/process-exporter-0.7.10.linux-amd64.tar.gz - ``` - ARM version: - ```sh filename="ARM" copy - wget https://github.com/ncabatoff/process-exporter/releases/download/v0.7.10/process-exporter-0.7.10.linux-arm64.tar.gz - ``` - - For other linux distrubutions please visit official release page [here](https://github.com/ncabatoff/process-exporter/releases). - - - - -**4. Extract the Downloaded Files:** - -Run the following command: - -```sh filename="tar" copy -tar xvf prometheus-*.tar.gz && -tar xvf node_exporter-*.tar.gz && -tar xvf process-exporter-*.tar.gz -``` - -**5. Copy the Extracted Files into `/usr/local/bin`:** - - - **Note:** The example below makes use of the `linux-amd64` installations, please update to make use of the target system you have installed. - - -We are first going to copy the `prometheus` binary: - -```sh filename="cp" copy -sudo cp ./prometheus-*.linux-amd64/prometheus /usr/local/bin/ -``` - -Next, we are going to copy over the `prometheus` console libraries: - -```sh filename="cp" copy -sudo cp -r ./prometheus-*.linux-amd64/consoles /etc/prometheus && -sudo cp -r ./prometheus-*.linux-amd64/console_libraries /etc/prometheus -``` - -We are going to do the same with `node-exporter` and `process-exporter`: - -```sh filename="cp" copy -sudo cp ./node_exporter-*.linux-amd64/node_exporter /usr/local/bin/ && -sudo cp ./process-exporter-*.linux-amd64/process-exporter /usr/local/bin/ -``` - -**6. Create Dedicated Users:** - -Now we want to create dedicated users for each of the modules we have installed: - -```sh filename="useradd" copy -sudo useradd --no-create-home --shell /usr/sbin/nologin prometheus && -sudo useradd --no-create-home --shell /usr/sbin/nologin node_exporter && -sudo useradd --no-create-home --shell /usr/sbin/nologin process-exporter -``` - -**7. Create Directories for `Prometheus`, and `Process exporter`:** - -```sh filename="mkdir" copy -sudo mkdir /var/lib/prometheus && -sudo mkdir /etc/process-exporter -``` - -**8. Change the Ownership for all Directories:** - -We need to give our user permissions to access these directories: - -**prometheus**: - -```sh filename="chown" copy -sudo chown prometheus:prometheus /etc/prometheus/ -R && -sudo chown prometheus:prometheus /var/lib/prometheus/ -R && -sudo chown prometheus:prometheus /usr/local/bin/prometheus -``` - -**node_exporter**: - -```sh filename="chwon" copy -sudo chown node_exporter:node_exporter /usr/local/bin/node_exporter -``` - -**process-exporter**: - -```sh filename="chown" copy -sudo chown process-exporter:process-exporter /etc/process-exporter -R && -sudo chown process-exporter:process-exporter /usr/local/bin/process-exporter -``` - -**9. Finally, let's clean up these directories:** - -```sh filename="rm" copy -rm -rf ./prometheus* && -rm -rf ./node_exporter* && -rm -rf ./process-exporter* -``` - -Great! You have now installed and setup your environment. The next series of steps will be configuring each service. 
- -## Configuration - -If you are interested to see how we configure the Tangle Network nodes for monitoring [check out the Tangle Network deployment README](https://github.com/tangle-network/tangle/blob/7e1b017f7e8b05578192dd577b358e8a8acee9f7/deployment/README.md#L4) - -### Prometheus - -Let"s edit the Prometheus config file and add all the modules in it: - -```sh filename="nano" copy -sudo nano /etc/prometheus/prometheus.yml -``` - -Add the following code to the file and save: - -```yaml filename="promtheus.yml" copy -global: - scrape_interval: 15s - evaluation_interval: 15s - -rule_files: - - 'rules.yml' - -alerting: - alertmanagers: - - static_configs: - - targets: - - localhost:9093 - -scrape_configs: - - job_name: "prometheus" - scrape_interval: 5s - static_configs: - - targets: ["localhost:9090"] - - job_name: "substrate_node" - scrape_interval: 5s - static_configs: - - targets: ["localhost:9615"] - - job_name: "node_exporter" - scrape_interval: 5s - static_configs: - - targets: ["localhost:9100"] - - job_name: "process-exporter" - scrape_interval: 5s - static_configs: - - targets: ["localhost:9256"] -``` - -- **scrape_interval** defines how often Prometheus scrapes targets, while evaluation_interval controls how often the software will evaluate rules. -- **rule_files** set the location of Alert manager rules we will add next. -- **alerting** contains the alert manager target. -- **scrape_configs** contain the services Prometheus will monitor. - -You can notice the first scrap where Prometheus monitors itself. - -### Process exporter - -Process exporter needs a config file to be told which processes they should take into account: - -```sh filename="nano" copy -sudo touch /etc/process-exporter/config.yml -sudo nano /etc/process-exporter/config.yml -``` - -Add the following code to the file and save: - -```sh filename="config.yml" copy -process_names: - - name: "{{.Comm}}" - cmdline: - - '.+' -``` - -## Service Setup - -### Prometheus - -Create and open the Prometheus service file: - -```sh filename="promtheus.service" copy -sudo tee /etc/systemd/system/prometheus.service > /dev/null << EOF -[Unit] - Description=Prometheus Monitoring - Wants=network-online.target - After=network-online.target - -[Service] - User=prometheus - Group=prometheus - Type=simple - ExecStart=/usr/local/bin/prometheus \ - --config.file /etc/prometheus/prometheus.yml \ - --storage.tsdb.path /var/lib/prometheus/ \ - --web.console.templates=/etc/prometheus/consoles \ - --web.console.libraries=/etc/prometheus/console_libraries - ExecReload=/bin/kill -HUP $MAINPID - -[Install] - WantedBy=multi-user.target -EOF -``` - -### Node exporter - -Create and open the Node exporter service file: - -```sh filename="node_exporter.service" copy -sudo tee /etc/systemd/system/node_exporter.service > /dev/null << EOF -[Unit] - Description=Node Exporter - Wants=network-online.target - After=network-online.target - -[Service] - User=node_exporter - Group=node_exporter - Type=simple - ExecStart=/usr/local/bin/node_exporter - -[Install] - WantedBy=multi-user.target -EOF -``` - -### Process exporter - -Create and open the Process exporter service file: - -```sh filename="process-exporter.service" copy -sudo tee /etc/systemd/system/process-exporter.service > /dev/null << EOF -[Unit] - Description=Process Exporter - Wants=network-online.target - After=network-online.target - -[Service] - User=process-exporter - Group=process-exporter - Type=simple - ExecStart=/usr/local/bin/process-exporter \ - --config.path 
/etc/process-exporter/config.yml - -[Install] -WantedBy=multi-user.target -EOF -``` - -## Starting the Services - -Launch a daemon reload to take the services into account in systemd: - -```sh filename="deamon-reload" copy -sudo systemctl daemon-reload -``` - -Next, we will want to start each service: - -**prometheus**: - -```sh filename="start serive" copy -sudo systemctl start prometheus.service -``` - -**node_exporter**: - -```sh filename="start serive" copy -sudo systemctl start node_exporter.service -``` - -**process-exporter**: - -```sh filename="start serive" copy -sudo systemctl start process-exporter.service -``` - -And check that they are working fine: - -**prometheus**: - -```sh filename="status" copy -systemctl status prometheus.service -``` - -**node_exporter**: - -```sh filename="status" copy -systemctl status node_exporter.service -``` - -**process-exporter**: - -```sh filename="status" copy -systemctl status process-exporter.service -``` - -If everything is working adequately, activate the services! - -**prometheus**: - -```sh filename="enable" copy -sudo systemctl enable prometheus.service -``` - -**node_exporter**: - -```sh filename="enable" copy -sudo systemctl enable node_exporter.service -``` - -**process-exporter**: - -```sh filename="enable" copy -sudo systemctl enable process-exporter.service -``` - -Amazing! We have now completely setup our Prometheus monitoring and are scraping metrics from our -running Tangle node. - -You can view those metrics on the Prometheus dashboard by going to `http://localhost:9090/metrics` ! diff --git a/pages/operators/monitoring/quickstart.mdx b/pages/operators/monitoring/quickstart.mdx deleted file mode 100644 index f290bf18..00000000 --- a/pages/operators/monitoring/quickstart.mdx +++ /dev/null @@ -1,60 +0,0 @@ ---- -title: Quickstart -description: Creating monitoring stack for Tangle node. ---- - -import { Tabs, Tab } from "../../../components/Tabs"; -import Callout from "../../../components/Callout"; -import ExpandableImage from "../../../components/ExpandableImage"; - -# Monitoring Tangle Node - -The following is a guide outlining the steps to setup monitoring for an Tangle node. If you do not have Tangle node setup yet, please -review the **How to run an Tangle node** setup guide [here](../node-basics/quickstart.mdx). It is important to note that -this guide's purpose is to help you get started with monitoring your Tangle node, not to advise on how to setup a node securely. Please -take additional security and privacy measures into consideration. - -Here is how our final configuration will look like at the end of this guide. - -- **Prometheus** is the central module; it pulls metrics from different sources to provide them to the Grafana dashboard and Alert Manager. -- **Grafana** is the visual dashboard tool that we access from the outside (through SSH tunnel to keep the node secure). -- **Alert Manager** listens to Prometheus metrics and pushes an alert as soon as a threshold is crossed (CPU % usage for example). -- **Tangle Node** natively provides metrics for monitoring. -- **Process exporter** provides processes metrics for the dashboard (optional). -- **Loki** provides log aggregation system and metrics. -- **Promtail** is the agent responsible for gathering logs, and sending them to Loki. 
- - - Running the monitoring stack requires that you are already running the tangle network node with at least the following ports exports: - - Prometheus : `https://localhost:9615` - - -## Docker usage - -The quickest way to setup monitoring for your node is to use our provided `docker-compose` file. The docker image starts all the above monitoring -tools with the exception of `Node exporter`. `node-exporter` is ommitted since some metrics are not available when running inside a docker container. - -Follow the instructions [here](./prometheus.mdx) to start the prometheus node exporter. - -### Prerequisites - -Before starting the monitoring stack, ensure the configs are setup correctly, - -- (Optional) Set the `__SLACK_WEBHOOK_URL__` in `alertmanager.yml` to receive slack alerts -- Ensure the promtail mount path matches your log directory - -Note : All containers require connection to the localhost, this behaviour is different in Linux/Windows/Mac, the configs within the `docker-compose` and yml -files assume a linux environment. Refer [this](https://stackoverflow.com/questions/24319662/from-inside-of-a-docker-container-how-do-i-connect-to-the-localhost-of-the-mach) to make necessary adjustments for your environment. - -### Usage - -**To start the monitoring stack, run:** - -```sh filename="compose up" copy -cd monitoring -docker compose up -d -``` - -You can then navigate to `http://localhost:3000` to access the Grafana dashboard! - - diff --git a/pages/operators/node-basics/_meta.ts b/pages/operators/node-basics/_meta.ts deleted file mode 100644 index caa6cf0e..00000000 --- a/pages/operators/node-basics/_meta.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - quickstart: "Quickstart", - hardware: "Hardware", - "node-software": "Tangle Software", - systemd: "Deploy with systemd", - "docker-node": "Deploy with Docker", - flags: "Flags", - troubleshooting: "Troubleshooting", - faq: "FAQs", -}; - -export default meta; diff --git a/pages/operators/node-basics/docker-node.mdx b/pages/operators/node-basics/docker-node.mdx deleted file mode 100644 index 58c7347e..00000000 --- a/pages/operators/node-basics/docker-node.mdx +++ /dev/null @@ -1,274 +0,0 @@ ---- -title: Deploying with Docker -description: Deploy a Tangle node with only a few steps using Docker. ---- - -import Callout from "../../../components/Callout"; -import { Tabs } from 'nextra/components'; - -# Deploying a Tangle Network Node with Docker - -A Tangle Network node can be spun up quickly using Docker. **This guide covers both Full Node and Validator Node deployment.** . For more information on installing Docker, -please visit the official Docker [docs](https://docs.docker.com/get-docker). Make sure that your system meets the requirements which can read [here](/hardware.mdx). - -## Setup the Docker Environment - -The quickest and easiest way to get started is to make use of our published Docker Tangle image. In doing so, users simply pull down the image from ghcr.io, -set their keys, fetch the applicable chainspec and run the start command to get up and running. - -### 1. Pull the Tangle Docker image: - -```sh filename="pull" copy -# Only use "main" if you know what you are doing, it will use the latest and maybe unstable version of the node. - -docker pull ghcr.io/tangle-network/tangle/tangle:main -``` - -### 2. Create a local directory to store the chain data: - -Let us create a directory where we will store all the data for our node. This includes the chain data, and logs. 
- -```sh filename="mkdir" copy -mkdir /var/lib/tangle/ -``` - -### 3. Select and Start your Node Type - - - -**4. Start Tangle full node:** - -**Note:** Full nodes do not participate in block production or consensus so no required keys are necessary. - -To start the node run the following command: - -```sh filename="docker run" copy -docker run --rm -it -v /var/lib/tangle/:/data ghcr.io/tangle-network/tangle/tangle:main \ - --chain tangle-mainnet \ - --name="YOUR-NODE-NAME" \ - --base-path /data \ - --rpc-cors all \ - --port 9946 \ - --telemetry-url "wss://telemetry.polkadot.io/submit/ 1" -``` - -Once Docker pulls the necessary images, your Tangle node will start, displaying lots of information, -such as the chain specification, node name, role, genesis state, and more. - -If you followed the installation instructions for Tangle, once synced, you will be connected to peers and see -blocks being produced on the Tangle network! Note that in this case you need to also sync to the Polkadot/Kusama -relay chain, which might take a few days. - - - - -### Generate and store keys: - -We need to generate the required keys for our node. -The keys we need to generate include the following: - -- Role key (Ecdsa) -- Babe key (Sr25519) -- Account key (Sr25519) -- Grandpa key (Ed25519) -- ImOnline key (Sr25519) - -Let's now insert our required secret keys, we will not pass the SURI in the command, instead it will be interactive, where you -should paste your SURI when the command asks for it. - -**Account Keys** - -```sh filename="Acco" copy - -docker run --rm -it --platform linux/amd64 --network="host" \ -ghcr.io/tangle-network/tangle/tangle:main \ - key insert --base-path /var/lib/tangle/ \ - --chain tangle-mainnet \ - --scheme Sr25519 \ - --key-type acco -``` - -**Babe Keys** - -```sh filename="Babe" copy -docker run --rm -it --platform linux/amd64 --network="host" \ -ghcr.io/tangle-network/tangle/tangle:main \ - key insert --base-path /var/lib/tangle/ \ - --chain tangle-mainnet \ - --scheme Sr25519 \ - --key-type babe -``` - -**Im-online Keys** - **these keys are optional (required if you are running as a validator)** - -```sh filename="Imonline" copy -docker run --rm -it --platform linux/amd64 --network="host" \ -ghcr.io/tangle-network/tangle/tangle:main \ - key insert --base-path /var/lib/tangle/ \ - --chain tangle-mainnet \ - --scheme Sr25519 \ - --key-type imon -``` - -**Role Keys** - -```sh filename="Role" copy -docker run --rm -it --platform linux/amd64 --network="host" \ -ghcr.io/tangle-network/tangle/tangle:main \ - key insert --base-path /data \ - --chain tangle-mainnet \ - --scheme Ecdsa \ - --key-type role -``` - -**Grandpa Keys** - -```sh filename="Grandpa" copy -docker run --rm -it --platform linux/amd64 --network="host" \ -ghcr.io/tangle-network/tangle/tangle:main \ - key insert --base-path /data \ - --chain tangle-mainnet \ - --scheme Ed25519 \ - --key-type gran -``` - -To ensure you have successfully generated the keys correctly run: - -```sh filename="ls" copy -ls ~/data/validator//chains/tangle-mainnet/keystore/ -# You should see a some file(s) there, these are the keys. -``` - -**Caution:** Ensure you insert the keys using the instructions for your node at [generate keys](#generate-and-store-keys) The key autogeneration feature is removed for mainnet releases. The `--auto-insert-keys` is deprecated and you should manually generate and manage your keys. - -### 5. 
Start Tangle Validator node: - -To start the node run the following command: - -```sh filename="docker run" copy -docker run --platform linux/amd64 --network="host" \ -ghcr.io/tangle-network/tangle/tangle:main \ ---base-path=/data \ ---chain tangle-mainnet \ ---name="YOUR-NODE-NAME" \ ---execution wasm \ ---wasm-execution compiled \ ---trie-cache-size 0 \ ---validator \ ---telemetry-url "wss://telemetry.polkadot.io/submit/ 1" -``` - -Once Docker pulls the necessary images, your Tangle node will start, displaying lots of information, -such as the chain specification, node name, role, genesis state, and more. - -If you followed the installation instructions for Tangle, once synced, you will be connected to peers and see -blocks being produced on the Tangle network! - -```sh filename="logs" -2023-03-22 14:55:51 Tangle Standalone Node -2023-03-22 14:55:51 ✌️ version 0.1.15-54624e3-aarch64-macos -2023-03-22 14:55:51 ❤️ by Webb Technologies Inc., 2017-2023 -2023-03-22 14:55:51 📋 Chain specification: Tangle Mainnet -2023-03-22 14:55:51 🏷 Node name: cooing-morning-2891 -2023-03-22 14:55:51 👤 Role: FULL -2023-03-22 14:55:51 💾 Database: RocksDb at /Users/local/Library/Application Support/tangle/chains/local_testnet/db/full -2023-03-22 14:55:51 ⛓ Native runtime: tangle-115 (tangle-1.tx1.au1) -2023-03-22 14:55:51 Bn254 x5 w3 params -2023-03-22 14:55:51 [0] 💸 generated 5 npos voters, 5 from validators and 0 nominators -2023-03-22 14:55:51 [0] 💸 generated 5 npos targets -2023-03-22 14:55:51 [0] 💸 generated 5 npos voters, 5 from validators and 0 nominators -2023-03-22 14:55:51 [0] 💸 generated 5 npos targets -2023-03-22 14:55:51 [0] 💸 new validator set of size 5 has been processed for era 1 -2023-03-22 14:55:52 🔨 Initializing Genesis block/state (state: 0xfd16…aefd, header-hash: 0x7c05…a27d) -2023-03-22 14:55:52 👴 Loading GRANDPA authority set from genesis on what appears to be first startup. -2023-03-22 14:55:53 Using default protocol ID "sup" because none is configured in the chain specs -2023-03-22 14:55:53 🏷 Local node identity is: 12D3KooWDaeXbqokqvEMqpJsKBvjt9BUz41uP9tzRkYuky1Wat7Z -2023-03-22 14:55:53 💻 Operating system: macos -2023-03-22 14:55:53 💻 CPU architecture: aarch64 -2023-03-22 14:55:53 📦 Highest known block at #0 -2023-03-22 14:55:53 〽️ Prometheus exporter started at 127.0.0.1:9615 -2023-03-22 14:55:53 Running JSON-RPC HTTP server: addr=127.0.0.1:9933, allowed origins=["http://localhost:*", "http://127.0.0.1:*", "https://localhost:*", "https://127.0.0.1:*", "https://polkadot.js.org"] -2023-03-22 14:55:53 Running JSON-RPC WS server: addr=127.0.0.1:9944, allowed origins=["http://localhost:*", "http://127.0.0.1:*", "https://localhost:*", "https://127.0.0.1:*", "https://polkadot.js.org"] -2023-03-22 14:55:53 discovered: 12D3KooWMr4L3Dun4BUyp23HZtLfxoQjR56dDp9eH42Va5X6Hfgi /ip4/192.168.0.125/tcp/30304 -2023-03-22 14:55:53 discovered: 12D3KooWNHhcCUsZTdTkADmDJbSK9YjbtscHHA8R4jvrbGwjPVez /ip4/192.168.0.125/tcp/30305 -2023-03-22 14:55:53 discovered: 12D3KooWMr4L3Dun4BUyp23HZtLfxoQjR56dDp9eH42Va5X6Hfgi /ip4/192.168.88.12/tcp/30304 -2023-03-22 14:55:53 discovered: 12D3KooWNHhcCUsZTdTkADmDJbSK9YjbtscHHA8R4jvrbGwjPVez /ip4/192.168.88.12/tcp/30305 -``` - -### Run via Docker Compose - -The docker-compose file will spin up a container running Tangle standalone node, but you have to set the following environment variables. Remember to customize your the values depending on your environment and then copy paste this to CLI. 
- -```sh filename="set variables" copy -RELEASE_VERSION=main -CHAINSPEC_PATH=/tmp/chainspec/ -``` - -After that run: - -```sh filename="compose up" copy -docker compose up -d -``` - - - - -### Update the Client - -As Tangle development continues, it will sometimes be necessary to upgrade your node software. Node operators will be notified -on our Discord channel when upgrades are available and whether they are necessary (some client upgrades are optional). -The upgrade process is straightforward and is the same for a full node. - -1. Stop the docker container: - -```sh filename="docker stop" copy -sudo docker stop `CONTAINER_ID` -``` - -2. Get the latest version of Tangle from the [Tangle GitHub Release](https://github.com/tangle-network/tangle/pkgs/container/tangle%2Ftangle) - -3. Pull the latest version of Tangle binary by doing `docker pull ghcr.io/tangle-network/tangle/tangle:{VERSION_CODE}`. - Example, if the latest version of Tangle is v0.1.2, then the command would be `docker pull ghcr.io/tangle-network/tangle/tangle:v0.1.12` - -4. Restart the tangle container and you should have the updated version of the client. - -### Purge Your Node - -If you need a fresh instance of your Tangle node, you can purge your node by removing the associated data directory. - -You'll first need to stop the Docker container: - -```sh filename="docker stop" copy -sudo docker stop `CONTAINER_ID` -``` - -If you did not use the `-v` flag to specify a local directory for storing your chain data when you spun up your node, then the data folder is related to the Docker container itself. Therefore, removing the Docker container will remove the chain data. - -If you did spin up your node with the `-v` flag, you will need to purge the specified directory. For example, for the suggested data directly, you can run the following command to purge your parachain node data: - -```sh filename="rm" copy -# purges standalone data -sudo rm -rf /data/chains/* -``` - -Now that your chain data has been purged, you can start a new node with a fresh data directory! - -## Logs - -If you'd like to run the node with verbose logs, you may add the following arguments during initial setup. Adjust the target for the desired logging level (debug | error | info| trace | warn): - -```bash --ldkg=debug \ --ldkg_metadata=debug \ --lruntime::offchain=debug \ --ldkg_proposal_handler=debug \ --ldkg_proposals=debug -``` - -## Begin Validating - -Now that your node is setup, [continue onto our Validator guides to understand token bonding and more.](../validator/introduction.mdx). - -## Support and Questions - -Visit our [Discord's validator channel](https://discord.com/invite/cv8EfJu3Tn) for community assistance. diff --git a/pages/operators/node-basics/faq.mdx b/pages/operators/node-basics/faq.mdx deleted file mode 100644 index 6088ebc4..00000000 --- a/pages/operators/node-basics/faq.mdx +++ /dev/null @@ -1,101 +0,0 @@ -# Validator FAQ: Frequently Asked Questions - -## Where can I get help? - -Our documentation at [https://docs.tangle.tools](/) has the most up-to-date information, and you can ask questions at our Discord, the best place to get assistance with your node or other questions about the project. - -## How do I stay up to date? - -All upgrades and important technical information are announced on Discord and Twitter, in the #tangle-network channel. - -## What are the hardware requirements? - -See [Hardware](./hardware.mdx) and [Getting Started with Validating](../validator/introduction.mdx) for more information. 
- -## What about backup nodes? - -We recommend that you run two machines with the same specifications, in different countries and service providers to decentralize and make your services more robust. If your primary fails you can quickly resume services on your backup and continue to produce blocks and earn rewards. Please refer to the Q&A on failovers below. - -## What are the different networks? - -There are two networks, each will require dedicated hardware. The Tangle Testnet is free and should be used to familiarize yourself with the setup. See [Resources for more details.](/resources.mdx) - -## What ports do I allow on my firewall? - -The only ports that need to be open for incoming traffic are those designated for P2P. - -**Default Ports for a Tangle Full-Node:** - -| Description | Port | -| ----------- | ----------- | -| P2P | 30333 (TCP) | -| RPC | 9933 | -| WS | 9944 | -| Prometheus | 9615 | - -## Is there a binary? - -Yes, [see our Releases page.](https://github.com/tangle-network/tangle/releases) - -## What are the recommendations for monitoring my node? - -Monitoring is critical for success as a node operator. See our full [Monitoring guide](../monitoring/quickstart.mdx) - -## What are the KPIs I should be monitoring? - -The main key performance indicator for a node operator is the number of blocks produced. The Prometheus metric for this is called `substrate_proposer_block_constructed_count`. - -## How should I set up alerting? - -Alerting is critical for your success as a node operator, see our full guide to using our recommended [AlertManager](../monitoring/alert-manager.mdx) - -## What is the failover process if my primary node is down? - -When the primary server is down, the best way to perform a failover to the backup server is to perform a key association update. Each server should have a unique set of keys already. Run the setKeys author mapping extrinsic. You can follow the Mapping Extrinsic instructions and modify the instructions to use the setKeys extrinsic. - -## What should I look for in the logs? - -For full support, see our guides on [Logging](../monitoring/loki.mdx). - -## How much stake do I need to become a validator in the active set? - -Per era, the NPoS system selects a certain number of nodes with the most TNT to validate. Therefore, the minimum amount required to become an active nominator and earn rewards may change from era to era. You can check the active validator set's stake at [https://polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools#/staking](https://polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools#/staking) - -## How do I set an identity on my account? - -Setting an identity on-chain will help to identify your node and attract delegations. You can set an identity by following the instructions on [Polkadot's Identity Documentation](https://wiki.polkadot.network/docs/learn-identity) - -## How to move validator to another machine? - -To move your validator to another machine, follow these steps: - -1. Stop the validator node on the current machine. -2. Copy the keystore file containing your validator's keys to the new machine. -3. Install the necessary software and dependencies on the new machine. -4. Configure the validator node on the new machine, ensuring that you point to the copied keystore file. -5. Start the validator node on the new machine. - -## How to start a new validator with old account? - -To start a new validator with an existing account, you need to: - -1. Set up a new machine with the necessary hardware and software requirements. 
-2. Copy the keystore file containing your validator's keys to the new machine. -3. Configure the validator node on the new machine, ensuring that you point to the copied keystore file. -4. Start the validator node on the new machine. - -Your new validator will be associated with your existing account. - -## How to stop validating? - -To stop validating, follow these steps: - -1. Unbond your validator using the chill extrinsic in the Staking module. This will initiate the unbonding process. -2. Wait for the unbonding period to complete (usually 28 days). -3. Once the unbonding period is over, you can stop your validator node and safely shut down the machine. -4. Your staked funds will be available for withdrawal after the unbonding period. - -## How are validators elected or chosen to be in the active set on Tangle? - -The election algorithm is complex, but relies on stake and the number of nominations - it is not simply who has the most tokens. -[See the Polkadot SDK wiki for more information.](https://wiki.polkadot.network/docs/learn-phragmen#what-is-the-sequential-phragm%C3%A9n-method) diff --git a/pages/operators/node-basics/flags.mdx b/pages/operators/node-basics/flags.mdx deleted file mode 100644 index e15eacdc..00000000 --- a/pages/operators/node-basics/flags.mdx +++ /dev/null @@ -1,94 +0,0 @@ ---- -title: Flags -description: Describes the flags necessary to run and customize a Tangle node. ---- - -import Callout from '/components/Callout'; - -# Flags and Subcommands - -Setting up a Tangle Network node involves various flags to configure its operation. This guide elucidates the commonly used flags and provides instructions on how to view the complete list. - -### Networking: - -- `--port`: Define the TCP port for peer-to-peer protocols. -- `--rpc-port`: Unified port for both HTTP and WS connections. -- `--in-peers`: Limit on accepted incoming connections (Default: 25). -- `--out-peers`: Limit on maintained outgoing connections (Default: 25). - -### Execution: - -- `--execution`: Choose the execution strategy for all contexts based on the runtime compilation: - - - `native`: Use only the native build. - - `wasm`: Use only the Wasm build. - - `both`: Use both native and Wasm builds. - - `nativeelsewasm`: Use native; if it fails, use Wasm. - -- `--wasm-execution`: Method for executing Wasm runtime code: - - `compiled`: Uses the Wasmtime compiled runtime (default). - - `interpreted-i-know-what-i-do`: Uses the wasmi interpreter. - -### State & Database: - -- `--state-pruning`: Define the state pruning mode: - - - `archive`: Retain the full state of all blocks. - - ``: Retain state only for a specified number of blocks. - -- `--trie-cache-size`: Set the internal state cache size. -- `--db-cache`: Limit the database cache's memory usage. Recommended: 50% of server RAM. - -### File Paths & Chain Spec: - -- `--base-path`: Path where chain data resides. -- `--chain`: Chain specification to use; can be a file path. - -### Telemetry & Naming: - -- `--name`: Assign a name to the node for telemetry. -- `--telemetry-url`: URL for the telemetry server. Can specify multiple URLs. - -### Ethereum Compatibility (Frontier): - -- `--eth-log-block-cache`: Limit for the LRU cache size for block data (Default: 300,000,000). -- `--eth-statuses-cache`: Limit for the LRU cache size for transaction statuses (Default: 300,000,000). - -### Syncing: - -- `--sync`: Configure the blockchain syncing mode: - - `full`: Download and validate the full blockchain history. 
- - `fast`: Download blocks without execution and get the latest state with proofs. - - `fast-unsafe`: As 'fast', but without downloading state proofs. - - `warp`: Download only the latest state and proof. - -## Accessing All Flags - -To see a full list of flags: - -### Using Docker: - -Confirm the path and image with your image name: - -``` -docker run --network="host" -v "/var/lib/data:/data" --u $(id -u ${USER}):$(id -g ${USER}) -ghcr.io/tangle-network/tangle/tangle:main --help -``` - -### Using Systemd: - -If you used the binary directly: - -`./tangle-YOUR-VERSION-HERE> --help` - -If you compiled the binary: - -`./target/release/tangle-YOUR-VERSION-HERE> --help` - - -Currently, your release may be one the following. -Refer to [the Releases page on our Github for more information](https://github.com/tangle-network/tangle/releases): -- tangle-testnet-linux-amd64 -- tangle-txpool-linux-amd64 - diff --git a/pages/operators/node-basics/hardware.mdx b/pages/operators/node-basics/hardware.mdx deleted file mode 100644 index 9e4dd0f7..00000000 --- a/pages/operators/node-basics/hardware.mdx +++ /dev/null @@ -1,37 +0,0 @@ ---- -title: Hardware Requirements -description: An overview of Tangle Network hardware requirements. ---- - -import { Tabs, Tab } from "/components/Tabs"; -import Callout from "/components/Callout"; - -# Hardware - -The current Tangle testnet is a standalone network, meaning that it is not connected to the Polkadot or Kusama relay chain. -Since the Tangle is not a parachain, the size of nodes are quite a small build as it only contains code to run the standalone Tangle network and not syncing -the relay chain or communicate between the two. As such, the build is smaller, and does not require the same minumum spec requirements as a parachain node. - -The following specifications are the ideal or recommended, but nodes can be run with less. Testnet nodes have also been run using AWS t3.Large instances. - -| Component | Requirements | -| --------- | ------------------------------------------------------------------------------------------------------ | -| CPU | Intel(R) Core(TM) i7-7700K CPU @ 4.20GHz | -| Storage | An NVMe solid state drive of 500 GB (As it should be reasonably sized to deal with blockchain growth). | -| Memory | 32GB ECC | -| Firewall | P2P port must be open to incoming traffic:
    - Source: Any
    - Destination: 30333, 30334 TCP | - -### Running Ports - -As stated before, the standalone nodes will listen on multiple ports. The default Substrate ports are used in the standalone chain. - -The only ports that need to be open for incoming traffic are those designated for P2P. - -**Default Ports for a Tangle Full-Node:** - -| Description | Port | -| ----------- | ----------- | -| P2P | 30333 (TCP) | -| RPC | 9933 | -| WS | 9944 | -| Prometheus | 9615 | diff --git a/pages/operators/node-basics/node-software.mdx b/pages/operators/node-basics/node-software.mdx deleted file mode 100644 index 41c52571..00000000 --- a/pages/operators/node-basics/node-software.mdx +++ /dev/null @@ -1,63 +0,0 @@ ---- -title: Node Software and Binaries -description: An overview of Tangle Network software and how to run a node. ---- - -import { Tabs, Tab } from "/components/Tabs"; -import Callout from "/components/Callout"; - -# Running a Tangle Node - -This guide provides an overview of the Tangle Network software and instructions on how to run a node. - -## Prerequisites - -Before running a Tangle node, ensure that your machine meets the following requirements: - -- Operating System: Linux (recommended), macOS, or Windows -- Hardware: Sufficient CPU, RAM, and storage capacity to synchronize and store the blockchain data - -## Installing the Tangle Node Binary - -To install the Tangle node binary, follow these steps: - -1. Download the latest release binary from the [Tangle releases page](https://github.com/tangle-network/tangle/releases). The current latest version is 1.0.0. - - ```sh filename="Get binary" copy - wget https://github.com/tangle-network/tangle/releases/download/v1.0.0/tangle-default-linux-amd64 - - ``` - -2. Make the downloaded binary executable - `chmod +x tangle-default-linux-amd64` - -## Running the Tangle Node - -To start your Tangle node and begin synchronizing with the network, run the following command: -`./tangle-default-linux-amd64` - -Your node will start synchronizing with the Tangle Network. Once the synchronization process is complete, your node will be fully operational. - -### Running a Node with SystemD or Docker - -For production environments, it is recommended to run your Tangle node using a process manager like SystemD or Docker. This ensures that your node runs consistently and automatically restarts in case of any issues. - -Please refer to the guides in sidebar for instructions on running a Tangle node with SystemD or Docker. - -## Default Ports for a Tangle Node - -The following table lists the default ports used by a Tangle node: - -## Feature Flags - -The Tangle node software includes several feature flags that enable additional functionality. Here are some notable feature flags: - -- `txpool`: Enables transaction tracing and debugging for EVM transactions. -- `relayer`: Starts an embedded transaction relayer for transaction relaying and data querying. -- `light-client`: Starts an embedded light client for syncing EVM data on Tangle. - -To build the Tangle node with specific feature flags enabled, use the following command: - -`cargo build --release --features ` - -Replace `` with the desired feature flag(s) you want to enable. 
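For example, to produce a binary with EVM transaction tracing enabled, using the `txpool` flag listed above:

```bash
# Build the Tangle node with the txpool feature enabled
cargo build --release --features txpool
```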
diff --git a/pages/operators/node-basics/quickstart.mdx b/pages/operators/node-basics/quickstart.mdx deleted file mode 100644 index 1ff776ae..00000000 --- a/pages/operators/node-basics/quickstart.mdx +++ /dev/null @@ -1,63 +0,0 @@ ---- -title: Node Operator Quickstart -description: Participate in the Tangle ecosystem by deploying a Tangle node to validate transactions on the Tangle Network mainnet. ---- - -import { QuickDeployArea, DeployArea, SupportArea, MonitoringArea } from "../../../components/TangleQuickstart" - -# Node Operator Quickstart - -Becoming a node operator on the Tangle Network requires some technical skills, trust, and support from the community. Below -is a collection of quick links for quick setups! - -### Key Mainnet Details - -- Native token: `TNT` -- Decimals: 18 -- Chain ID: `5845` -- RPC endpoint: `https://rpc.tangle.tools` -- WSS endpoint: `wss://rpc.tangle.tools` - -# Tangle Network Mainnet Node Quickstart - -This guide provides a quickstart for anyone looking to run a Tangle Network node and participate in mainnet. Before following this guide, please ensure your machine meets the [hardware requirements](./hardware.mdx) and has the necessary dependencies installed. - -## 1. Update to Latest Node Release - -Visit the [Tangle releases page](https://github.com/tangle-network/tangle/releases) and download the latest version of the `tangle` binary for your operating system. For example, for release v1.0.0 on Linux you would run: - -`wget https://github.com/tangle-network/tangle/releases/download/v1.0.0/tangle-linux-amd64` - -Make the downloaded binary executable: - -`chmod +x tangle-default-linux-amd64` - -## 2. Start the Node - -To start your node and connect it to the Tangle mainnet, run: - -``` -./tangle-default-linux-amd64 - ---base-path \ - ---chain tangle-mainnet.json - ---name - ---validator - ---telemetry-url "wss://telemetry.polkadot.io/submit/ 1" -``` - -Replace `` with the directory where your node's data will be stored, and `` with a unique name to identify your node. - -## 3. Stake TNT to Validate - -To be eligible as a validator, you will need to stake Tangle's native token TNT. More details on the minimum staking requirements and how to stake will be provided closer to mainnet genesis. - -Please see our new **[Start Validating guide.](../validator/introduction.mdx)** - -## Monitoring - -Monitoring your node is critical. Refer to the [monitoring docs](../monitoring/quickstart.mdx) for instructions on setting up monitoring. diff --git a/pages/operators/node-basics/systemd.mdx b/pages/operators/node-basics/systemd.mdx deleted file mode 100644 index 8a72f322..00000000 --- a/pages/operators/node-basics/systemd.mdx +++ /dev/null @@ -1,413 +0,0 @@ ---- -title: Systemd Node Operation -description: Run a Tangle full node or Validator node using systemd. ---- - -import { Tabs } from 'nextra/components' - -# Running with Systemd - -You can run your **full** or **validator** node as a systemd process so that it will automatically restart on server reboots or crashes, helping to avoid getting slashed. This guide now includes additional steps for setting up dependencies and Rust configuration, ensuring a smoother setup process. - -Before following this guide, ensure that your machine's environment is set up and the Tangle binary is compiled. If you haven't done so, please refer to the [Requirements](./hardware.mdx) page. - -## Setup - -### 1. 
Fetch the Tangle Network Binary - -Use the latest release version in the url in place of ``, you can visit [releases](https://github.com/tangle-network/tangle/releases) page to view the latest info. - -``` -wget wget https://github.com/tangle-network/tangle/releases/download//tangle-linux-amd64 -``` - -For example, at the time of writing this document, the latest release is v0.6.1 and the link would be as follows - -``` -wget https://github.com/tangle-network/tangle/releases/download/v0.6.1/tangle-mainnet-linux-amd64 -``` - -### 2. Install Dependencies - -Ensure all necessary dependencies are installed: - -```sh -sudo apt update && sudo apt upgrade -y -sudo apt install curl iptables build-essential git wget jq make gcc nano tmux htop nvme-cli pkg-config libssl-dev libleveldb-dev libgmp3-dev tar clang bsdmainutils ncdu unzip llvm libudev-dev make protobuf-compiler -y -``` - -### 3. Install and Configure Rust - -```sh -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -# choose option 1 -source $HOME/.cargo/env -rustup default nightly -rustup update -rustup update nightly -rustup target add wasm32-unknown-unknown --toolchain nightly -``` - -### 4. Select and Run Your Node Type - - - - - #### Generate node key file - - ```sh filename="node-key" copy - ./target/release/tangle key generate-node-key \ - --file /node-key - ``` - - To ensure you have successfully generated the key correctly run: - - ```sh filename="ls" copy - ls /node-key - ``` - - #### Create the Service Configuration File - - Run the following commands to create the service configuration file: - - ```sh filename="mv" copy - # Move the tangle binary to the bin directory (assumes you are in repo root directory) - sudo mv ./target/release/tangle /usr/bin/ - ``` - - Add the following contents to the service configuration file. Make sure to replace the **USERNAME** with the username you created in the previous step, add your own node name, and update - any paths or ports to your own preference. - - **Note:** The below configuration assumes you are targeting the Tangle Network chainspec. - - **Full Node Service Configuration File** - - ```sh filename="full.service" copy - sudo tee /etc/systemd/system/full.service > /dev/null << EOF - [Unit] - Description=Tangle Full Node - After=network-online.target - StartLimitIntervalSec=0 - - [Service] - User= - Restart=always - RestartSec=3 - ExecStart=/usr/bin/tangle \ - --base-path \ - --name \ - --chain tangle-mainnet \ - --node-key-file "/node-key" \ - --rpc-cors all \ - --port 9946 \ - --no-mdns \ - --telemetry-url "wss://telemetry.polkadot.io/submit/ 1" - - [Install] - WantedBy=multi-user.target - EOF - ``` - - #### Enable the services - - After ensuring the config is correctly written to /etc/systemd/system/full.service, enable and start the service: - - ```sh filename="enable service" copy - sudo systemctl daemon-reload - sudo systemctl enable full - sudo systemctl start full - ``` - **Check the Status of the Service** - ```sh filename="status" copy - sudo systemctl status full - ``` - You should see the node connecting to the network and syncing with the latest blocks. - - **Tail the Latest Outputs** - ```sh filename="logs" copy - sudo journalctl -u full.service -f - ``` - - #### Network sync - - After a full node is started, it will start syncing with the current chain state. Depending on the size of the chain when you do this, this step may take anywhere from a few minutes to a few hours. 
- - Example of node sync : - - ```sh filename="output after synced" copy - 2021-06-17 03:07:39 🔍 Discovered new external address for our node: /ip4/10.26.16.1/tcp/30333/ws/p2p/12D3KooWLtXFWf1oGrnxMGmPKPW54xWCHAXHbFh4Eap6KXmxoi9u - 2021-06-17 03:07:40 ⚙️ Syncing 218.8 bps, target=#5553764 (17 peers), best: #24034 (0x08af…dcf5), finalized #23552 (0xd4f0…2642), ⬇ 173.5kiB/s ⬆ 12.7kiB/s - 2021-06-17 03:07:45 ⚙️ Syncing 214.8 bps, target=#5553765 (20 peers), best: #25108 (0xb272…e800), finalized #25088 (0x94e6…8a9f), ⬇ 134.3kiB/s ⬆ 7.4kiB/s - 2021-06-17 03:07:50 ⚙️ Syncing 214.8 bps, target=#5553766 (21 peers), best: #26182 (0xe7a5…01a2), finalized #26112 (0xcc29…b1a9), ⬇ 5.0kiB/s ⬆ 1.1kiB/s - 2021-06-17 03:07:55 ⚙️ Syncing 138.4 bps, target=#5553767 (21 peers), best: #26874 (0xcf4b…6553), finalized #26624 (0x9dd9…27f8), ⬇ 18.9kiB/s ⬆ 2.0kiB/s - 2021-06-17 03:08:00 ⚙️ Syncing 37.0 bps, target=#5553768 (22 peers), best: #27059 (0x5b73…6fc9), finalized #26624 (0x9dd9…27f8), ⬇ 14.3kiB/s ⬆ 4.4kiB/s - ``` - - - - - #### Generate and Store Keys - - We need to generate the required keys for our node. For more information on these keys, please see the [Required Keys](https://wiki.polkadot.network/docs/learn-cryptography) section. - The keys we need to generate include the following: - - - Role key (Ecdsa) - - Babe key (Sr25519) - - Account key (Sr25519) - - Grandpa key (Ed25519) - - ImOnline key (Sr25519) - - Let's now insert our required secret keys, we will not pass the SURI in the command, instead it will be interactive, where you - should paste your SURI when the command asks for it. - - **Account Keys** - - ```sh filename="Acco" copy - - ./target/release/tangle key insert --base-path \ - --chain tangle-mainnet \ - --scheme Sr25519 \ - --suri <"12-MNEMONIC-PHRASE"> \ - --key-type acco - ``` - - **Babe Keys** - - ```sh filename="Babe" copy - - ./target/release/tangle key insert --base-path \ - --chain tangle-mainnet \ - --scheme Sr25519 \ - --suri <"12-MNEMONIC-PHRASE"> \ - --key-type babe - ``` - - **Im-online Keys** - **these keys are optional** - - ```sh filename="Imonline" copy - - ./target/release/tangle key insert --base-path \ - --chain tangle-mainnet \ - --scheme Sr25519 \ - --suri <"12-MNEMONIC-PHRASE"> \ - --key-type imon - ``` - - **Role Keys** - - ```sh filename="Role" copy - - ./target/release/tangle key insert --base-path \ - --chain tangle-mainnet \ - --scheme Ecdsa \ - --suri <"12-MNEMONIC-PHRASE"> \ - --key-type role - ``` - - **Grandpa Keys** - - ```sh filename="Grandpa" copy - - ./target/release/tangle key insert --base-path \ - --chain tangle-mainnet \ - --scheme Ed25519 \ - --suri <"12-MNEMONIC-PHRASE"> \ - --key-type gran - ``` - - **Node key** - - ```sh filename="node-key" copy - ./target/release/tangle key generate-node-key \ - --file /node-key - ``` - - To ensure you have successfully generated the keys correctly run: - - ```sh filename="ls" copy - ls /chains/tangle-mainnet/keystore/ - # You should see a some file(s) there, these are the keys. - ``` - - ## System service setup - - Run the following commands to create the service configuration file: - - ```sh filename="mv" copy - # Move the tangle binary to the bin directory (assumes you are in repo root directory) - sudo mv ./target/release/tangle /usr/bin/ - ``` - - Add the following contents to the service configuration file. Make sure to replace the **USERNAME** with the username you created in the previous step, add your own node name, and update any paths or ports to your own preference. 
- - **Note:** The below configuration assumes you are targeting the Tangle Network chainspec. - - **Caution:** Ensure you insert the keys using the instructions for your node.[generate keys](#generate-and-store-keys) - The key autogeneration feature is removed for mainnet releases. The `--auto-insert-keys` is deprecated and you should manually generate and manage your keys. - - **Validator Node** - - ```sh filename="validator.service" copy - sudo tee /etc/systemd/system/validator.service > /dev/null << EOF - [Unit] - Description=Tangle Validator Node - After=network-online.target - StartLimitIntervalSec=0 - - [Service] - User= - Restart=always - RestartSec=3 - ExecStart=/usr/bin/tangle \ - --base-path \ - --name \ - --chain tangle-mainnet \ - --node-key-file "/node-key" \ - --port 30333 \ - --validator \ - --no-mdns \ - --telemetry-url "wss://telemetry.polkadot.io/submit/ 1" - - [Install] - WantedBy=multi-user.target - EOF - ``` - - #### Enable Validator Node - - Double check that the config has been written to `/etc/systemd/system/validator.service` correctly. - If so, enable the service so it runs on startup, and then try to start it now: - - ```sh filename="enable service" copy - sudo systemctl daemon-reload - sudo systemctl enable validator - sudo systemctl start validator - ``` - - Check the status of the service: - - ```sh filename="status" copy - sudo systemctl status validator - ``` - - You should see the node connecting to the network and syncing the latest blocks. - If you need to tail the latest output, you can use: - - ```sh filename="logs" copy - sudo journalctl -u validator.service -f - ``` - - If the node is running correctly, you should see an output similar to below: - - ```sh filename="output" - 2023-03-22 14:55:51 Tangle Standalone Node - 2023-03-22 14:55:51 ✌️ version 0.1.15-54624e3-aarch64-macos - 2023-03-22 14:55:51 ❤️ by Webb Technologies Inc., 2017-2023 - 2023-03-22 14:55:51 📋 Chain specification: Tangle Mainnet - 2023-03-22 14:55:51 🏷 Node name: cooing-morning-2891 - 2023-03-22 14:55:51 👤 Role: FULL - 2023-03-22 14:55:51 💾 Database: RocksDb at /Users/local/Library/Application Support/tangle/chains/local_testnet/db/full - 2023-03-22 14:55:51 ⛓ Native runtime: tangle-115 (tangle-1.tx1.au1) - 2023-03-22 14:55:51 Bn254 x5 w3 params - 2023-03-22 14:55:51 [0] 💸 generated 5 npos voters, 5 from validators and 0 nominators - 2023-03-22 14:55:51 [0] 💸 generated 5 npos targets - 2023-03-22 14:55:51 [0] 💸 generated 5 npos voters, 5 from validators and 0 nominators - 2023-03-22 14:55:51 [0] 💸 generated 5 npos targets - 2023-03-22 14:55:51 [0] 💸 new validator set of size 5 has been processed for era 1 - 2023-03-22 14:55:52 🔨 Initializing Genesis block/state (state: 0xfd16…aefd, header-hash: 0x7c05…a27d) - 2023-03-22 14:55:52 👴 Loading GRANDPA authority set from genesis on what appears to be first startup. 
- 2023-03-22 14:55:53 Using default protocol ID "sup" because none is configured in the chain specs - 2023-03-22 14:55:53 🏷 Local node identity is: 12D3KooWDaeXbqokqvEMqpJsKBvjt9BUz41uP9tzRkYuky1Wat7Z - 2023-03-22 14:55:53 💻 Operating system: macos - 2023-03-22 14:55:53 💻 CPU architecture: aarch64 - 2023-03-22 14:55:53 📦 Highest known block at #0 - 2023-03-22 14:55:53 〽️ Prometheus exporter started at 127.0.0.1:9615 - 2023-03-22 14:55:53 Running JSON-RPC HTTP server: addr=127.0.0.1:9933, allowed origins=["http://localhost:*", "http://127.0.0.1:*", "https://localhost:*", "https://127.0.0.1:*", "https://polkadot.js.org"] - 2023-03-22 14:55:53 Running JSON-RPC WS server: addr=127.0.0.1:9944, allowed origins=["http://localhost:*", "http://127.0.0.1:*", "https://localhost:*", "https://127.0.0.1:*", "https://polkadot.js.org"] - 2023-03-22 14:55:53 discovered: 12D3KooWMr4L3Dun4BUyp23HZtLfxoQjR56dDp9eH42Va5X6Hfgi /ip4/192.168.0.125/tcp/30304 - 2023-03-22 14:55:53 discovered: 12D3KooWNHhcCUsZTdTkADmDJbSK9YjbtscHHA8R4jvrbGwjPVez /ip4/192.168.0.125/tcp/30305 - 2023-03-22 14:55:53 discovered: 12D3KooWMr4L3Dun4BUyp23HZtLfxoQjR56dDp9eH42Va5X6Hfgi /ip4/192.168.88.12/tcp/30304 - 2023-03-22 14:55:53 discovered: 12D3KooWNHhcCUsZTdTkADmDJbSK9YjbtscHHA8R4jvrbGwjPVez /ip4/192.168.88.12/tcp/30305 - ``` - - #### Network sync - - After a validator node is started, it will start syncing with the current chain state. Depending on the size of the chain when you do this, this step may take anywhere from a few minutes to a few hours. - - Example of node sync : - - ```sh filename="output after synced" copy - 2021-06-17 03:07:39 🔍 Discovered new external address for our node: /ip4/10.26.16.1/tcp/30333/ws/p2p/12D3KooWLtXFWf1oGrnxMGmPKPW54xWCHAXHbFh4Eap6KXmxoi9u - 2021-06-17 03:07:40 ⚙️ Syncing 218.8 bps, target=#5553764 (17 peers), best: #24034 (0x08af…dcf5), finalized #23552 (0xd4f0…2642), ⬇ 173.5kiB/s ⬆ 12.7kiB/s - 2021-06-17 03:07:45 ⚙️ Syncing 214.8 bps, target=#5553765 (20 peers), best: #25108 (0xb272…e800), finalized #25088 (0x94e6…8a9f), ⬇ 134.3kiB/s ⬆ 7.4kiB/s - 2021-06-17 03:07:50 ⚙️ Syncing 214.8 bps, target=#5553766 (21 peers), best: #26182 (0xe7a5…01a2), finalized #26112 (0xcc29…b1a9), ⬇ 5.0kiB/s ⬆ 1.1kiB/s - 2021-06-17 03:07:55 ⚙️ Syncing 138.4 bps, target=#5553767 (21 peers), best: #26874 (0xcf4b…6553), finalized #26624 (0x9dd9…27f8), ⬇ 18.9kiB/s ⬆ 2.0kiB/s - 2021-06-17 03:08:00 ⚙️ Syncing 37.0 bps, target=#5553768 (22 peers), best: #27059 (0x5b73…6fc9), finalized #26624 (0x9dd9…27f8), ⬇ 14.3kiB/s ⬆ 4.4kiB/s - ``` - - #### Bond TNT and setup validator Account - - After your node is synced, you are ready to setup keys and onboard as a validator, make sure to complete the steps - at [Start Validating](../validator/introduction.mdx) to start validating. - - - - - #### Generate node key file - - ```sh filename="node-key" copy - ./target/release/tangle key generate-node-key \ - --file /node-key - ``` - - To ensure you have successfully generated the key correctly run: - - ```sh filename="ls" copy - ls /node-key - ``` - - The following is the service configuration file, use this while completing the Full Node guide. - **Note:** To run with evm trace, you should use a binary built with `txpool` flag, refer to [Binaries](./node-software.mdx#binaries) page for more details. 
- - ```sh filename="full.service" copy - sudo tee /etc/systemd/system/full.service > /dev/null << EOF - [Unit] - Description=Tangle Full Node - After=network-online.target - StartLimitIntervalSec=0 - - [Service] - User= - Restart=always - RestartSec=3 - ExecStart=/usr/bin/tangle \ - --base-path \ - --name \ - --chain tangle-mainnet \ - --node-key-file "/node-key" \ - --rpc-cors all \ - --port 9946 \ - --no-mdns --ethapi trace,debug,txpool - - [Install] - WantedBy=multi-user.target - EOF - ``` - - - - - -Congratulations! You have officially setup a Tangle Network node using Systemd. - -## Monitoring - -To setup monitoring for your node, please refer to the [monitoring](../monitoring/quickstart.mdx) page. - -## Begin Validating - -Now that your node is setup, [continue onto our Validator guides to understand token bonding and more.](../validator/introduction.mdx) - -## Support and Questions - -Visit our [Discord's validator channel](https://discord.com/invite/cv8EfJu3Tn) for community assistance. diff --git a/pages/operators/node-basics/troubleshooting.mdx b/pages/operators/node-basics/troubleshooting.mdx deleted file mode 100644 index b7432605..00000000 --- a/pages/operators/node-basics/troubleshooting.mdx +++ /dev/null @@ -1,83 +0,0 @@ ---- -title: Troubleshooting -description: Provides a series of suggestive fixes that are common issues when starting a Tangle node. ---- - -import Callout from '/components/Callout'; - -## Troubleshooting - - -### Logs - -If you'd like to run the node with verbose logs, you may add the following arguments during initial setup. Adjust the target for the desired logging level (debug | error | info| trace | warn): - -```bash -RUST_LOG=runtime=debug ./target/release/ --dev -``` - - - -### P2P Ports Not Open - -If you don't see an "Imported" message (without the [Relaychain] tag), check the P2P port configuration. Ensure the P2P port is open to incoming traffic. - -### In Sync - -Both chains must be in sync at all times. Look for "Imported" or "Idle" messages and ensure you have connected peers. - -### Genesis Mismatching - -If you notice log messages like: - -```bash -DATE [Relaychain] Bootnode with peer id ID is on a different chain (our genesis: 0x3f5... theirs: 0x45j...) -``` - -You may be running an older version and need to upgrade. - -### Troubleshooting for Apple Silicon users - -#### Homebrew and PATH Configuration - -If you haven't installed Homebrew: https://brew.sh/ - -```bash -/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)" -``` - -Make sure Homebrew is up-to-date, install openssl - -```bash -brew update -brew install openssl -``` - -After installation, ensure you've added /opt/homebrew/bin to your PATH: - -```bash -echo 'export PATH=/opt/homebrew/bin:$PATH' >> ~/.bash_profile -``` - -#### Dependencies - -1. GMP: Ensure the gmp dependency is correctly installed. - -```bash -brew install gmp -``` - -2. If you're still facing an issue with gmp, adjust your path to the gmp lib: - -```bash -cargo clean -export LIBRARY_PATH=$LIBRARY_PATH:$(brew --prefix)/lib:$(brew --prefix)/opt/gmp/lib -``` - -Add the above export to your bash_profile as well. - -3. 
Ensure the Protobuf dependency is correctly installed: - -```bash -brew install protobuf -``` diff --git a/pages/operators/operator/join_operator/join.mdx b/pages/operators/operator/join_operator/join.mdx index 5a2bd4ee..20b06231 100644 --- a/pages/operators/operator/join_operator/join.mdx +++ b/pages/operators/operator/join_operator/join.mdx @@ -1,41 +1,67 @@ ## Operators -Operators are noderunners that have tokens at stake and choose to restake them to enable participation in roles, which conduct the jobs produced by a blueprint instance. In esssence, Operators are service providers whose effectiveness and security is guarenteed by their restaked assets. +Operators are service providers who stake assets to run blueprint services. Staking gives customers and developers an enforceable security boundary (exposure + slashing) around off-chain work. ### Joining as an Operator -To participate in restaking, a user can join as an operator by providing a bond amount through the ``join_operators function. This registers the user as an operator and locks their bond, which is necessary for participating in the network and receiving rewards. +This page covers operator onboarding for Tangle v2. You need to: -### Step 1: Access the PolkadotJS Interface +1. Register with the staking contract (self-stake). +2. Register for each blueprint you intend to operate. +3. Run the Blueprint Manager to execute jobs. -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle Network by selecting the appropriate network from the dropdown list. +- Next: [Blueprint Manager](/operators/manager/introduction) -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: +### Step 1: Install the CLI and prepare a keystore -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer +Install `cargo-tangle` and create/import an ECDSA key: -### Step 2: Join as an Operator +- [CLI installation](/developers/cli/installation) +- [Key management](/developers/cli/keys) -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. +### Step 2: Register as a staking operator -![PolkadotJS Extrinsics](./images/extrinsic.png) +Use the CLI to stake and register in `MultiAssetDelegation` (amount is in wei of the configured bond asset): -- Under the **MultiAssetDelegation** section, select **Join Operators** and enter the bond amount. +```bash +cargo tangle operator register \ + --http-rpc-url https://... \ + --ws-rpc-url wss://... \ + --keystore-path ./keystore \ + --tangle-contract 0x... \ + --restaking-contract 0x... \ + --status-registry-contract 0x... \ + --amount +``` -![PolkadotJS Join Operators](./images/join.png) +The CLI flag name is `--restaking-contract` for compatibility; it refers to the staking contract. -Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee and bond. +The stake amount must meet the minimum configured for the enabled asset. Contract addresses are listed on +[Network Parameters](/network/network-parameters). 
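As a rough sketch of a filled-in registration: the endpoint URLs and contract addresses below are placeholders (take the real values from the Network Parameters page), and the amount assumes an 18-decimal bond asset, so 10 tokens = 10 * 10^18 wei.

```bash
# Placeholder endpoints and contract addresses; substitute the values
# published on the Network Parameters page for your target network.
export HTTP_RPC_URL="https://testnet-rpc.example"                             # hypothetical
export WS_RPC_URL="wss://testnet-rpc.example"                                 # hypothetical
export TANGLE_CONTRACT="0x0000000000000000000000000000000000000001"           # hypothetical
export RESTAKING_CONTRACT="0x0000000000000000000000000000000000000002"        # hypothetical
export STATUS_REGISTRY_CONTRACT="0x0000000000000000000000000000000000000003"  # hypothetical

# Self-stake 10 tokens of an 18-decimal bond asset (10 * 10^18 wei).
cargo tangle operator register \
  --http-rpc-url "$HTTP_RPC_URL" \
  --ws-rpc-url "$WS_RPC_URL" \
  --keystore-path ./keystore \
  --tangle-contract "$TANGLE_CONTRACT" \
  --restaking-contract "$RESTAKING_CONTRACT" \
  --status-registry-contract "$STATUS_REGISTRY_CONTRACT" \
  --amount 10000000000000000000
```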
-![PolkadotJS Transaction](./images/sign.png) +### Step 3: Register for a blueprint -If successful, you should see the following confirmation toast notification: +Register your operator in the core protocol for a specific blueprint ID: -![PolkadotJS Operator Joined](./images/success.png) +```bash +cargo tangle blueprint register \ + --http-rpc-url https://... \ + --ws-rpc-url wss://... \ + --keystore-path ./keystore \ + --tangle-contract 0x... \ + --restaking-contract 0x... \ + --status-registry-contract 0x... \ + --blueprint-id 123 \ + --rpc-endpoint "https://operator.example.com" \ + --registration-inputs ./registration.tlv +``` -Lets break down the events, navigate to the **Network** tab, you should see the following events: +- Repeat this step for each blueprint you want to serve. +- `--rpc-endpoint` is optional and publishes your operator RPC address. +- `--registration-inputs` is required only if the blueprint expects extra registration payloads. -![PolkadotJS Events](./images/events.png) +### Step 4: Run the Blueprint Manager -- multi_asset_delegation.OperatorJoined : tells you that the operator has joined successfully. +Start the runtime that watches for service activations and executes jobs: + +- [Blueprint Manager setup](/operators/manager/setup) diff --git a/pages/operators/operator/join_operator/leave.mdx b/pages/operators/operator/join_operator/leave.mdx index b040a100..0ef3b4ed 100644 --- a/pages/operators/operator/join_operator/leave.mdx +++ b/pages/operators/operator/join_operator/leave.mdx @@ -1,88 +1,48 @@ ## Leave as an Operator -Operators can leave the operator role by unstaking their tokens. The leave process is similar to the unstake process, which means its a two step process, first you schedule a leave operation -and then you execute the leave operation after the leave delay period has passed. You can cancel the leave operation before it is executed. +Operators can leave the operator role by scheduling a leave and then completing it after the leave delay period has passed. ## Schedule Operator Leave -### Step 1: Access the PolkadotJS Interface +### Step 1: Schedule Operator Leave -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle Network by selecting the appropriate network from the dropdown list. +Ensure you have joined as an operator first, see [Join as an Operator](./join). -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: +```bash +export HTTP_RPC_URL="https://..." +export WS_RPC_URL="wss://..." +export KEYSTORE_PATH="./keystore" +export TANGLE_CONTRACT="0x..." +export RESTAKING_CONTRACT="0x..." +export STATUS_REGISTRY_CONTRACT="0x..." -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer +cargo tangle operator start-leaving \ + --http-rpc-url "$HTTP_RPC_URL" \ + --ws-rpc-url "$WS_RPC_URL" \ + --keystore-path "$KEYSTORE_PATH" \ + --tangle-contract "$TANGLE_CONTRACT" \ + --restaking-contract "$RESTAKING_CONTRACT" \ + --status-registry-contract "$STATUS_REGISTRY_CONTRACT" +``` -### Step 2: Schedule Operator Leave - -Ensure you have joined as an operator first, see [Join as an Operator](./join.mdx). - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. 
- -![PolkadotJS Extrinsics](./images/extrinsic.png) - -- Under the **MultiAssetDelegation** section, select **Schedule Operator Leave** - -![PolkadotJS Schedule Operator Leave](./images/scheduleleaevoperator.png) - -Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee and bond. +The CLI flag name is `--restaking-contract` for compatibility; it refers to the staking contract. If successful, your leave will be scheduled. -## Cancel Operator Leave - -### Step 1: Access the PolkadotJS Interface - -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle Network by selecting the appropriate network from the dropdown list. - -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: - -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer - -### Step 2: Cancel Operator Unstake - -Ensure you have joined as an operator first, see [Join as an Operator](./join.mdx) and have scheduled an leave. - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. - -![PolkadotJS Extrinsics](./images/extrinsic.png) - -- Under the **MultiAssetDelegation** section, select **Cancel Operator Leave** - -![PolkadotJS Cancel Operator Leave](./images/cancelleaveoperator.png) - -Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee and bond. - -If successful, your unstake will be canceled. - ## Execute Operator Leave -### Step 1: Access the PolkadotJS Interface - -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle Network by selecting the appropriate network from the dropdown list. - -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: - -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer - ### Step 2: Execute Operator Leave -Ensure you have joined as an operator first, see [Join as an Operator](./join.mdx) and have scheduled a leave, also ensure the leave delay period has passed. - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. - -![PolkadotJS Extrinsics](./images/extrinsic.png) - -- Under the **MultiAssetDelegation** section, select **Execute Operator Leave** - -![PolkadotJS Execute Operator Leave](./images/executeleaveoperator.png) - -Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee and bond. +Ensure you have scheduled a leave and the leave delay period has passed. + +```bash +cargo tangle operator complete-leaving \ + --http-rpc-url "$HTTP_RPC_URL" \ + --ws-rpc-url "$WS_RPC_URL" \ + --keystore-path "$KEYSTORE_PATH" \ + --tangle-contract "$TANGLE_CONTRACT" \ + --restaking-contract "$RESTAKING_CONTRACT" \ + --status-registry-contract "$STATUS_REGISTRY_CONTRACT" +``` If successful, you will no longer be an operator. 
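If you script the leave flow, a small wrapper can keep the shared connection flags in one place. This is only a sketch; it assumes the environment variables shown above and simply forwards them to the documented subcommands.

```bash
# Reuse the shared connection flags for both leave steps.
tangle_operator() {
  cargo tangle operator "$@" \
    --http-rpc-url "$HTTP_RPC_URL" \
    --ws-rpc-url "$WS_RPC_URL" \
    --keystore-path "$KEYSTORE_PATH" \
    --tangle-contract "$TANGLE_CONTRACT" \
    --restaking-contract "$RESTAKING_CONTRACT" \
    --status-registry-contract "$STATUS_REGISTRY_CONTRACT"
}

# Schedule the leave now.
tangle_operator start-leaving

# Run this later, once the leave delay period has passed.
tangle_operator complete-leaving
```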
diff --git a/pages/operators/operator/join_operator/stake.mdx b/pages/operators/operator/join_operator/stake.mdx index 675b5096..4ef6c144 100644 --- a/pages/operators/operator/join_operator/stake.mdx +++ b/pages/operators/operator/join_operator/stake.mdx @@ -1,125 +1,68 @@ ## Staking as an Operator -Operators can increase their stake to participate to increase their chances of being selected for roles or to signal their commitment to the network. +Operators can increase their self-stake to increase capacity, qualify for higher minimums, or signal commitment to customers. ## Bond More -### Step 1: Access the PolkadotJS Interface +### Increase Operator Stake -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle Network by selecting the appropriate network from the dropdown list. +Operators can increase their self-stake via the CLI. The amount is in wei for the configured bond asset. -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: +```bash +export HTTP_RPC_URL="https://..." +export WS_RPC_URL="wss://..." +export KEYSTORE_PATH="./keystore" +export TANGLE_CONTRACT="0x..." +export RESTAKING_CONTRACT="0x..." +export STATUS_REGISTRY_CONTRACT="0x..." -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer +cargo tangle operator increase-stake \ + --http-rpc-url "$HTTP_RPC_URL" \ + --ws-rpc-url "$WS_RPC_URL" \ + --keystore-path "$KEYSTORE_PATH" \ + --tangle-contract "$TANGLE_CONTRACT" \ + --restaking-contract "$RESTAKING_CONTRACT" \ + --status-registry-contract "$STATUS_REGISTRY_CONTRACT" \ + --amount +``` -### Step 2: Bond More as an Operator +The CLI flag name is `--restaking-contract` for compatibility; it refers to the staking contract. -Ensure you have joined as an operator first, see [Join as an Operator](./join.mdx). - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. - -![PolkadotJS Extrinsics](./images/extrinsic.png) - -- Under the **MultiAssetDelegation** section, select **Operator Bond More** and enter the bond amount. - -![PolkadotJS Bond More](./images/bondmore.png) - -Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee and bond. - -![PolkadotJS Transaction](./images/signstake.png) - -If successful, you should see the following confirmation toast notification: - -![PolkadotJS Operator Joined](./images/successstake.png) +If successful, the transaction emits `OperatorStakeIncreased(operator, amount)`. ## Schedule Operator Unstake -### Step 1: Access the PolkadotJS Interface - -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle Network by selecting the appropriate network from the dropdown list. - -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: - -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer +### Step 1: Schedule Operator Unstake -### Step 2: Schedule Operator Unstake +Ensure you have joined as an operator first, see [Join as an Operator](./join). -Ensure you have joined as an operator first, see [Join as an Operator](./join.mdx). - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. 
- -![PolkadotJS Extrinsics](./images/extrinsic.png) - -- Under the **MultiAssetDelegation** section, select **Schedule Operator Unstake** and enter the amount to unstake. - -![PolkadotJS Bond More](./images/operatorunstake.png) - -Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee and bond. - -![PolkadotJS Transaction](./images/signstake.png) +```bash +cargo tangle operator schedule-unstake \ + --http-rpc-url "$HTTP_RPC_URL" \ + --ws-rpc-url "$WS_RPC_URL" \ + --keystore-path "$KEYSTORE_PATH" \ + --tangle-contract "$TANGLE_CONTRACT" \ + --restaking-contract "$RESTAKING_CONTRACT" \ + --status-registry-contract "$STATUS_REGISTRY_CONTRACT" \ + --amount +``` If successful, your tokens will be unlocked after the unstake delay period. -## Cancel Operator Unstake - -### Step 1: Access the PolkadotJS Interface - -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle Network by selecting the appropriate network from the dropdown list. - -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: - -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer - -### Step 2: Cancel Operator Unstake - -Ensure you have joined as an operator first, see [Join as an Operator](./join.mdx) and have scheduled an unstake. - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. - -![PolkadotJS Extrinsics](./images/extrinsic.png) - -- Under the **MultiAssetDelegation** section, select **Cancel Operator Unstake** and enter the amount to cancel. - -![PolkadotJS Cancel Operator Unstake](./images/canceloperatorunstake.png) - -Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee and bond. - -![PolkadotJS Transaction](./images/signstake.png) - -If successful, your unstake will be canceled. - ## Execute Operator Unstake -### Step 1: Access the PolkadotJS Interface - -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle Network by selecting the appropriate network from the dropdown list. - -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: - -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer - ### Step 2: Execute Operator Unstake -Ensure you have joined as an operator first, see [Join as an Operator](./join.mdx) and have scheduled an unstake, also ensure the unstake delay period has passed. - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. - -![PolkadotJS Extrinsics](./images/extrinsic.png) - -- Under the **MultiAssetDelegation** section, select **Execute Operator Unstake** - -![PolkadotJS Execute Operator Unstake](./images/executeoperatorunstake.png) - -Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee and bond. - -![PolkadotJS Transaction](./images/signstake.png) +Ensure you have scheduled an unstake and the delay period has passed. 
+ +```bash +cargo tangle operator execute-unstake \ + --http-rpc-url "$HTTP_RPC_URL" \ + --ws-rpc-url "$WS_RPC_URL" \ + --keystore-path "$KEYSTORE_PATH" \ + --tangle-contract "$TANGLE_CONTRACT" \ + --restaking-contract "$RESTAKING_CONTRACT" \ + --status-registry-contract "$STATUS_REGISTRY_CONTRACT" +``` If successful, all unstaked tokens will be unlocked and returned to the operator. diff --git a/pages/operators/pricing/_meta.ts b/pages/operators/pricing/_meta.ts index fb3da9cd..c37074cd 100644 --- a/pages/operators/pricing/_meta.ts +++ b/pages/operators/pricing/_meta.ts @@ -1,7 +1,7 @@ import { Meta } from "nextra"; const meta: Meta = { - overview: "Overview", + overview: "Pricing Basics", }; export default meta; diff --git a/pages/operators/pricing/overview.mdx b/pages/operators/pricing/overview.mdx index 1cf4167e..9df6221b 100644 --- a/pages/operators/pricing/overview.mdx +++ b/pages/operators/pricing/overview.mdx @@ -1,23 +1,27 @@ --- -title: Blueprint Pricing Overview +title: Blueprint Pricing --- # Blueprint Pricing +SDK source (GitHub): https://github.com/tangle-network/blueprint/tree/v2/crates/pricing-engine + +Operator preferences interface (GitHub): https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/ITangleOperators.sol + As a blueprint operator, you'll need to set up pricing for your services to receive fair compensation for the resources you provide. This guide explains how pricing works in the Tangle Network and how to configure it properly. ## Prerequisites - Basic understanding of Tangle Network operations -- Familiarity with running and maintaining a Tangle node -- Knowledge of basic blueprint concepts +- Familiarity with running the Blueprint Manager runtime +- Knowledge of blueprint concepts and service lifecycles ## How Blueprint Pricing Works The pricing process follows these steps: 1. **Registration**: When you register as an operator for a blueprint, you provide your pricing service address in your preferences - - **Note**: If your RPC server address changes, you can update it on-chain for any registered blueprint through the `updateRpcAddress` services extrinsic call + - **Note**: If your RPC server address changes, update it on-chain via `updateOperatorPreferences` on the `Tangle` contract (see `ITangleOperators`) 2. **Quote Requests**: Users request price quotes from registered operators like you 3. **Quote Generation**: Your service calculates prices based on resource requirements, creates security commitments, and signs quotes 4. **Operator Selection**: Users select operators based on price and other factors @@ -80,7 +84,7 @@ Where: When users request your services, they include security requirements that specify what percentage of assets you need to secure. Your pricing service needs to respond with your commitment: - **Exposure Percentage**: The percentage of assets you guarantee to secure (between the user's minimum and maximum requirements) -- **Asset Types**: Can be Custom (u64) or ERC20 (H160 address) +- **Asset Types**: Native (address(0)) or ERC20 (token address) This commitment will be included with your signed quote. The commitment used for the quote is automatically the minimum exposure percentage specified in the user's security requirements. 
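A small worked illustration of that rule, with hypothetical numbers: if a request allows between 10% and 50% exposure, the signed quote commits exactly the 10% minimum, and the asset is identified either as native (`address(0)`) or by its ERC20 token address.

```bash
# Hypothetical security requirement attached to a quote request
MIN_EXPOSURE=10   # percent (user's minimum)
MAX_EXPOSURE=50   # percent (user's maximum)

# Per the rule above, the committed exposure is the user's minimum.
COMMITTED_EXPOSURE=$MIN_EXPOSURE

# Asset identifier: address(0) denotes the native asset; an ERC20 is identified by its token address.
ASSET="0x0000000000000000000000000000000000000000"

echo "Quote commits ${COMMITTED_EXPOSURE}% exposure (allowed ${MIN_EXPOSURE}-${MAX_EXPOSURE}%) on asset ${ASSET}"
```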
diff --git a/pages/operators/quality-of-service.mdx b/pages/operators/quality-of-service.mdx index 1ba3880a..2f93100f 100644 --- a/pages/operators/quality-of-service.mdx +++ b/pages/operators/quality-of-service.mdx @@ -4,131 +4,54 @@ title: Quality of Service Monitoring # Quality of Service Monitoring -As an operator, the Quality of Service (QoS) system provides you with comprehensive visibility into your running blueprints. This guide explains how to access and interpret the QoS dashboards and metrics provided by the operators running your blueprints. +QoS is the observability layer for running Blueprints. As an operator, you decide how metrics, logs, and dashboards are exposed to your team or customers. This page outlines what QoS exports and how to configure access safely. -## What is the QoS System? +## What Gets Exported -The Quality of Service (QoS) system in Tangle Network provides a complete observability stack that gives you access to optional insights into your running blueprints: +QoS uses Prometheus-compatible metrics by default, with optional Grafana and Loki. -- Real-time monitoring of blueprint health and performance -- Centralized logs for troubleshooting and audit trails -- Heartbeat monitoring to verify continuous operation -- Visualization dashboards for all key metrics +| Component | Default Endpoint | Notes | +| ------------------ | ------------------------------------- | ---------------------------------------------------------------------- | +| Prometheus metrics | `http://:9090/metrics` | Includes `/health` plus Prometheus v1 API routes like `/api/v1/query`. | +| Grafana UI | `http://:3000` | Only when configured or managed by QoS. | +| Loki push API | `http://:3100/loki/api/v1/push` | Only when configured or managed by QoS. | -The information provided by the QoS service may be optional and unique depending upon the blueprint in question, so it is recommended to check the documentation of a given blueprint for more specifics. +## Managed Stack vs External Stack -## Accessing QoS Dashboards +### Managed Stack (Docker) -When a blueprint is running for you, the operator provides access to QoS dashboards through Grafana. Here's how to access them: +If the Blueprint enables `manage_servers`, QoS will launch Grafana/Loki/Prometheus containers. You should: -1. In your blueprint execution details, locate the operator's QoS endpoint (typically provided after blueprint execution begins) -2. Navigate to the Grafana URL (default: `http://[operator-endpoint]:3000`) - while the port defaults to 3000, it may be different and specified by the operator running it. -3. Log in using the credentials provided by the operator (typically admin/admin for basic setups) - this may also differ from blueprint to blueprint. -4. Once logged in, navigate to the "Dashboards" section in the left sidebar -5. Look for a dashboard with a name that corresponds to the ID of your blueprint +- Ensure Docker is available on the host. +- Mount persistent volumes for Grafana and Loki (`data_dir`). +- Override default Grafana credentials (defaults are admin/admin and anonymous access is on). +- Open ports only on trusted networks or front them with a proxy. -## What You Can Monitor +### External Stack (Recommended for Production) -The QoS dashboards provide comprehensive visibility into your blueprint's operation: +Run your own observability stack and point QoS to it: -### 1. System Performance +- Configure Prometheus to scrape `http://:9090/metrics`. 
+- Set `GrafanaConfig.prometheus_datasource_url` to your Prometheus URL. +- If you use Loki, set `LokiConfig.url` to your Loki push endpoint. -The system metrics panels can show you how the blueprint is utilizing resources, with some example metrics being: +This approach keeps credentials and retention policies under your control. -- **CPU Usage**: Real-time CPU utilization by your blueprint -- **Memory Consumption**: RAM usage over time -- **Disk I/O**: Storage activity for data-intensive operations -- **Network Traffic**: Inbound/outbound network traffic +## Quick Verification -These metrics help you understand if your blueprint has adequate resources and is performing efficiently. +```bash +curl -s http://localhost:9090/health +curl -s http://localhost:9090/metrics | head -n 20 +``` -### 2. Blueprint-specific Metrics +## Security Notes -These panels show you how your specific blueprint is performing: +- Do not expose Grafana with default credentials. +- Prefer a reverse proxy with auth and TLS. +- If you allow public dashboards, isolate them from write endpoints. -- **Job Execution Frequency**: How often jobs are being executed -- **Job Duration Statistics**: How long jobs are taking to complete -- **Error Rates**: Percentage of jobs failing or experiencing errors -- **Resource Utilization**: How efficiently resources are being used +## Related Docs -Any given blueprint may also have additional information that is specific to that blueprint and the jobs it runs. - -### 3. Heartbeat Monitoring - -The heartbeat section shows you the operational status of your blueprint: - -- **Last Heartbeat Timestamp**: When the most recent heartbeat was recorded -- **Heartbeat Success Rate**: Percentage of successful heartbeats -- **Chain Confirmation Status**: Verification that heartbeats are being recorded on-chain - -These heartbeats ensure that an operator is punished (slashed) if they do not run the blueprint as they should. - -### 4. Log Visualization with Loki - -Centralized logs provide detailed insights into blueprint operation: - -- **Error Logs**: Any errors or warnings generated by your blueprint -- **Information Logs**: Standard operational logs from your blueprint -- **System Logs**: Underlying system events that may affect your blueprint - -## Interpreting QoS Data - -### Key Performance Indicators - -When monitoring your blueprints, pay attention to these important indicators: - -1. **Job Success Rate**: Should be close to 100% under normal conditions -2. **Response Time**: How quickly jobs are being completed -3. **Resource Efficiency**: Is your blueprint using resources as expected? -4. **Heartbeat Regularity**: Heartbeats should occur at consistent intervals - -### Warning Signs to Watch For - -These patterns may indicate issues with your blueprint: - -- **Increasing Error Rates**: May indicate logic problems or resource constraints -- **Growing Response Times**: Could suggest performance degradation -- **Missing Heartbeats**: May indicate blueprint instability or network issues -- **Unexpected Resource Spikes**: Could indicate inefficient operations or potential attacks - -## Troubleshooting Using QoS Data - -When you encounter issues with your blueprints, the QoS dashboard provides valuable diagnostics: - -### For Failed Jobs - -1. Check the logs panel for specific error messages -2. Look at resource usage at the time of failure -3. Examine any pattern in failures (time of day, specific job types) - -### For Performance Issues - -1. 
Monitor CPU and memory usage during slow periods -2. Look for concurrent operations that may cause contention -3. Check network traffic for potential bottlenecks - -### For Stability Problems - -1. Review the heartbeat history for gaps or irregularities -2. Examine system logs around times of instability -3. Check for correlations between resource exhaustion and failures - -## Frequently Asked Questions - -**Q: How do I access QoS dashboards if the URL wasn't provided?** -A: The endpoint of your operator is available on-chain, and you can access the QoS dashboards by following the instructions in the [Accessing QoS Dashboards](#accessing-qos-dashboards) section. - -**Q: Can I export QoS metrics for my own analysis?** -A: Yes, most Grafana dashboards allow data export in various formats (CSV, JSON). - -**Q: How long is QoS data retained?** -A: This data is only retained during the duration of the service, unless otherwise stated by the operator/blueprint. - -## Related Information - -To learn more about operating with Tangle Network blueprints, you may want to review: - -- [Blueprint Benchmarking](/operators/benchmarking) -- [Pricing Strategies](/operators/pricing) - -Understanding how to interpret QoS metrics helps you gain insights into blueprint performance and troubleshoot issues effectively. +- [Blueprint Manager setup](/operators/manager/setup) +- [Operator Runbook](/operators/runbook) +- [Benchmarking](/operators/benchmarking) diff --git a/pages/operators/runbook.mdx b/pages/operators/runbook.mdx new file mode 100644 index 00000000..6455e19b --- /dev/null +++ b/pages/operators/runbook.mdx @@ -0,0 +1,42 @@ +# Operator Runbook + +This is a minimal operational checklist for keeping Blueprint services healthy and safe in production. + +## Daily Checks + +- Verify the Blueprint Manager process is running and connected to RPC + WS endpoints. +- Confirm service heartbeats are progressing (no sustained gaps). +- Review job error rates and retry spikes. +- Check disk usage for cache + data directories. + +## Key Signals to Watch + +- **Heartbeat drift**: late or missing heartbeats can trigger QoS degradation. +- **Job queue backlog**: growing queues indicate capacity pressure. +- **RPC latency**: slow RPCs lead to missed service events. +- **Crash loops**: repeated restarts usually imply config or artifact issues. + +## Incident Response + +1. Pause new work by stopping the manager. +2. Capture logs + recent job failures for root cause. +3. Restore service with a known-good config and pinned artifact versions. +4. Run a small validation job before resuming full traffic. + +## Capacity Planning + +- Reserve headroom for spikes in service requests and simulations. +- Size storage for artifacts + per-service data. +- Isolate noisy workloads into separate hosts when possible. + +## Security Hygiene + +- Keep keystores isolated and use least-privilege access. +- Rotate operator keys on schedule. +- Use separate RPC credentials per environment. 
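A minimal sketch that rolls the daily checks above into one script. Every concrete name here is an assumption to adapt: the systemd unit `blueprint-manager`, the data path `/var/lib/blueprint`, and the QoS metrics endpoint on port 9090 from the Quality of Service page.

```bash
#!/usr/bin/env bash
# Daily health sweep; adjust unit names, paths, and ports to your deployment.
set -euo pipefail

# 1. Blueprint Manager process is up (hypothetical systemd unit name).
systemctl is-active --quiet blueprint-manager && echo "manager: running" || echo "manager: DOWN"

# 2. QoS metrics endpoint responds (default port documented on the Quality of Service page).
curl -fsS http://localhost:9090/health > /dev/null && echo "qos: healthy" || echo "qos: unreachable"

# 3. Disk headroom for cache + data directories (hypothetical path).
df -h /var/lib/blueprint | tail -n 1

# 4. Error count in the last 24 hours of manager logs.
journalctl -u blueprint-manager --since "24 hours ago" | grep -ci error || true
```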
+ +## Related Docs + +- [Blueprint Manager setup](/operators/manager/setup) +- [Quality of Service](/operators/quality-of-service) +- [Benchmarking](/operators/benchmarking) diff --git a/pages/operators/tangle-avs/_meta.ts b/pages/operators/tangle-avs/_meta.ts deleted file mode 100644 index 11cfaec4..00000000 --- a/pages/operators/tangle-avs/_meta.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - quickstart: "Quickstart", - "cross-chain": "Cross-Chain Restaking", -}; - -export default meta; diff --git a/pages/operators/tangle-avs/cross-chain.mdx b/pages/operators/tangle-avs/cross-chain.mdx deleted file mode 100644 index 2ce24108..00000000 --- a/pages/operators/tangle-avs/cross-chain.mdx +++ /dev/null @@ -1,47 +0,0 @@ ---- -title: Cross-Chain Restaking -description: Cross-Chain Restaking ---- - -# Overview - -Tangle AVS offers cross-chain restaking between Tangle and EigenLayer. Our system ensures that cross-chain actions, -specifically slashing, occur reliably, regardless of the network on which they originate or occur. - -## Trust Model - -The trust model for Tangle's cross-chain operations with EigenLayer is designed to be robust and transparent: - -1. **Validator Responsibilities**: Operators running the Tangle AVS are subject to the slashing conditions of the - Tangle main chain, including consensus failures and offline absences. Misbehavior on either network can result in consequences - on both chains an operator is participating in. - -2. **Cross-Chain Messaging**: We utilize a Hyperlane bridge to relay slashing events and rewards between both networks. - -## How it Works - -### Registration Flow - -![Registration Flow](../../../public/images/diagram-tangle-avs-register.png) - -### De-registration Flow - -![De-registration Flow](../../../public/images/diagram-tangle-avs-deregister.png) - -### Key Points in the AVS Flows: - -- **Registration**: Upon Registering to EigenLayer and the AVS, the registration event is communicated to Tangle's - Network via a bridge. Following this cross-chain message, the node receives a reward in Tangle Tokens. Once receiving - this rewards, the AVS will join the validator set and register as an Operator. The Validator now begins validating on Tangle. - -- **De-registration**: A node can de-register from both networks, by sending a deregister event to the AVS. This event - is forwarded to Tangle across the bridge to ensure the de-registration occurs on both networks. - -- **Slashing**: When a validator misbehaves on EigenLayer, Tangle enforces the slashing event. Nodes listening for slashing events on Tangle forward that slashing to EigenLayer in exchange for a reward. - Similarly, slashing events that originate on Tangle can be forwarded to non-Tangle networks to be handled accordingly. - This centralized slashing mechanism ensures consistency and accountability. - -## Where to Ask Questions - -- Join our Discord to ask questions or join in on the discussions: [Tangle Discord](https://discord.com/invite/cv8EfJu3Tn) -- Telegram more your style? 
We're there too: [Tangle Telegram](https://t.me/tanglenet) diff --git a/pages/operators/tangle-avs/quickstart.mdx b/pages/operators/tangle-avs/quickstart.mdx deleted file mode 100644 index 423d218b..00000000 --- a/pages/operators/tangle-avs/quickstart.mdx +++ /dev/null @@ -1,47 +0,0 @@ ---- -title: Quickstart -description: Setting up Tangle AVS ---- - -import Callout from "../../../components/Callout"; - -# Tangle AVS: Quickstart Guide - -The following is a guide outlining the steps to set up and utilize **Tangle AVS** (Actively Validated Services), -allowing you to run a Tangle Validator on **EigenLayer**'s network. Further explanation of the cross-chain mechanisms offered by -Tangle AVS can be found in the [Cross-Chain](cross-chain.mdx) section. - -For information on hardware requirements, see the [Hardware](../../operators/node-basics/hardware.mdx) section. - - - Ensure that you have everything necessary for running a Tangle node and have access to the necessary network - configurations for EigenLayer or any other network you plan to connect to. - - -## Setup - -The AVS is designed to automatically handle as much of the setup process as possible, so that you can focus on running your -Tangle node. Some specific configurations are in the midst of being implemented, and they will be added in a future update. - -### Usage - -1. Clone the repository: - -```sh filename="git clone" copy - git clone https://github.com/tangle-network/avs.git tangle-avs - cd tangle-avs -``` - -2. Build: - -```sh filename="cargo build" copy -cargo build --release -``` - -### Running Tests - -If you would like to ensure the Tangle AVS works as you would expect, you can test it against local testnets. You can run the following command while a local Tangle testnet is running. - -```sh filename="cargo test" copy -RUST_LOG=gadget=trace cargo test test_full_tangle_avs -- --nocapture -``` diff --git a/pages/operators/validator/_meta.ts b/pages/operators/validator/_meta.ts deleted file mode 100644 index 06edfbf0..00000000 --- a/pages/operators/validator/_meta.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - introduction: "Start Validating", - npos: "NPoS Validator Election", - "upgrade-node": "Upgrade your Validator", - proxyaccount: "Proxy Accounts", - "validator-rewards": "Rewards", -}; - -export default meta; diff --git a/pages/operators/validator/introduction.mdx b/pages/operators/validator/introduction.mdx deleted file mode 100644 index 5239baae..00000000 --- a/pages/operators/validator/introduction.mdx +++ /dev/null @@ -1,218 +0,0 @@ ---- -title: Start Validating on Tangle -description: An overview of Tangle Network's validator registration process. ---- - -import { Tabs, Tab } from "/components/Tabs"; -import Callout from "/components/Callout"; - -# Start Validating - - -**You should be familiar with [account management basics](/network/account-manage) before proceeding, and ensure you're connected to the correct network: Tangle Network.** - - -Becoming a validator on a decentralized network like Tangle is a big responsibility and a fairly technical process. **You are accountable for both your stake and the stake of your nominators. Any errors could lead to slashing of tokens, impacting your balance and reputation.** However, there are also rewards - you help secure a decentralized network and can grow your stake through nominations. - -To become a validator, you need substantial system administration skills to set up infrastructure and resolve anomalies independently. 
Follow security best practices, as this is crucial for success. The validator role involves more than just running a node. - -You don't have to go it alone. Connect with experienced teams and fellow validators in communities like the Tangle [Discord Validator channel.](https://discord.com/invite/cv8EfJu3Tn) They can provide invaluable insights and support. Carefully weigh the risks and rewards, prepare thoroughly, and leverage the community. - -Generally, the process for becoming a validator involves three steps: - -1. **Bonding Tokens:** Before a node can become a validator, the node operator usually needs to bond (or stake) a certain amount of tokens. This is a way of putting up collateral that can be slashed (or forfeited) if the validator behaves maliciously or fails to properly validate transactions and blocks. Bonding is a way of ensuring that validators have a vested interest in properly maintaining the network. - - -**How much TNT do I need to be an active Validator?** - -To be elected to the active validator set (to receive block rewards), you need a minimum stake behind your validator. This can come from yourself or nominators. This means at a minimum, you'll need enough TNT for stash and staking accounts with the existential deposit, plus extra for fees. The rest can come from nominators. To understand validator election, check the [NPoS election algorithms page.](https://wiki.polkadot.network/docs/learn-phragmen#what-is-the-sequential-phragm%C3%A9n-method) - -In the future, validators may be able to participate in other forms of reward-winning activities without participating in block rewards. - - - -2. **Setting Up Validator Infrastructure:** This includes ensuring that the node is properly configured, connected to the network, has the necessary keys set up, etc. Part of this setup will involve generating and injecting session keys (like RoleKey, Babe, Grandpa, etc.) which are crucial for various consensus and validation processes. - -3. **Nominating or Registering as a Validator:** After bonding tokens and setting up the validator node, the operator then registers or nominates their node as a validator candidate. This involves submitting a transaction to the network indicating their intention to validate. - -# Launch a Validator - - -The following guide assumes you have a node operating and synced with the network. If not, see the following: -1. [Hardware Specifications](../node-basics/hardware) -2. [Node Software](../node-basics/node-software) -3. [Run Node with Docker](../node-basics/docker-node) -4. **or** [Run Node with Systemd](../node-basics/systemd) - -Once your node is operational, proceed. - - - -## 1. Bond TNT or tTNT - -To validate, you will use a 'Stash' account and a 'staking account.' Make sure the staking account has enough funds to pay the fees for making transactions. Keep most of your funds in the stash account since it is meant to be the custodian of your staking funds. - - -(Optional) While you are not required to use a proxy account for staking, it is recommended. For this, you will create two accounts and make sure each -of them has at least enough funds to pay the fees for making transactions. Learn more about [setting up a proxy account](./proxyaccount.mdx). - - - -Controller accounts are deprecated in Substrate. For more information, [see this discussion.](https://forum.polkadot.network/t/staking-controller-deprecation-plan-staking-ui-leads-comms/2748) - -It is now time to set up our validator. We will do the following: - -1.
Bond the TNT of the Stash account on the Tangle Network. These TNT will be put at stake for the security of the network and can be slashed. -2. Select the account (optionally a [staking proxy account](./proxyaccount.mdx)). This is the account that will decide when to start or stop validating. - -First, go to the Staking section. Click on "Account Actions", and then the "+ Stash" button. - - -Make sure not to bond all your TNT balance since you will be unable to pay transaction fees from your bonded balance. Always maintain an unbonded amount for fees. - - -**Stash account** - Select your Stash account. In this example, we will bond 1 TNT, where the minimum bonding amount is 1. Make sure that your Stash account contains at least this much. You can, of course, stake more than this. - -**Staking account** - Select the staking account (or [proxy account](./proxyaccount.mdx) created earlier). This account will also need a small amount of TNT in order to start and stop validating. - -**Value bonded** - How much TNT from the Stash account you want to bond/stake. Note that you do not need to bond all of the TNT in that account. Also note that you can always bond more TNT later. However, withdrawing any bonded amount requires waiting out the unbonding period. On Kusama, the unbonding period is 7 days. On Polkadot, the planned unbonding period is 28 days. - -**Payment destination** - The account where the rewards from validating are sent. More info here. Payouts can go to any custom address. If you'd like to redirect payments to an account that is neither the staking [proxy account](./proxyaccount.mdx) nor the stash account, set one up. Note that it is extremely unsafe to set an exchange address as the recipient of the staking rewards. - -Once everything is filled in properly, click `Bond` and sign the transaction with your Stash account. - -After a few seconds, you should see an `ExtrinsicSuccess` message. - -Your bonded account will be available under `Stashes`. You should now see a new card with all your accounts (note: you may need to refresh the screen). The bonded amount on the right corresponds to the funds bonded by the Stash account. - -## 2. Generate your Keys and Import them to the Keystore - -In order to participate in the Tangle protocol, block production, and block finalization, you will be required to set up several keys. These keys include: - -- Role key (Ecdsa) -- Babe key (Sr25519) -- Account key (Sr25519) -- Grandpa key (Ed25519) -- ImOnline key (Sr25519) - -[More info about keys](https://wiki.polkadot.network/docs/learn-cryptography#session-keys) - -See the guides for [launching Tangle Network with Docker](../node-basics/docker-node.mdx) and [Launching with Systemd](../node-basics/systemd.mdx) for exact instructions on this step. - -Once your node and keys are set up and your node is synced, proceed to the following: - -## 3. Register with the Network - -Session keys are a critical aspect of the Tangle Network's consensus mechanism and are composed of the several keys generated in the previous step, each with a different function. These keys enable your validator node to participate in consensus, and a misconfiguration can lead to missed rewards or even penalties. You can use RPC methods like `hasKey` to check for a specific key or `hasSessionKeys` to check the full session key public key string. - -**Starting Your Node** - -After your node is fully synchronized with the network, stop the process using Ctrl-C.
You'll now start your node by designating itself as a validator: - -``` -tangle --validator --name "YourNodeNameOnTelemetry" -``` - -The output will be similar to: - -``` -[timestamp] Tangle Network Standalone -[timestamp] ✌️ version x.x.x -[timestamp] ❤️ by Webb Technologies -[timestamp] 📋 Chain specification: Tangle Network -[timestamp] 🏷 Node name: YourNodeName -... and so on -``` - -Note that you can give your validator any name that you like. However, since this name will appear on telemetry and is visible to others, choose a unique name. - -### Register your Session Key with the Network for your Node - -To participate in consensus, you need to inform the chain about your session keys and map them to your node. - -**Option 1: PolkadotJS Apps** - -1. Connect to Your Validator Node: - Start by connecting the PolkadotJS explorer to your validator node. -2. Access the Toolbox: - Navigate to the `Toolbox` tab. -3. Select `RPC Calls`. - Rotate the Keys: - From the dropdown menu, choose `author > rotateKeys()``. -4. Execute the RPC call. - **Important: Save the output.** This represents your session keys and will be essential for the next steps. - -**Option 2: CLI** - -If you're working on a remote server and need to rotate your session keys, you can use the following command: - -```sh -curl -H "Content-Type: application/json" -d '{"id":1, "jsonrpc":"2.0", "method": "author_rotateKeys", "params":[]}' http://localhost:9933 -``` - -**Note:** Adjust http://localhost:9933 if your node's address differs. - -This command prompts your node to generate a new set of session keys. The concatenated public parts of these keys will be returned as the result. - -### Submitting the setKeys Transaction - -To inform the chain about your session keys: - -1. Navigate to `Staking > Account Actions` -2. Set Session Key - -- Click on `Set Session Key` for the account you've designated as your staking account. -- Enter the output from `author_rotateKeys` in the appropriate field. -- Click `Set Session Key`. - -Once you've submitted this transaction, your node is now recognized as a validator and is prepared to engage in the Tangle network's consensus process. - - -**Verify Node Status** -To confirm that your node is live and synchronized, head to Tangle Telemetry- [currently you can view Testnet Telemetry only,](https://telemetry.polkadot.io/#list/0xea63e6ac7da8699520af7fb540470d63e48eccb33f7273d2e21a935685bf1320) and locate your node. Given the myriad of nodes on the Tangle Network, ensure you've chosen a unique name for easier identification. For instance, if you've named your node `tangletechtest`, it should be visible when searched. - - -## Setup via Validator Tab - -This step finalizes and enters you into the validator queue. - -1. Navigate to `Staking>Account Actions` -2. Look for your validator node, and click `Validate` - -Here, you'll need to input the keys generated from the `rotateKeys` step, which is the hexadecimal output from the `author_rotateKeys` process. These keys will remain in a "pending" state until they are incorporated at the beginning of a new era. - -**Define your "reward commission percentage."** This denotes the commission rate applicable to your validator's rewards. **Note on Commission:** A commission rate of 100% indicates that you intend for your validator not to receive any nominations. This could discourage nominators and should be set judiciously. - -**Payment Preferences:** Specify the percentage of rewards you wish to claim. 
The remaining balance will be divided among your nominators. - -You also have the option to accept or decline new nominations via the "allows new nominations" setting. - -Click on `Bond & Validate` to enter the set of validators. - -**Confirm your Validator** -Navigating to the "Staking" tab will display a list of active validators operating on the network. At the top, you'll see available validator slots and the count of nodes that have expressed their intent to validate. To check your node's status, switch to the "Waiting" tab. - - -**"Waiting" on the Staking Queue** - -The validator roster is updated every era (roughly 6 hours in Tangle Testnet). In the subsequent era, if there's an available slot and your node is chosen to join the validator set, it will be activated as a validator. Otherwise, it will stay in the waiting queue. If your validator doesn't get selected for a specific era, it remains in the queue for the next one. No restart is required. However, you might consider increasing the staked Tangle tokens or seeking more nominators to enhance the chances of your validator's selection. - - - -# Additional Setup - -## Setting identity - -While not required, we highly recommend that validators and node operators set an identity, which is critical for receiving nominations and being seen as a trustworthy node. - -1. Go to the Polkadot.js portal: `Accounts` -2. Open the 3 dots next to your address: `Set on-chain Identity` -3. Enter all fields you want to set. -4. Send the transaction. - -#### Request judgment - -1. Go to the Polkadot.js portal: `Developer > Extrinsic` -2. Select your account and extrinsic type: `identity / requestJudgment` -3. Send the transaction. diff --git a/pages/operators/validator/npos.mdx b/pages/operators/validator/npos.mdx deleted file mode 100644 index 1cf32054..00000000 --- a/pages/operators/validator/npos.mdx +++ /dev/null @@ -1,50 +0,0 @@ -# Tangle Network Consensus Algorithm - -Tangle Network uses Nominated Proof of Stake (NPoS) as its consensus mechanism, similar to Polkadot and Kusama. A key part of NPoS is the election of validators to participate in consensus. This document provides an overview of the Sequential Phragmén election algorithm used in Tangle Network's NPoS for validators. - -## Goals of the NPoS Election Algorithm - -The Sequential Phragmén election algorithm in Tangle Network aims to optimize three key metrics when determining the set of active validators: - -1. Maximize the total stake securing the network. -2. Maximize the stake behind the least-staked validator. -3. Minimize the variance in stake across the validator set. - -These goals ensure the network has high economic security, a high threshold for attack, and fair representation of stake. - -## Sequential Phragmén Election Method - -Tangle Network uses the Sequential Phragmén method for electing validators. This multi-winner election method aims to elect a validator set such that the stake is distributed as evenly as possible among them. The algorithm works as follows: - -1. Nominators cast their votes, indicating which validators they support. -2. The validator with the highest approval stake (total stake backing them) is elected. -3. The stake of each nominator who supported the elected validator is reduced proportionally to their contribution to the validator's approval stake. -4. The process repeats from step 2 until all available validator slots are filled. - -This iterative process ensures a fair distribution of stake across the elected validator set. 
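To make the four-step loop above concrete, here is a deliberately simplified TypeScript sketch of the selection process. It is an illustration of the steps as described, not the actual on-chain Sequential Phragmén implementation, and the proportional-reduction rule is a rough stand-in.

```typescript
type Nominator = { stake: number; votes: string[] };

// Toy version of the loop described above: pick the candidate with the highest
// approval stake, then reduce supporting nominators' stake in proportion to their
// contribution, and repeat until all slots are filled.
function electValidators(nominators: Nominator[], slots: number): string[] {
  const elected: string[] = [];
  const pool = nominators.map((n) => ({ ...n }));

  while (elected.length < slots) {
    // Approval stake = total remaining stake of nominators backing each candidate.
    const approval = new Map<string, number>();
    for (const n of pool) {
      for (const candidate of n.votes) {
        if (!elected.includes(candidate)) {
          approval.set(candidate, (approval.get(candidate) ?? 0) + n.stake);
        }
      }
    }
    if (approval.size === 0) break;

    // Elect the candidate with the highest approval stake.
    const [winner, winnerStake] = [...approval.entries()].sort((a, b) => b[1] - a[1])[0];
    elected.push(winner);

    // Reduce each supporter's stake proportionally to its contribution (crude stand-in).
    for (const n of pool) {
      if (n.votes.includes(winner) && winnerStake > 0) {
        n.stake *= 1 - n.stake / winnerStake;
      }
    }
  }
  return elected;
}

// Two slots, three candidates: expect ["B", "C"] in this toy setup.
console.log(
  electValidators(
    [
      { stake: 100, votes: ["A", "B"] },
      { stake: 50, votes: ["B", "C"] },
      { stake: 30, votes: ["C"] },
    ],
    2,
  ),
);
```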
- -## Practical Considerations and Optimizations - -To optimize the election process and minimize on-chain computation, Tangle Network employs several techniques: - -- Minimizing edges by reducing the number of validators per nominator. -- Maintaining an even stake distribution among elected validators. -- Using off-chain workers to compute the election results and submit them on-chain. - -There are also limits on the number of validators a nominator can select and the number of nominators per validator to manage complexity. - -## Importance for Validators - -As a validator, it's important to understand that not all stake nominated to you may end up contributing to your final backing stake after the election. Nominators typically split their stake among multiple trusted validators. - -Tracking your anticipated backing stake based on nominations can help you plan your node operations. However, the final results will depend on the overall stake distribution and the specific election algorithm used in that era. - -The Sequential Phragmén election method is designed to maintain a fair, decentralized, and secure distribution of stake across the active validator set. By participating in the Tangle Network as a validator, you contribute to the network's security and can earn rewards proportional to your backing stake. - -## Further Resources - -For more information on NPoS, election method and the technicals, see: - -- Polkadot Wiki: [nPoS Election Algorithms](https://wiki.polkadot.network/docs/learn-phragmen#what-is-the-sequential-phragm%C3%A9n-method) -- [W3F Research Page on NPoS](https://research.web3.foundation/Polkadot/protocols/NPoS/Overview) - - An overview of Nominated Proof of Stake as its applied to Polkadot. diff --git a/pages/operators/validator/proxyaccount.mdx b/pages/operators/validator/proxyaccount.mdx deleted file mode 100644 index b2b96c50..00000000 --- a/pages/operators/validator/proxyaccount.mdx +++ /dev/null @@ -1,74 +0,0 @@ -# Setting Up a Proxy Account - -A proxy account allows you to delegate some functionalities to another account, which can act on behalf of the primary account. Polkadot.js Apps also provides an option to create a time-delayed proxy, enhancing security by giving the primary account time to review and potentially cancel transactions before they are executed. - -## Creating a Proxy Account - -### Using the Extrinsics Page - -1. **Navigate to the Extrinsics Page**: - - Extrinsic page : https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/extrinsics - - - Click on the `Developer` tab. - - Select `Extrinsics` from the dropdown. - -2. **Input Proxy Details**: - - - Select your primary account. - - From the dropdown, choose `proxy` > `addProxy`. - - Specify the delegate account for the proxy. - - Choose `Balances` from the `proxyType` dropdown. - - Optionally, set a time delay (in block numbers) to add a waiting period before the proxy can act. - -3. **Finalize the Proxy**: - - Click `Submit Transaction`. - - Authorize and sign the transaction to establish the proxy relationship. - - A confirmation will appear once the transaction is successful. - -### Using the Accounts Page - -1. **Navigate to Your Accounts**: - - Accounts page : https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/accounts - - - Go to the `Accounts` page. - - Find your primary account and click on the three vertical dots next to it. - - Select `Add proxy` (If the account already has a proxy, you'll see `Manage proxies`). - -2. 
**Specify Proxy Details**: - - A pop-up will appear. - - Choose the account you wish to set as a proxy. - - Define the type of proxy. - - Click `Add Proxy`, then `Submit`, and sign the transaction. - -## Verifying Your Proxy Account - -Once you've set up a proxy account, it's essential to verify that it's configured correctly. - -### Using the Chain State Page - -1. **Navigate to Chain State**: - - Chain state page : https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/chainstate - - - From the dropdown, choose `proxy` > `proxies`. - - Select your primary/proxied account. - - Click on the `+` button to send the query. - -2. **Review Proxy Details**: - - Results will display information about your proxies, including the proxy account address, type, delay period (if set), and the total bond amount. - -### Using the Accounts Page - -1. **Go to Your Accounts**: - - Accounts page : https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/accounts - - - On the `Accounts` page, find the Proxy symbol next to your primary account. - - Hover over the icon and click `Manage proxies` to see your proxies. - -2. **Inspect Proxy Overview**: - - A pop-up will show an overview of all your proxy accounts. - -For a more detailed overview of proxies, refer to the [Polkadot.js documentation](https://wiki.polkadot.network/learn/learn-guides-accounts-proxy/). diff --git a/pages/operators/validator/upgrade-node.mdx b/pages/operators/validator/upgrade-node.mdx deleted file mode 100644 index 6847a1cb..00000000 --- a/pages/operators/validator/upgrade-node.mdx +++ /dev/null @@ -1,54 +0,0 @@ -# Validator Upgrade Guide - -## Introduction - -Validators are crucial to the stability and security of the Tangle Network. This guide provides detailed steps for upgrading validators while maintaining strict uptime requirements to avoid slashing. - -## Preparation - -- Stay informed [about new releases](https://github.com/tangle-network/tangle/) from the Tangle Network community. -- Plan the upgrade process to minimize downtime. - -## Key Components - -### Session Keys - -- Stored in the client, linking your node to the staking proxy. -- Changing keys requires waiting for the current session to finish plus two more sessions. - -### Keystore - -- Located at `/chains/Tangle/keystore`. -- Contains private keys for signing transactions. -- **Do not clone or copy** the keystore; generate new keys for each validator instance. - -## Upgrade Steps - -### Setting Up Validator B (Your New Validator) - -1. Start and sync a second node (Validator B) with the `--validator` flag. -2. Generate session keys for Validator B. -3. Submit a `set_key` extrinsic from your staking proxy with Validator B's session key. -4. Note the session when this extrinsic is executed. -5. Keep Validator A running until two full sessions have elapsed after the current one. - -### Switching to Validator B - -1. After Session N+3, Validator B will act as your validator. -2. Perform maintenance on Validator A. - -### Restoring Validator A - -1. Restart Validator A with the `--validator` flag and sync it. -2. Generate new session keys for Validator A. -3. Submit a `set_key` extrinsic with Validator A's new session key. -4. Keep Validator B running until two full sessions have elapsed after the current session. 
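The rotate-and-set-keys hand-off in both checklists above can also be scripted. The sketch below uses @polkadot/api; the WebSocket endpoint, the account URI, and the empty proof argument are illustrative assumptions, and the exact extrinsic shape should be confirmed against the runtime you are actually running.

```typescript
import { ApiPromise, WsProvider, Keyring } from "@polkadot/api";

// Sketch of the rotate-keys + set_keys hand-off described above, using @polkadot/api.
// The WebSocket endpoint and account URI are placeholders; point the endpoint at the
// node whose keystore should hold the new session keys.
async function rotateAndSetKeys(wsEndpoint: string, stakingAccountUri: string) {
  const api = await ApiPromise.create({ provider: new WsProvider(wsEndpoint) });

  // Ask the node to generate and store a fresh set of session keys.
  const newKeys = await api.rpc.author.rotateKeys();

  // Map the new keys to this validator from the staking (proxy) account; the empty
  // proof argument mirrors the common pattern, but confirm it against your runtime.
  const signer = new Keyring({ type: "sr25519" }).addFromUri(stakingAccountUri);
  await api.tx.session.setKeys(newKeys, "0x").signAndSend(signer, ({ status }) => {
    if (status.isInBlock) {
      console.log(`setKeys included in block ${status.asInBlock.toHex()}`);
    }
  });
}

rotateAndSetKeys("ws://127.0.0.1:9944", "//Alice").catch(console.error);
```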
- -## Monitoring the Transition - -Verify the session change by looking for log messages like: - -``` -2019-10-28 21:44:13 Applying authority set change scheduled at block #450092 -2019-10-28 21:44:13 Applying GRANDPA set change to new set with 20 authorities -``` diff --git a/pages/operators/validator/validator-rewards.mdx b/pages/operators/validator/validator-rewards.mdx deleted file mode 100644 index 4688d428..00000000 --- a/pages/operators/validator/validator-rewards.mdx +++ /dev/null @@ -1,125 +0,0 @@ ---- -title: Validator Rewards -description: A brief overview of Tangle network rewards and their payout scheme. ---- - -# Validator Rewards - -Running a validator node on the Tangle Network allows you to connect to the network, sync with a bootnode, obtain local access to RPC endpoints, and also author blocks. The network rewards successful validators (users running validator nodes and actively producing blocks) by paying a set amount of network tokens as rewards. - -## How Rewards are Calculated - -## Era Points - -For every era (a period of time approximately 6 hours in length in Tangle), validators are paid proportionally to the amount of _era points_ they have collected. Era -points are reward points earned for payable actions like: - -- producing a non-uncle block in the Chain. -- producing a reference to a previously unreferenced uncle block. -- producing a referenced uncle block. - -An uncle block is a block that is valid in every regard, but which failed to become -canonical. This can happen when two or more validators are block producers in a single slot, and the -block produced by one validator reaches the next block producer before the others. We call the -lagging blocks uncle blocks. - -Payments occur at the end of every era. - -Era points create a probabilistic component for staking rewards. - -If the _mean_ of staking rewards is the average rewards per era, then the _variance_ is the -variability from the average staking rewards. The exact TNT value of each era point is not known in -advance since it depends on the total number of points earned by all validators in a given era. This -is designed this way so that the total payout per era depends on Tangle's inflation model, and not on the number of payable -actions (f.e., authoring a new block) executed. - -In this case, analyzing the _expected value_ of staking rewards will paint a better picture as the -weight of era points of validators and para-validators in the reward average are taken into -consideration. - -#### High-level breakdown of reward variance - -This should only serve as a high-level overview of the probabilistic nature for staking rewards. - -Let: - -- `pe` = para-validator era points, -- `ne` = non-para-validator era points, -- `EV` = expected value of staking rewards, - -Then, `EV(pe)` has more influence on the `EV` than `EV(ne)`. - -Since `EV(pe)` has a more weighted probability on the `EV`, the increase in variance against the -`EV` becomes apparent between the different validator pools (aka. validators in the active set and -the ones chosen to para-validate). - -Also, let: - -- `v` = the variance of staking rewards, -- `p` = number of para-validators, -- `w` = number validators in the active set, -- `e` = era, - -Then, `v` ↑ if `w` ↑, as this reduces `p` : `w`, with respect to `e`. - -Increased `v` is expected, and initially keeping `p` ↓ using the same para-validator set for -all parachains ensures availability and approval voting. 
In addition, despite `v` ↑ on an `e` to `e` -basis, over time, the amount of rewards each validator receives will equal out based on the -continuous selection of para-validators. - -## Payout Scheme - -No matter how much total stake is behind a validator, all validators split the block authoring -payout essentially equally. The payout of a specific validator, however, may differ based on -era points, as described above. Although there is a probabilistic component to -receiving era points, and they may be impacted slightly depending on factors such as network -connectivity, well-behaving validators should generally average out to having similar era point -totals over a large number of eras. - -Validators may also receive "tips" from senders as an incentive to include transactions in their -produced blocks. Validators will receive 100% of these tips directly. - -For simplicity, the examples below will assume all validators have the same amount of era points, -and received no tips. - -``` -Validator Set Size (v): 4 -Validator 1 Stake (v1): 18 tokens -Validator 2 Stake (v2): 9 tokens -Validator 3 Stake (v3): 8 tokens -Validator 4 Stake (v4): 7 tokens -Payout (p): 8 TNT - -Payout for each validator (v1 - v4): -p / v = 8 / 4 = 2 tokens -``` - -Note that this is different than most other Proof-of-Stake systems such as Cosmos. As long as a -validator is in the validator set, it will receive the same block reward as every other validator. -Validator `v1`, who had 18 tokens staked, received the same reward (2 tokens) in this era as `v4` -who had only 7 tokens staked. - -## Slashing - -Although rewards are paid equally, slashes are relative to a validator's stake. Therefore, if you do -have enough TNT to run multiple validators, it is in your best interest to do so. A slash of 30% -will, of course, be more TNT for a validator with 18 TNT staked than one with 9 TNT staked. - -Running multiple validators does not absolve you of the consequences of misbehavior. Polkadot -punishes attacks that appear coordinated more severely than individual attacks. You should not, for -example, run multiple validators hosted on the same infrastructure. A proper multi-validator -configuration would ensure that they do not fail simultaneously. - -Nominators have the incentive to nominate the lowest-staked validator, as this will result in the -lowest risk and highest reward. This is due to the fact that while their vulnerability to slashing -remains the same (since it is percentage-based), their rewards are higher since they will be a -higher proportion of the total stake allocated to that validator. - -To clarify this, let us imagine two validators, `v1` and `v2`. Assume both are in the active set, -have commission set to 0%, and are well-behaved. The only difference is that `v1` has 90 TNT -nominating it and `v2` only has 10. If you nominate `v1`, it now has `90 + 10 = 100` TNT, and you -will get 10% of the staking rewards for the next era. If you nominate `v2`, it now has -`10 + 10 = 20` TNT nominating it, and you will get 50% of the staking rewards for the next era. In -actuality, it would be quite rare to see such a large difference between the stake of validators, -but the same principle holds even for smaller differences. If there is a 10% slash of either -validator, then you will lose 1 TNT in each case. 
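The nomination trade-off in the closing example can be restated as a few lines of arithmetic; the sketch below simply re-derives the 10% vs. 50% reward shares and the 1 TNT slash loss from the figures above.

```typescript
// Restating the closing example: reward share scales with your proportion of the
// validator's total backing, while slash exposure is a flat percentage of your stake.
function nominationOutcome(existingStake: number, myStake: number, slashRate: number) {
  const totalBacking = existingStake + myStake;
  return {
    rewardShare: myStake / totalBacking, // fraction of that validator's staking rewards
    slashLoss: myStake * slashRate,      // what you lose if the validator is slashed
  };
}

console.log(nominationOutcome(90, 10, 0.1)); // v1: { rewardShare: 0.1, slashLoss: 1 }
console.log(nominationOutcome(10, 10, 0.1)); // v2: { rewardShare: 0.5, slashLoss: 1 }
```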
diff --git a/pages/resources/_meta.ts b/pages/resources/_meta.ts deleted file mode 100644 index a066a45b..00000000 --- a/pages/resources/_meta.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - "-- intro": { - type: "separator", - title: "Introduction", - }, - resources: "Resources and Tools", - "account-manage": "Create & Manage Accounts", - "set-identity": "On-chain Identity", - "-- bridges": { - type: "separator", - title: "Bridges & Interoperability", - }, - bridge: "Tangle dApp Bridge", - hyperlane: "Hyperlane", - router: "Router", - glacis: "Glacis", - chainport: "Chainport", - "-- developer-tools": { - type: "separator", - title: "Developer Tools", - }, - biconomy: "Biconomy", - sablier: "Sablier", - safe: "Gnosis Safe", - "useful-contracts": "Useful Contracts", -}; - -export default meta; diff --git a/pages/resources/account-manage.mdx b/pages/resources/account-manage.mdx deleted file mode 100644 index da2d1806..00000000 --- a/pages/resources/account-manage.mdx +++ /dev/null @@ -1,99 +0,0 @@ -import { CommonActions } from "../../components/CommonActions"; -import ExpandableImage from "../../components/ExpandableImage"; -import { Callout } from 'nextra/components' - -# Create and Use an Account on Tangle Network - -## Introduction - -This guide will walk you through creating and managing your account on the Tangle Network through offical apps and browser extensions, which is the most common way users interface with the network. - -## Simplifying Technical Terms - -- **Mnemonic Phrase**: A secret set of words that allows access to your funds. Think of it as a password. -- **Substrate-Based Chains**: Different blockchains built using the Substrate framework, like Tangle Network. -- **A Substrate or 'SS58' Address Format**: A type of address format used in Substrate chains, including Polkadot and Tangle Network. - -## Browser Extension Wallets: Options - -This guide will focus on Polkadot Apps browser extension, a widely trusted system created by the developers of the Polkadot and Substrate ecosystems, however, it's important to note that this browser extension does only one thing: it manages accounts and allows the signing of transactions with those accounts. **It does not inject providers for use by dapps at this early point, nor does it perform wallet functions, e.g send funds.** You will use a web interface to conduct those transactions. - -There are several wallets for browser and mobile developed for Substrate chains. Below is a list of wallets currently supported by our DApps. - -![Wallets](/images/wallets.png) - -## Step-by-Step Guide with Visual Aids - -1. **Install the Browser Extension and Open Polkadot Apps Interface** - - - Install the Polkadot extension [through the official source for your browser.](https://polkadot.js.org/extension/) - - Open the web interface for [Tangle Network](https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer) - - - - **Ensure you're connected to the correct network, Tangle Network, by accessing the above link.** - - - -2. **Open the Extension** - - - - - Click on the extension icon in your browser's toolbar. - -3. **Create a New Account** - - - - - Click on the "+" button or choose "Create new account." - -4. **Secure Your Mnemonic Phrase** - - - - - Write down the mnemonic phrase and store it in a secure location. - -5. **Set a Password** - - - Create a strong password for additional security. - -6. 
**Load the Account Page in Polkadot Apps** - - - - - Now you have a working account, and can use it in Polkadot Apps. - - - - - Go to the Account tab. - - Use your account to conduct transactions or other activities. - - - -## Emphasizing Security Tips - -- **Store Your Mnemonic Phrase Safely**: Use a secure physical location or a password manager. -- **Keep Your Password Secure**: Use a combination of letters, numbers, and symbols. - -## Frequently Asked Questions (FAQs) - -- **What if the extension isn't working?** - Check if you have the latest version of the browser and the extension. - -- **How do I recover my account if I lose my mnemonic phrase?** - Unfortunately, if you lose your mnemonic phrase, you cannot recover your account. Always keep it safe. - -- **How do I ensure the browser extension is legitimate?** - Download extensions only from official browser stores and verify the publisher's name. - -## Use Case Examples - -Once your account is set up, you can: - -- **Transfer Tokens**: Send and receive tokens within the Tangle Network. -- **Participate in Governance**: Vote on network decisions and proposals. -- **Interact with Tangle Network Features**: Access various applications and services on the Tangle Network. - -## Next Steps - - diff --git a/pages/resources/biconomy.mdx b/pages/resources/biconomy.mdx deleted file mode 100644 index e69de29b..00000000 diff --git a/pages/resources/bridge.mdx b/pages/resources/bridge.mdx deleted file mode 100644 index c22b5a34..00000000 --- a/pages/resources/bridge.mdx +++ /dev/null @@ -1,53 +0,0 @@ -import Callout from "/components/Callout"; - -# Bridge - -In order to participate in Tangle's restaking infrastructure, users need to first bridge in their assets from connected networks such as Ethereum. For this, we have a dedicated bridge DApp that allows users to easily bring their assets to Tangle and transfer them out. - -[Access Tangle DApp's Bridge page here](https://app.tangle.tools/bridge) - -## How the Bridge Works - -Bridging from EVM-based blockchains into Tangle EVM works by leveraging [Hyperlane](/resources/hyperlane) and [Router Protocol](/resources/router). Currently the Tangle dApp is configured against Hyperlane but plans to support our other bridges is in the works. - -## How to Use the Bridge - -### Step 1: Access Tangle DApp & Connect Wallet - -- Open [Tangle DApp's Bridge page](https://app.tangle.tools/bridge). -- Connect your wallet to the DApp by clicking on the **Connect Wallet** button on the top right and selecting your preferred wallet provider. - -### Step 2: Select the Source & Destination Networks - -In this example, we'll be bridging in WETH from Holesky to Tangle Testnet EVM. Select the source network as Holesky and the destination network as Tangle Testnet EVM. - -![Select Source & Destination Networks](/images/restake/bridge/select-networks.png) - -### Step 3: Fill in Details - -- Enter the amount of WETH you'd like to bridge in. -- Enter the recipient address. This is the address on Tangle where the bridged assets will be deposited. If transferring into Tangle EVM (like in this example), this should be an EVM address. - - -Ensure that the recipient address entered is correct to avoid losing your funds. We recommend sending a small amount first to get comfortable with the process. - - -- Click on the **Transfer** button. - -### Step 4: Perform the Transaction - -- After clicking on the **Transfer** button, a confirmation dialog will appear. 
Review the details & fees, and click on the **Confirm** button to initiate the transaction. -- After a few seconds, the transaction dialog from your wallet provider (such as MetaMask) will appear. The bridging process consists of two transations: one to approve the bridge contract to spend your WETH, and the other to interact with the bridge contract. Review all details and confirm the first transaction to continue. - -![MetaMask Transaction 1 - Approve Spending](/images/restake/bridge/metamask-tx-1.png) - -- After the first transaction is confirmed, a second transaction confirmation dialog will automatically appear. This is the transaction used to interact with the bridge smart contract. Review all details and confirm the transaction to complete the bridging process. - -![MetaMask Transaction 2 - Interact with the Smart Contract](/images/restake/bridge/metamask-tx-2.png) - -### Step 5: Monitor Transaction Progress - -- Once the second transaction is confirmed, you can monitor the progress of the bridging process right from the DApp. A small toast notification will automatically appear on the top right of the screen with the transaction details. -- Once you see the **Executed** status, the bridging process is complete. You can also use the [Tangle Testnet's EVM explorer](https://testnet-explorer.tangle.tools/) to find and track the transaction. Check the [Resources and Tools page](/resources) for other explorers. - -![Transaction Status Toast Notification](/images/restake/bridge/tx-status-toast.png) diff --git a/pages/resources/chainport.mdx b/pages/resources/chainport.mdx deleted file mode 100644 index e69de29b..00000000 diff --git a/pages/resources/glacis.mdx b/pages/resources/glacis.mdx deleted file mode 100644 index b4991ff1..00000000 --- a/pages/resources/glacis.mdx +++ /dev/null @@ -1,12 +0,0 @@ -# Glacis Deployments - -[Glacis](https://glacislabs.com/) is a bridge-agnostic protocol that simplifies cross-chain operations by providing secure, reliable message passing and transaction execution across different blockchain networks. It features built-in security verification, intelligent routing, and flexible delivery guarantees. - -## Core Contracts - -Below are the core contract addresses for Glacis deployed on the Tangle mainnet. You can view each contract on our [Blockscout Mainnet Explorer](https://explorer.tangle.tools/). - -| Contract | Address | -| ------------------------ | -------------------------------------------------------------------------------------------------------------------------------- | -| **Glacis Router** | [`0x68E70e39d4d6A072644E68106678971103A4E044`](https://explorer.tangle.tools/address/0x68E70e39d4d6A072644E68106678971103A4E044) | -| **Glacis Sample Client** | [`0xD2bE908f2e24C14Cd24b80A7A6d093Ee2a740A6A`](https://explorer.tangle.tools/address/0xD2bE908f2e24C14Cd24b80A7A6d093Ee2a740A6A) | diff --git a/pages/resources/hyperlane.mdx b/pages/resources/hyperlane.mdx deleted file mode 100644 index 8fd7f702..00000000 --- a/pages/resources/hyperlane.mdx +++ /dev/null @@ -1,61 +0,0 @@ -import { TokenContracts } from "../../components/TokenContracts"; - -# Hyperlane Deployments - -Hyperlane is a protocol for seamless cross-chain communication and interoperability, enabling decentralized applications (dApps) to operate across multiple blockchain networks. It features interchain messaging and routing for secure data transmission. Learn more on the [Hyperlane GitHub](https://github.com/hyperlane-xyz) and explore its [documentation](https://docs.hyperlane.xyz/). 
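For developers who want to interact with these deployments programmatically, the snippet below sketches how the Mailbox listed in the mainnet table might be queried with viem. The `quoteDispatch` signature, the destination domain, the recipient, and the `RPC_URL` variable are all assumptions based on Hyperlane's public interface and should be verified against the Hyperlane documentation before use.

```typescript
import assert from "node:assert";
import { createPublicClient, http, parseAbi, pad, toHex } from "viem";

assert(process.env.RPC_URL, "RPC_URL is not set");

// Assumed subset of Hyperlane's IMailbox interface -- verify against the Hyperlane docs.
const mailboxAbi = parseAbi([
  "function quoteDispatch(uint32 destination, bytes32 recipient, bytes body) view returns (uint256)",
]);

// Mainnet Mailbox address from the table below.
const MAILBOX = "0x2f2aFaE1139Ce54feFC03593FeE8AB2aDF4a85A7" as const;

async function quoteExampleDispatch() {
  const client = createPublicClient({ transport: http(process.env.RPC_URL) });

  const fee = await client.readContract({
    address: MAILBOX,
    abi: mailboxAbi,
    functionName: "quoteDispatch",
    args: [
      1, // example destination domain (Ethereum mainnet in Hyperlane's registry)
      pad("0x1111111111111111111111111111111111111111", { size: 32 }), // placeholder recipient
      toHex("hello from Tangle"), // arbitrary message body
    ],
  });
  console.log(`Quoted interchain gas fee: ${fee} wei`);
}

quoteExampleDispatch().catch(console.error);
```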
- -## Mainnet Deployment Contracts - -Below are the addresses for the various contracts deployed on the mainnet for Hyperlane. You can view each contract on our [Blockscout Mainnet Explorer](https://explorer.tangle.tools/). - - - -## Hyperlane Core Contracts - -| Contract Name | Address | -| ---------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | -| **Aggregation Hook** | [`0xDC995884ec53b6Bc809ed614f5E92084600002ed`](https://explorer.tangle.tools/address/0xDC995884ec53b6Bc809ed614f5E92084600002ed) | -| **Domain Routing ISM** | [`0xaDc0cB48E8DB81855A930C0C1165ea3dCe4Ba5C7`](https://explorer.tangle.tools/address/0xaDc0cB48E8DB81855A930C0C1165ea3dCe4Ba5C7) | -| **Domain Routing ISM Factory** | [`0x1052eF3419f26Bec74Ed7CEf4a4FA6812Bc09908`](https://explorer.tangle.tools/address/0x1052eF3419f26Bec74Ed7CEf4a4FA6812Bc09908) | -| **Fallback Routing Hook** | [`0xd21192429df453021e896f2897Dc8B1167DD61E5`](https://explorer.tangle.tools/address/0xd21192429df453021e896f2897Dc8B1167DD61E5) | -| **Interchain Account ISM** | [`0x45285463352c53a481e882cD5E2AF2E25BBdAd0D`](https://explorer.tangle.tools/address/0x45285463352c53a481e882cD5E2AF2E25BBdAd0D) | -| **Interchain Account Router** | [`0x67F36550b73B731e5b2FC44E4F8f250d89c87bD6`](https://explorer.tangle.tools/address/0x67F36550b73B731e5b2FC44E4F8f250d89c87bD6) | -| **Interchain Gas Paymaster** | [`0x9844aFFaBE17c37F791ff99ABa58B0FbB75e22AF`](https://explorer.tangle.tools/address/0x9844aFFaBE17c37F791ff99ABa58B0FbB75e22AF) | -| **Interchain Security Module** | [`0x336306ADB3c510A318107c01D109D2072c7abB6B`](https://explorer.tangle.tools/address/0x336306ADB3c510A318107c01D109D2072c7abB6B) | -| **Mailbox** | [`0x2f2aFaE1139Ce54feFC03593FeE8AB2aDF4a85A7`](https://explorer.tangle.tools/address/0x2f2aFaE1139Ce54feFC03593FeE8AB2aDF4a85A7) | -| **Merkle Tree Hook** | [`0xF5da68b2577EF5C0A0D98aA2a58483a68C2f232a`](https://explorer.tangle.tools/address/0xF5da68b2577EF5C0A0D98aA2a58483a68C2f232a) | -| **Pausable Hook** | [`0x61594D2cA900C44ab51d07776465397FefC643C6`](https://explorer.tangle.tools/address/0x61594D2cA900C44ab51d07776465397FefC643C6) | -| **Pausable ISM** | [`0x5d69BC38eF3eDb491c0b7186BEc4eC45c4013f93`](https://explorer.tangle.tools/address/0x5d69BC38eF3eDb491c0b7186BEc4eC45c4013f93) | -| **Protocol Fee** | [`0x4E55aDA3ef1942049EA43E904EB01F4A0a9c39bd`](https://explorer.tangle.tools/address/0x4E55aDA3ef1942049EA43E904EB01F4A0a9c39bd) | -| **Proxy Admin** | [`0x0761b0827849abbf7b0cC09CE14e1C93D87f5004`](https://explorer.tangle.tools/address/0x0761b0827849abbf7b0cC09CE14e1C93D87f5004) | -| **Static Aggregation Hook Factory** | [`0xEb9FcFDC9EfDC17c1EC5E1dc085B98485da213D6`](https://explorer.tangle.tools/address/0xEb9FcFDC9EfDC17c1EC5E1dc085B98485da213D6) | -| **Static Aggregation ISM** | [`0xB0525d808721426c56377469B92db16857384deF`](https://explorer.tangle.tools/address/0xB0525d808721426c56377469B92db16857384deF) | -| **Static Aggregation ISM Factory** | [`0x8F7454AC98228f3504Bb91eA3D8Adafe6406110A`](https://explorer.tangle.tools/address/0x8F7454AC98228f3504Bb91eA3D8Adafe6406110A) | -| **Static Merkle Root Multisig ISM Factory** | [`0x2C1FAbEcd7bFBdEBF27CcdB67baADB38b6Df90fC`](https://explorer.tangle.tools/address/0x2C1FAbEcd7bFBdEBF27CcdB67baADB38b6Df90fC) | -| **Static Merkle Root Weighted Multisig ISM Factory** | 
[`0x148CF67B8A242c1360bb2C93fCe203EC4d4f9B56`](https://explorer.tangle.tools/address/0x148CF67B8A242c1360bb2C93fCe203EC4d4f9B56) | -| **Static Message ID Multisig ISM Factory** | [`0x8b83fefd896fAa52057798f6426E9f0B080FCCcE`](https://explorer.tangle.tools/address/0x8b83fefd896fAa52057798f6426E9f0B080FCCcE) | -| **Static Message ID Weighted Multisig ISM Factory** | [`0xcd849e612Aaa138f03698C3Edb42a34117BFF631`](https://explorer.tangle.tools/address/0xcd849e612Aaa138f03698C3Edb42a34117BFF631) | -| **Storage Gas Oracle** | [`0x7b2e996742fA42d223652A344252B725D1bC428C`](https://explorer.tangle.tools/address/0x7b2e996742fA42d223652A344252B725D1bC428C) | -| **Test Recipient** | [`0x2c61Cda929e4e2174cb10cd8e2724A9ceaD62E67`](https://explorer.tangle.tools/address/0x2c61Cda929e4e2174cb10cd8e2724A9ceaD62E67) | -| **Timelock Controller** | [`0x0000000000000000000000000000000000000000`](https://explorer.tangle.tools/address/0x0000000000000000000000000000000000000000) | -| **Validator Announce** | [`0x062200d92dF6bB7bA89Ce4D6800110450f94784e`](https://explorer.tangle.tools/address/0x062200d92dF6bB7bA89Ce4D6800110450f94784e) | - -## Testnet Deployment Contracts - -Below are the addresses for the various contracts deployed on the testnet for Hyperlane. You can view each contract on our [Blockscout Testnet Explorer](https://testnet-explorer.tangle.tools/). - -| Contract Name | Address | -| ---------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------- | -| **Domain Routing ISM Factory** | [`0x89dC5328147BA17aF9feb76DbEdb1182916f2438`](https://testnet-explorer.tangle.tools/address/0x89dC5328147BA17aF9feb76DbEdb1182916f2438) | -| **Interchain Account ISM** | [`0xa464A27Db7Dd67651681147b8bb22eFfA2e7FC76`](https://testnet-explorer.tangle.tools/address/0xa464A27Db7Dd67651681147b8bb22eFfA2e7FC76) | -| **Interchain Account Router** | [`0xF26bd3FDF7D84a9A2800fF6e992E7075f5dBA6C0`](https://testnet-explorer.tangle.tools/address/0xF26bd3FDF7D84a9A2800fF6e992E7075f5dBA6C0) | -| **Mailbox** | [`0x0096a17ff0a55D35DfE9D98BEA2104Ff7b830E23`](https://testnet-explorer.tangle.tools/address/0x0096a17ff0a55D35DfE9D98BEA2104Ff7b830E23) | -| **Proxy Admin** | [`0xC40785D391dcC7Cf77ba7C54f0C8cF8F60877B14`](https://testnet-explorer.tangle.tools/address/0xC40785D391dcC7Cf77ba7C54f0C8cF8F60877B14) | -| **Static Aggregation Hook Factory** | [`0xB2A23781c75F06767d8F8BAe382d78f989C492c6`](https://testnet-explorer.tangle.tools/address/0xB2A23781c75F06767d8F8BAe382d78f989C492c6) | -| **Static Aggregation ISM Factory** | [`0x6BB99502D4867aA401E337315D24fdc3f783388D`](https://testnet-explorer.tangle.tools/address/0x6BB99502D4867aA401E337315D24fdc3f783388D) | -| **Static Merkle Root Multisig ISM Factory** | [`0xcFCC8EdE6aBf99EcDE0C818DA7357f7206DE08e9`](https://testnet-explorer.tangle.tools/address/0xcFCC8EdE6aBf99EcDE0C818DA7357f7206DE08e9) | -| **Static Merkle Root Weighted Multisig ISM Factory** | [`0x380d7E7b20E5Df5893a44E2328732fF1a9525818`](https://testnet-explorer.tangle.tools/address/0x380d7E7b20E5Df5893a44E2328732fF1a9525818) | -| **Static Message ID Multisig ISM Factory** | [`0x315480F385d416c0723FbE2858c7b8Dd7b03A9B4`](https://testnet-explorer.tangle.tools/address/0x315480F385d416c0723FbE2858c7b8Dd7b03A9B4) | -| **Static Message ID Weighted Multisig ISM Factory** | [`0x6245cdDe964B65d9ee2a40f802cBd88842205C61`](https://testnet-explorer.tangle.tools/address/0x6245cdDe964B65d9ee2a40f802cBd88842205C61) | 
-| **Test Recipient** | [`0x384d44f775A5f273d6c8e2A3740A8238598f1557`](https://testnet-explorer.tangle.tools/address/0x384d44f775A5f273d6c8e2A3740A8238598f1557) | -| **Validator Announce** | [`0x24F4d9fF532B05844e6c984107899d944812540B`](https://testnet-explorer.tangle.tools/address/0x24F4d9fF532B05844e6c984107899d944812540B) | diff --git a/pages/resources/resources.mdx b/pages/resources/resources.mdx deleted file mode 100644 index 3a933d18..00000000 --- a/pages/resources/resources.mdx +++ /dev/null @@ -1,6 +0,0 @@ -import NetworkInfo from "../../components/NetworkResources" -import WalletTable from "../../components/WalletTable" - -# Resources and Tools - - diff --git a/pages/resources/router.mdx b/pages/resources/router.mdx deleted file mode 100644 index a1e4d33e..00000000 --- a/pages/resources/router.mdx +++ /dev/null @@ -1,14 +0,0 @@ -# Router Deployments - -Router is a protocol designed to facilitate seamless cross-chain transactions and interoperability, allowing decentralized applications (dApps) to operate across multiple blockchain networks. It provides efficient routing and liquidity management for secure and fast transactions. Explore more on the [Router Nitro App](https://app.routernitro.com/). - -## Mainnet Supported Tokens - -Below are the token addresses supported on the Tangle mainnet for Router. You can view each token on our [Blockscout Mainnet Explorer](https://explorer.tangle.tools/). - -| Token Name | Symbol | Address | -| ----------------- | ------------------------------------------------------------------------------------------------------------------------ | -------------------------------------------------------------------------------------------------------------------------------- | -| **Tether USD** | USDT USDT | [`0xb6dc6c8b71e88642cead3be1025565a9ee74d1c6`](https://explorer.tangle.tools/address/0xb6dc6c8b71e88642cead3be1025565a9ee74d1c6) | -| **USD Coin** | USDC USDC | [`0x97eec1c29f745dc7c267f90292aa663d997a601d`](https://explorer.tangle.tools/address/0x97eec1c29f745dc7c267f90292aa663d997a601d) | -| **Wrapped Ether** | WETH WETH | [`0x01b4ce0d48ce91eb6bcaf5db33870c65d641b894`](https://explorer.tangle.tools/address/0x01b4ce0d48ce91eb6bcaf5db33870c65d641b894) | -| **Avail** | AVAIL AVAIL | [`0xb8a09939F27908505C4241C3c251f3DA33a207A9`](https://explorer.tangle.tools/address/0xb8a09939F27908505C4241C3c251f3DA33a207A9) | diff --git a/pages/resources/sablier.mdx b/pages/resources/sablier.mdx deleted file mode 100644 index 1d2eed60..00000000 --- a/pages/resources/sablier.mdx +++ /dev/null @@ -1,21 +0,0 @@ -# Sablier Deployments - -Sablier is a protocol for real-time finance on EVM blockchains, enabling programmable token streams and vesting. It allows for continuous payments where tokens are streamed over time rather than transferred in one go. - -## Core Contracts - -Below are the core contract addresses for Sablier V2 deployed on the Tangle mainnet. You can view each contract on our [Blockscout Mainnet Explorer](https://explorer.tangle.tools/). 
- -| Contract | Address | Version | -| --------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------- | -| **SablierV2LockupDynamic** | [`0x946654AB30Dd6eD10236C89f2C8B2719df653691`](https://explorer.tangle.tools/address/0x946654AB30Dd6eD10236C89f2C8B2719df653691) | [core-v1.2.0](https://github.com/sablier-labs/deployments/tree/main/lockup/v1.2.0/core) | -| **SablierV2LockupLinear** | [`0xAC19F4181E58efb7094e0cb4e1BB18c79F6AAdf4`](https://explorer.tangle.tools/address/0xAC19F4181E58efb7094e0cb4e1BB18c79F6AAdf4) | [core-v1.2.0](https://github.com/sablier-labs/deployments/tree/main/lockup/v1.2.0/core) | -| **SablierV2LockupTranched** | [`0x63B92F7E2f69877184C955E63B9D8Dff55e52e14`](https://explorer.tangle.tools/address/0x63B92F7E2f69877184C955E63B9D8Dff55e52e14) | [core-v1.2.0](https://github.com/sablier-labs/deployments/tree/main/lockup/v1.2.0/core) | -| **SablierV2NFTDescriptor** | [`0xe785101Cb228693cc3EFdCd5d637fEf6A6Ff7259`](https://explorer.tangle.tools/address/0xe785101Cb228693cc3EFdCd5d637fEf6A6Ff7259) | [core-v1.2.0](https://github.com/sablier-labs/deployments/tree/main/lockup/v1.2.0/core) | - -## Periphery Contracts - -| Contract | Address | Version | -| -------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | -| **SablierV2BatchLockup** | [`0x28D116d7e917756310986C4207eA54183fcba06A`](https://explorer.tangle.tools/address/0x28D116d7e917756310986C4207eA54183fcba06A) | [periphery-v1.2.0](https://github.com/sablier-labs/deployments/tree/main/lockup/v1.2.0/periphery) | -| **SablierV2MerkleLockupFactory** | [`0x5e73bb96493C10919204045fCdb639D35ad859f8`](https://explorer.tangle.tools/address/0x5e73bb96493C10919204045fCdb639D35ad859f8) | [periphery-v1.2.0](https://github.com/sablier-labs/deployments/tree/main/lockup/v1.2.0/periphery) | diff --git a/pages/resources/safe.mdx b/pages/resources/safe.mdx deleted file mode 100644 index 32188c77..00000000 --- a/pages/resources/safe.mdx +++ /dev/null @@ -1,14 +0,0 @@ -# Safe Deployments - -Safe (formerly Gnosis Safe) is a smart contract wallet focused on secure management of digital assets. It provides multi-signature functionality and other advanced security features for managing digital assets on EVM-compatible blockchains. Safe enables users to require multiple signatures to execute transactions, enhancing security for digital asset management. - -Safe contracts deployed deployed and a [Safe UI is hosted by Den](https://safe.onchainden.com/welcome?chain=tnt). - -## Contract Deployments - -Below are the key contract deployments for Safe on the Tangle network. You can view each contract on our [Blockscout Explorer](https://explorer.tangle.tools/). 
- -| Contract Name | Address | -| ----------------------- | -------------------------------------------------------------------------------------------------------------------------------- | -| **Safe Implementation** | [`0xfb1bffC9d739B8D520DaF37dF666da4C687191EA`](https://explorer.tangle.tools/address/0xfb1bffC9d739B8D520DaF37dF666da4C687191EA) | -| **Proxy Factory** | [`0xC22834581EbC8527d974F8a1c97E1bEA4EF910BC`](https://explorer.tangle.tools/address/0xC22834581EbC8527d974F8a1c97E1bEA4EF910BC) | diff --git a/pages/resources/set-identity.mdx b/pages/resources/set-identity.mdx deleted file mode 100644 index 841588d4..00000000 --- a/pages/resources/set-identity.mdx +++ /dev/null @@ -1,65 +0,0 @@ -# On-Chain Identity on the Tangle Network - -## Introduction - -The Tangle Network provides a naming system that allows participants to add personal information to their on-chain account and subsequently ask for verification of this information by registrars. This feature enhances trust and security within the ecosystem by allowing network participants to verify each other's identities through on-chain data. - -## Understanding Identity on the Tangle Network - -### Setting an Identity - -Users must reserve funds in a bond to store their information on-chain: ~1-6 TNT and some TNT per each field beyond the legal name. These funds are locked, not spent - they are returned when the identity is cleared. - -### Judgements - -After a user injects their information on-chain, they can request judgement from a registrar. Users declare a maximum fee that they are willing to pay for judgement, and registrars whose fee is below that amount can provide a judgement. - -Registrars can select up to six levels of confidence in their attestation: - -1. Unknown: The default value, no judgement made yet. -2. Reasonable: The data appears reasonable, but no in-depth checks (e.g., formal KYC process) were performed. -3. Known Good: The registrar has certified that the information is correct (this step involves verification of state-issued identity documents). -4. Out of Date: The information used to be good but is now out of date. -5. Low Quality: The information is low quality or imprecise but can be fixed with an update. -6. Erroneous: The information is erroneous and may indicate malicious intent. - -A seventh state, "fee paid", is for when a user has requested judgement and it is in progress. Information that is in this state or "erroneous" is "sticky" and cannot be modified; it can only be removed by the complete removal of the identity. - -### Registrars - -Registrars can set a fee for their services and limit their attestation to certain fields. For example, a registrar could charge [placeholder fee] TNT to verify one's legal name, email, and GPG key. - -There is currently 1 registrar on the Tangle Network: - -1. Registrar `0`: - - Account: `tgDhkcoQaPqWM9NSKr8WjyRmy2gFCnt1tym4RuUR8SUNEH5vD` - - Fee: 0 TNT - -### Sub-Identities - -Users can also link accounts by setting "sub-accounts", each with its own identity, under a primary account. The system reserves a bond for each sub-account. An example of how you might use this would be a validation company running multiple validators. - -An account can have a maximum of 100 sub-accounts. Note that a deposit of 1 TNT is required for every sub-account. - -## Setting Your On-Chain Identity Using Polkadot.js Apps - -Follow these steps to set your on-chain identity: - -1. 
Access Polkadot.js Apps by opening your web browser and navigating to Tangle Network on [Polkadot.js Apps](https://polkadot.js.org/apps/?rpc=wss://rpc.tangle.tools#/accounts). -2. Connect your Polkadot.js extension wallet by clicking on the "Accounts" tab in the Polkadot.js Apps interface. -3. Navigate to the "Accounts" page and locate the account you wish to set an identity for. Click on the three dots on the right side to open a dropdown menu and select "Set on-chain identity." -4. Fill in the identity information form with details such as your legal name, email, Twitter handle, website, Riot (Matrix) username, etc. You do not need to fill out all fields, just the ones relevant to your identity. -5. After filling out the form, scroll down and click "Set Identity" to proceed. Your Polkadot.js extension will prompt you to sign the transaction. Review the information and fees, then sign the transaction to confirm your identity setup. - -## Verification (Optional) - -After setting your on-chain identity, you may want to get it verified by a registrar. This step is optional but adds an additional layer of trust to your identity. - -1. Navigate to the "Accounts" page. -2. Next to your account with the pending identity, you'll see a "Judgements" section. Click "Request Judgement." -3. Select a registrar from the list and follow their specific instructions for verification. -4. You can go to the #registrar channel of our Discord to confirm your request for judgement. See our [community tab on Tangle.tools](https://www.tangle.tools/) for links to Discord. - -## Conclusion - -Setting an on-chain identity on the Tangle Network is a valuable way to enhance your visibility and trustworthiness within the ecosystem. By understanding the identity system and following the steps to set and verify your identity using Polkadot.js Apps, you can take advantage of this feature. Remember, the information you provide will be publicly visible on the blockchain, so only include details you're comfortable sharing. diff --git a/pages/resources/useful-contracts.mdx b/pages/resources/useful-contracts.mdx deleted file mode 100644 index 0cc8d002..00000000 --- a/pages/resources/useful-contracts.mdx +++ /dev/null @@ -1,131 +0,0 @@ -# Useful Contracts - -## MultiSend - -This contract allows you to send identical amounts of TNT to multiple addresses in a single transaction. Useful for batching transactions for airdrop distributions. 
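For reference, here is a minimal TypeScript (viem) sketch of submitting a batched distribution through this contract; the ABI fragment mirrors the `multiSend(address[], uint256[])` function in the Solidity source below. The `RPC_URL` and `PRIVATE_KEY` environment variables, the recipient list, and the chain definition (including the assumed chain id) are illustrative placeholders, not values documented on this page.

```typescript
import { createWalletClient, defineChain, http, parseAbi, parseEther } from 'viem';
import { privateKeyToAccount } from 'viem/accounts';

// ABI fragment taken from the MultiSend source shown below.
const multiSendAbi = parseAbi([
  'function multiSend(address[] recipients, uint256[] amounts) payable',
]);

// Assumptions for illustration: the RPC_URL / PRIVATE_KEY env vars, the chain
// id, and the recipient list are placeholders; verify them before use.
const tangle = defineChain({
  id: 5845, // assumed Tangle mainnet EVM chain id
  name: 'Tangle',
  nativeCurrency: { name: 'Tangle Network Token', symbol: 'TNT', decimals: 18 },
  rpcUrls: { default: { http: [process.env.RPC_URL!] } },
});

const account = privateKeyToAccount(process.env.PRIVATE_KEY as `0x${string}`);
const client = createWalletClient({ account, chain: tangle, transport: http(process.env.RPC_URL) });

async function distribute() {
  // Placeholder recipients and their per-recipient amounts.
  const recipients: `0x${string}`[] = [
    '0x0000000000000000000000000000000000000001',
    '0x0000000000000000000000000000000000000002',
  ];
  const amounts = [parseEther('1'), parseEther('2')];

  const hash = await client.writeContract({
    address: '0x55E25dF92f6a7384844964a6e2a85fa182f8abfa', // MultiSend deployment linked below
    abi: multiSendAbi,
    functionName: 'multiSend',
    args: [recipients, amounts],
    // The contract requires msg.value to cover the sum of all amounts.
    value: amounts.reduce((acc, a) => acc + a, 0n),
  });
  console.log('multiSend tx:', hash);
}

distribute().catch(console.error);
```

Note that the transaction must send enough native TNT to cover the sum of all amounts; per the contract source below, any excess is refunded to the sender.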
- -Explorer: [0x55E25dF92f6a7384844964a6e2a85fa182f8abfa](https://explorer.tangle.tools/address/0x55E25dF92f6a7384844964a6e2a85fa182f8abfa?tab=txs) - -```solidity -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -contract MultiSend { - constructor() {} - - function multiSend(address[] calldata recipients, uint256[] calldata amounts) external payable { - require(recipients.length == amounts.length, "Arrays must be same length"); - require(recipients.length > 0, "Must provide at least one recipient"); - - uint256 total = 0; - for(uint256 i = 0; i < amounts.length; i++) { - total += amounts[i]; - } - require(msg.value >= total, "Insufficient funds sent"); - - for(uint256 i = 0; i < recipients.length; i++) { - (bool success,) = recipients[i].call{value: amounts[i]}(""); - require(success, "Transfer failed"); - } - - // Return excess ETH if any - uint256 remaining = msg.value - total; - if (remaining > 0) { - (bool success,) = msg.sender.call{value: remaining}(""); - require(success, "Failed to return remaining ETH"); - } - } -} -``` - -## Multicall3 - -Multicall3 has two main use cases: - -1. Aggregate results from multiple contract reads into a single JSON-RPC request. -2. Execute multiple state-changing calls in a single transaction. - -Read more about Multicall3 [here](https://github.com/mds1/multicall3). - -Testnet Explorer: [0xcA11bde05977b3631167028862bE2a173976CA11](https://testnet-explorer.tangle.tools/address/0xca11bde05977b3631167028862be2a173976ca11) - -Mainnet Explorer: [0xcA11bde05977b3631167028862bE2a173976CA11](https://explorer.tangle.tools/address/0xcA11bde05977b3631167028862bE2a173976CA11) - -```typescript -import assert from 'node:assert'; -import { createPublicClient, defineChain, erc20Abi, http } from 'viem'; - -assert(process.env.RPC_URL, 'RPC_URL is not set'); - -const TANGLE_TESTNET = defineChain({ - id: 3799, - name: 'Tangle EVM Testnet', - nativeCurrency: { - name: 'Tangle Native Token', - symbol: 'tTNT', - decimals: 18, - }, - rpcUrls: { - default: { - http: [process.env.RPC_URL], - }, - }, - contracts: { - multicall3: { - address: '0xcA11bde05977b3631167028862bE2a173976CA11', - blockCreated: 776767, - }, - }, -}); - -// Setup the client. -const client = createPublicClient({ - chain: TANGLE_TESTNET, - transport: http(process.env.RPC_URL), -}); - -const ERC20_ADDRESS = '0x87d95f134221D9c2b3dE15aCe58BACe4121c07B0'; - -async function example1() { - // Execute the multicall and get the erc20 metadata (name, symbol, decimals). None of these calls can fail so we set - // `allowFailure` to false. This results in each return value's type matching the type of the - // corresponding call, e.g. `0x${string}` for addresses, `bigint` for uint256, etc. 
If we set - // `allowFailure` to true then the returns types are of the following shape, using the example of - // the address return type: - // { - // error: Error; - // result?: undefined; - // status: "error"; - // } | { - // error?: undefined; - // result: `0x${string}`; - // status: "success"; - // } - const [name, symbol, decimals] = await client.multicall({ - contracts: [ - { - address: ERC20_ADDRESS, - abi: erc20Abi, - functionName: 'name', - }, - { - address: ERC20_ADDRESS, - abi: erc20Abi, - functionName: 'symbol', - }, - { - address: ERC20_ADDRESS, - abi: erc20Abi, - functionName: 'decimals', - }, - ], - allowFailure: false, - }); - - console.log( - `Token ${name} has a symbol of ${symbol} and ${decimals} decimals at address ${ERC20_ADDRESS} on ${TANGLE_TESTNET.name}`, - ); -} - -example1().catch(console.error); -``` diff --git a/pages/restake/_meta.ts b/pages/restake/_meta.ts deleted file mode 100644 index 17996508..00000000 --- a/pages/restake/_meta.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - "-- introduction": { - type: "separator", - title: "Introduction", - }, - introduction: "Overview", - "staking-intro": "Introduction to Staking", - nominator: "Nominating your TNT", - "restake-concepts": "Core concepts", - "-- restaking": { - type: "separator", - title: "Restaking", - }, - "restake-introduction": "Tangle Restaking", - incentives: "Incentives", - credits: "Tangle Credits", - how_to_restake: "How to Restake on Tangle", - restake_developers: "Developer Docs", - "-- liquid staking": { - type: "separator", - title: "Liquid Staking", - }, - "lst-concepts": "Introduction to Liquid Staking", - "lst-working": "How Liquid Staking Works", - create_a_pool: "Create a Liquid Staking Pool", - join_a_pool: "Join a Liquid Staking Pool", - "lst-rewards": "Incentives", - "lst-assets": "Supported Assets", - lst_developers: "Developer Docs", - "-- liquid restaking": { - type: "separator", - title: "Liquid Restaking", - }, - "lrt-concepts": "Introduction to Liquid Restaking", - "lrt-vs-lst": "Liquid Restaking vs. Liquid Staking", - lrt_developers: "Developer Docs", -}; - -export default meta; diff --git a/pages/restake/create_a_pool/_meta.ts b/pages/restake/create_a_pool/_meta.ts deleted file mode 100644 index b623ce2b..00000000 --- a/pages/restake/create_a_pool/_meta.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - "pool-roles": "Pool Roles", - "benefits-and-risks": "Benefits and Risks", - "lst-pool-create-tangle": "Create using Tangle DApp", - "lst-pool-create": "Create using PolkadotJS", -}; - -export default meta; diff --git a/pages/restake/create_a_pool/benefits-and-risks.mdx b/pages/restake/create_a_pool/benefits-and-risks.mdx deleted file mode 100644 index 77171e07..00000000 --- a/pages/restake/create_a_pool/benefits-and-risks.mdx +++ /dev/null @@ -1,28 +0,0 @@ -## Benefits of Creating a Liquid Staking Pool - -### 1. **Earning Commission** - -- **Benefit:** As the root or operator of the pool, you have the ability to set commission rates on staking rewards. This commission is taken from the pool’s rewards and serves as compensation for managing and maintaining the pool. -- **Impact:** The ability to earn a commission makes creating and managing a pool financially rewarding. Commission earnings grow as the pool attracts more participants. - -### 2. **Building a Community** - -- **Benefit:** A well-managed staking pool can attract a large number of participants. 
As the pool creator, you can build a strong community around your pool, fostering trust and loyalty. -- **Impact:** A loyal community of participants ensures a steady inflow of assets into the pool, making it sustainable in the long term. This can also enhance the pool’s reputation and attract more participants. - -### 3. **Increased Security and Decentralization** - -- **Benefit:** By creating a liquid staking pool, you contribute to the overall security and decentralization of the Tangle Network. Larger pools with more participants help secure the network by distributing stake across multiple validators. -- **Impact:** A decentralized network is more secure and resilient, benefiting the entire ecosystem. As a pool creator, you play a role in strengthening the Tangle Network’s security and robustness. - -## Risks of Creating and Managing a Pool - -### 1. **Validator Performance Risk** - -- **Risk:** If the pool’s nominator selects poor-performing validators, the pool could lose rewards or face slashing penalties. This could lead to a loss of trust among participants and damage the pool’s reputation. -- **Mitigation:** To minimize this risk, pool operators should conduct thorough research and choose validators with a proven track record of reliability and performance. - -### 2. **Security Risks** - -- **Risk:** Pools can be targeted by malicious actors who seek to exploit vulnerabilities in pool management or validator selection. -- **Mitigation:** Pool operators should implement strong security practices, such as regularly auditing pool operations, ensuring proper governance mechanisms, and closely monitoring participant behavior. diff --git a/pages/restake/create_a_pool/lst-pool-create-tangle.mdx b/pages/restake/create_a_pool/lst-pool-create-tangle.mdx deleted file mode 100644 index ac61f48f..00000000 --- a/pages/restake/create_a_pool/lst-pool-create-tangle.mdx +++ /dev/null @@ -1,29 +0,0 @@ -## How to Create a Liquid Staking Pool Using Tangle DApp - -### Step 1: Access Tangle DApp & Connect Wallet - -- Open [Tangle DApp's Liquid Staking page](https://app.tangle.tools/liquid-staking). -- Connect your wallet to the DApp by clicking on the **Connect Wallet** button on the top right and selecting your preferred wallet provider. -- Connect to the desired Tangle network by selecting the appropriate network from the dropdown list. Note that liquid staking pools are only available on the networks in which they are created. - -![Selecting a Liquid Staking Network](/images/liquid-staking/select-ls-network.png) - -### Step 2: Configure & Create a Pool - -- Once on the liquid staking page, scroll down until you see the **CREATE POOL** button. Click on it to bring up a form where you can configure your new pool. - -![Create Pool Button](/images/liquid-staking/create-pool-tangle/create-pool-btn.png) - -- Choose a brief, descriptive name for your pool so that other users can easily identify it, enter an initial bond amount, and set the pool's roles (Root, Nominator, Bouncer). -- By default, all of the roles will be assigned to the active wallet's address. You can change these addresses to other accounts if needed. -- Some details such as the pool's commission and nominations can be set **after** the pool is created: You'll be able to easily manage it under the **My Pools** tab. -- Once you've entered and verified all the details, click on the **Create Pool** button to initiate the transaction. 
- -![Pool Configuration Form](/images/liquid-staking/create-pool-tangle/create-pool-form.png) - -### Step 3: Manage Your Pool - -- After you've created your pool, you can view & manage it under the **My Pools** tab. Here, you can set the commission rate, manage nominations, and view the pool's performance. If you don't see the pool you've just created right away, try refreshing the page. -- Note that the `MANAGE` button or some of its dropdown options will only be visible **if the active account has the corresponding role**. For example, only the account with the nominator role will be able to see the `Update Nominations` button on the dropdown. If you've set all roles to the same account, you will have all the management options available. - -![My Pools Tab](/images/liquid-staking/my-pools.png) diff --git a/pages/restake/create_a_pool/lst-pool-create.mdx b/pages/restake/create_a_pool/lst-pool-create.mdx deleted file mode 100644 index ac610180..00000000 --- a/pages/restake/create_a_pool/lst-pool-create.mdx +++ /dev/null @@ -1,60 +0,0 @@ -## How to Create a Liquid Staking Pool Using PolkadotJS - -### Step 1: Access the PolkadotJS Interface - -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the desired Tangle network by selecting the appropriate network from the left dropdown list. Note that liquid staking pools are only available on the networks in which they are created. -- If the network you're looking for is not listed in the dropdown, enter its RPC endpoint under the `DEVELOPMENT` → `custom endpoint` input. - -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: - -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer - -### Step 2: Check Pool Creation Requirements - -- In order to prevent spam, pool creation may require a minimum bond amount to be deposited. Ensure that you have enough funds in your account to cover this requirement. -- These funds will not be forfeited; they will be bonded to the pool and will be returned to you when the pool is dissolved. - -![PolkadotJS UI: Check Min. Bond](/images/liquid-staking/create-pool-polkadotjs/check-min-create-bond.png) - -### Step 3: Create a Pool - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. - -![PolkadotJS Extrinsics](/images/extrinsic.png) - -- Under the **lst** section, select **create(...)**. -- Enter the required details such as: - - **Initial deposit amount**: The amount of tokens you are contributing to the pool. This is denominated in the lowest unit (18 decimals). - - **Root account address**: The account that will be responsible for managing the pool. - - **Nominator account address**: The account that will nominate validators for the pool. - - **Bouncer account address**: The account responsible for managing participant entries and exits. - - **Pool Name**: Choose a brief and descriptive name for your pool. It does not need to be unique. - -You can use the same account for the root, nominator, and bouncer roles, or a different account for each. - -![PolkadotJS Create Pool](/images/liquid-staking/create-pool-polkadotjs/create.png) - -Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee and pool deposit.
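If you prefer to script this step rather than use the Apps UI, a minimal @polkadot/api sketch of the same `lst.create(...)` extrinsic is shown below. The parameter order is assumed from the fields listed above, and the dev account is a placeholder; confirm the argument encoding against the live chain metadata before signing.

```typescript
import { ApiPromise, WsProvider } from '@polkadot/api';
import { Keyring } from '@polkadot/keyring';

async function createPool() {
  // Testnet endpoint from the links above; swap for mainnet as needed.
  const api = await ApiPromise.create({ provider: new WsProvider('wss://testnet-rpc.tangle.tools') });

  const keyring = new Keyring({ type: 'sr25519' });
  const signer = keyring.addFromUri('//Alice'); // placeholder dev account

  const deposit = 10n * 10n ** 18n; // initial deposit in the lowest unit (18 decimals)
  const root = signer.address;
  const nominator = signer.address;
  const bouncer = signer.address;
  const name = 'My Example Pool';

  // Mirrors the lst.create(...) extrinsic described above; the exact parameter
  // order and types should be confirmed against the chain metadata.
  const tx = api.tx.lst.create(deposit, root, nominator, bouncer, name);
  await tx.signAndSend(signer, ({ status }) => {
    if (status.isInBlock) console.log(`Included at block hash ${status.asInBlock.toHex()}`);
  });
}

createPool().catch(console.error);
```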
- -![PolkadotJS Transaction](/images/liquid-staking/create-pool-polkadotjs/sign.png) - -If successful, you should see the following confirmation toast notification: - -![PolkadotJS Pool Created](/images/liquid-staking/create-pool-polkadotjs/inblock.png) - -Let's break down the events. Navigate to the **Network** → **Explorer** tab, and you should see the following events: - -![PolkadotJS Events](/images/liquid-staking/create-pool-polkadotjs/events.png) - -- **lst.Bonded**: Your initial deposit was bonded to the pool. -- **lst.Created**: Pool creation confirmation, along with the pool's unique ID. -- **assets.Issued**: A new asset (LST) was created and issued the staked tokens to the creator. - -### Step 4: Configure Commission and Roles (Optional) - -- Set the commission rate you wish to charge as the pool manager by navigating to the **Developer** → **Extrinsics** tab, and under the **lst** section, selecting **setCommission(...)**. -- Note that this amount is in **perbill** (1/1,000,000) units, so a commission rate of 10% would be entered as `100 000`. Another example, a commission rate of 12.34% would be entered as `123 400`. - -![PolkadotJS Set Commission](/images/liquid-staking/create-pool-polkadotjs/commission.png) diff --git a/pages/restake/create_a_pool/pool-roles.mdx b/pages/restake/create_a_pool/pool-roles.mdx deleted file mode 100644 index b045449c..00000000 --- a/pages/restake/create_a_pool/pool-roles.mdx +++ /dev/null @@ -1,27 +0,0 @@ -## Roles in a Liquid Staking Pool - -There are several key roles involved in creating and maintaining a liquid staking pool: - -### 1. **Root** - -- **Role Description:** The root is the administrator of the pool with full control over its operations. They are responsible for setting and updating pool roles and managing critical operations like commission setup and governance decisions. -- **Benefits:** As the root, you have the ability to control the overall direction of the pool, set commission rates, and earn a portion of the staking rewards through commissions. -- **Risks:** The root must ensure that the pool operates fairly and transparently to maintain the trust of the participants. Mismanagement or excessive fees could drive users away. - -### 2. **Nominator** - -- **Role Description:** The nominator is responsible for selecting validators on behalf of the pool. Their role is critical in optimizing rewards for the pool members by choosing high-performing and secure validators. -- **Benefits:** The nominator can enhance the pool’s performance by choosing the best validators, maximizing rewards for participants. -- **Risks:** Poor validator selection could result in missed rewards or penalties, impacting the overall pool performance and the trust of participants. - -### 3. **Bouncer** - -- **Role Description:** The bouncer is responsible for managing the entry and exit of participants into the pool. They can block or allow participants, as well as manage pool access settings. -- **Benefits:** This role ensures the integrity of the pool by maintaining strict control over its participants. -- **Risks:** If the bouncer mismanages pool access or fails to protect against malicious actors, it can result in pool exploitation or security vulnerabilities. - -### 4. **Depositor** - -- **Role Description:** The depositor is the individual or entity that creates the pool by making an initial deposit. This role is critical because the depositor effectively initiates the pool’s staking process. 
-- **Benefits:** As the creator of the pool, the depositor can dictate initial terms and pool configurations. They also have the opportunity to collect significant rewards from the pool’s success. -- **Risks:** The depositor’s funds are locked as long as the pool remains operational, limiting liquidity. If the pool fails to attract enough participants or operate effectively, the depositor may face opportunity costs. diff --git a/pages/restake/credits/_meta.ts b/pages/restake/credits/_meta.ts deleted file mode 100644 index 09c936d3..00000000 --- a/pages/restake/credits/_meta.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - index: "Overview", - claiming: "How to Claim Credits", - precompile: "Credits Precompile", -}; - -export default meta; diff --git a/pages/restake/credits/claiming.mdx b/pages/restake/credits/claiming.mdx deleted file mode 100644 index 4485516a..00000000 --- a/pages/restake/credits/claiming.mdx +++ /dev/null @@ -1,82 +0,0 @@ ---- -title: How to Claim Credits -description: Step-by-step guide to claiming your earned credits from the Cloud Credits pallet ---- - -# How to Claim Credits - -This guide walks you through the process of claiming your earned credits from TNT staking through the Cloud Credits pallet. - -## Prerequisites - -- **Active TNT Stake**: You must have TNT staked through Tangle's multi-asset delegation system -- **Minimum Stake**: Your stake must meet the lowest tier threshold to earn credits -- **Off-chain Account**: An identifier linking your claim to off-chain credit management - -## Claiming via Polkadot.js Apps - -### Step 1: Connect to Tangle Network - -1. Go to [Polkadot.js Apps](https://polkadot.js.org/apps) -2. Connect to the Tangle Network endpoint -3. Import or connect your account with TNT stake - -### Step 2: Navigate to Extrinsics - -1. Go to **Developer** → **Extrinsics** -2. Select your account from the dropdown -3. Choose `cloudCredits` from the pallet list -4. Select `claimCredits` from the call dropdown - -### Step 3: Fill Parameters - -**Amount to Claim (`amount_to_claim`):** - -- Enter the number of credits you want to claim -- Must not exceed your accrued amount within the claim window -- Use the RPC query (see below) to check your maximum claimable amount - -**Off-chain Account ID (`offchain_account_id`):** - -- Your identifier for linking to off-chain credit systems -- Maximum length is configurable (check pallet constants) -- Keep this consistent across claims - -![PolkadotJS Claim](/images/claim.png) - -### Step 4: Submit Transaction - -1. Click **Submit Transaction** -2. Enter your password if prompted -3. Confirm the transaction -4. Monitor the transaction status in the **Network** → **Explorer** - -## Alternative: Asset-Specific Claims - -For claiming credits from specific asset stakes: - -1. Choose `claimCreditsWithAsset` instead of `claimCredits` -2. Add the **Asset ID** parameter for the specific asset -3. 
The same amount and off-chain account ID rules apply - -![PolkadotJS Claim](/images/claim-assets.png) - -## Events and Monitoring - -### CreditsClaimed Event - -When you successfully claim: - -``` -CreditsClaimed { - who: Your_Account_ID, - amount_claimed: Claimed_Amount, - offchain_account_id: Your_Offchain_ID -} -``` - -## Related Documentation - -- [Credits Overview](/restake/credits) - Understanding the Cloud Credits pallet -- [Credits Precompile](/restake/credits/precompile) - Technical documentation for developers -- [How to Restake](/restake/how_to_restake) - Guide to staking TNT tokens diff --git a/pages/restake/credits/index.mdx b/pages/restake/credits/index.mdx deleted file mode 100644 index 18e6aa24..00000000 --- a/pages/restake/credits/index.mdx +++ /dev/null @@ -1,39 +0,0 @@ ---- -title: Credits -description: Earn free usage credits for AI tools by staking TNT tokens on Tangle. ---- - -# Credits - -## What Are Credits? - -Credits are a way for re-stakers on Tangle to earn credits on [ai.tangle.tools](https://ai.tangle.tools). When you stake TNT tokens, you automatically earn credits that can be used for AI services like text generation, image creation, and other AI tools. - -## How to Earn Credits - -Stake TNT/LstTNT tokens on Tangle and you'll automatically start earning credits. The more TNT you stake, the more credits you earn over time. You don't need to do anything special - just stake your tokens and credits accumulate automatically. - -## How to Use Your Credits - -1. Stake TNT tokens on Tangle -2. Visit [ai.tangle.tools](https://ai.tangle.tools) -3. Claim your accumulated credits - -### Earning Mechanism - -Credits accumulate based on your staked TNT amount. Higher stake amounts earn credits at a higher rate. - -### Credit Expiry - -Credits have an expiry period to encourage regular usage rather than hoarding. On Tangle Mainnet, you can accumulate credits for up to one week from your last claim. After one week, your accumulated credits reset to zero and you start earning fresh credits again. - -This system encourages users to actively claim and use their credits rather than letting them pile up indefinitely. - -### Claiming Process - -To claim credits, you submit a transaction with your GitHub account as the off-chain ID. The system calculates how many credits you've earned based on your stake, verifies this amount, and emits an event that credits the specified amount to your GitHub account on ai.tangle.tools. - -## Next Steps - -- **[How to Claim Credits](/restake/credits/claiming)** - Learn how to claim your earned credits -- **[Credits Precompile](/restake/credits/precompile)** - Technical documentation for developers working with the credits system diff --git a/pages/restake/credits/precompile.mdx b/pages/restake/credits/precompile.mdx deleted file mode 100644 index 97fe0cf8..00000000 --- a/pages/restake/credits/precompile.mdx +++ /dev/null @@ -1,99 +0,0 @@ ---- -title: Credits Precompile -description: Solidity interface for interacting with the Tangle Credits system ---- - -# Credits Precompile - -The Credits precompile provides an Ethereum-compatible interface for interacting with the Tangle Credits system. This allows smart contracts and dApps to manage credits programmatically. - -## Contract Address - -- **Mainnet & Testnet**: `0x0000000000000000000000000000000000000825` - -## Interface - -```solidity -// SPDX-License-Identifier: GPL-3.0-only -pragma solidity >=0.8.3; - -/// @dev The Credits contract's address. 
-address constant CREDITS = 0x0000000000000000000000000000000000000825; - -/// @dev The Credits contract's instance. -Credits constant CREDITS_CONTRACT = Credits(CREDITS); - -/// @author The Tangle Team -/// @title Credits Pallet Interface -/// @notice Interface for interacting with the Tangle Credits system -/// @custom:address 0x0000000000000000000000000000000000000825 -interface Credits { - /// @dev Burn TNT tokens to generate credits - /// @param amount The amount of TNT to burn - /// @return Success status (0 for success) - function burn(uint256 amount) external returns (uint8); - - /// @dev Claim accumulated credits - /// @param amount The amount of credits to claim - /// @param offchainAccountId The off-chain account identifier - /// @return Success status (0 for success) - function claimCredits( - uint256 amount, - bytes calldata offchainAccountId - ) external returns (uint8); - - /// @dev Get the current credit emission rate for a staked amount - /// @param stakedAmount The amount of TNT staked - /// @return The credits earned per block - function getCurrentRate(uint256 stakedAmount) external view returns (uint256); - - /// @dev Calculate accrued credits for an account - /// @param account The account to check - /// @return The amount of claimable credits - function calculateAccruedCredits(address account) external view returns (uint256); - - /// @dev Get the current stake tier configuration - /// @return thresholds Array of stake thresholds - /// @return rates Array of emission rates per block - function getStakeTiers() external view returns ( - uint256[] memory thresholds, - uint256[] memory rates - ); - - /// @dev Get stake tier for a specific asset - /// @param assetId The asset identifier - /// @return thresholds Array of stake thresholds - /// @return rates Array of emission rates per block - function getAssetStakeTiers(uint256 assetId) external view returns ( - uint256[] memory thresholds, - uint256[] memory rates - ); - - /// @dev Events - event CreditsGrantedFromBurn(address indexed account, uint256 burned, uint256 credits); - event CreditsClaimed(address indexed account, uint256 amount, bytes offchainAccountId); -} -``` - -### Claiming Credits - -```solidity -contract CreditClaimer { - Credits constant credits = Credits(0x0000000000000000000000000000000000000825); - - function claimMyCredits(uint256 amount, string memory accountId) external { - // Convert string to bytes for off-chain account ID - bytes memory offchainId = bytes(accountId); - - // Claim the credits - uint8 result = credits.claimCredits(amount, offchainId); - require(result == 0, "Claim failed"); - } -} -``` - -## Related Documentation - -- [Credits Overview](/restake/credits/overview) -- [Claiming Credits](/restake/credits/claiming) -- [Multi-Asset Delegation](/developers/precompiles/features/multi-asset-delegation) diff --git a/pages/restake/how_to_restake/_meta.ts b/pages/restake/how_to_restake/_meta.ts deleted file mode 100644 index e9111314..00000000 --- a/pages/restake/how_to_restake/_meta.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - how_to_restake_tangle: "How to Restake: Tangle DApp", - how_to_restake_polkadotjs: "How to Restake: PolkadotJS", -}; - -export default meta; diff --git a/pages/restake/how_to_restake/how_to_restake_polkadotjs/delegate.mdx b/pages/restake/how_to_restake/how_to_restake_polkadotjs/delegate.mdx deleted file mode 100644 index ec4613ab..00000000 --- a/pages/restake/how_to_restake/how_to_restake_polkadotjs/delegate.mdx +++ 
/dev/null @@ -1,39 +0,0 @@ -import Callout from "/components/Callout"; - -## Delegate Using PolkadotJS - - -You should have deposited your tokens to the multiasset delegation vault before you can delegate. See the [Deposit Using PolkadotJS page](./deposit.mdx) for more information. - - -Delegators are similar to stakers in a consensus system like nominated proof of stake (NPoS), but they delegate their tokens to an operator, and participate in the rewards and risks similar to staking on a validator. - -To delegate, you need to call the `delegate` function. This function allocates assets to the chosen operator. - -### Step 1: Access the PolkadotJS Interface - -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle Network by selecting the appropriate network from the dropdown list. - -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: - -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer - -### Step 2: Delegate - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. - -![PolkadotJS Extrinsics](/images/extrinsic.png) - -- Under the **multiAssetDelegation** section, select **delegate(...)** and enter the operator's address, asset ID, and the amount of assets to delegate. - -![PolkadotJS Delegate](/images/restake/delegate/delegate.png) - -- Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee. - -![PolkadotJS Delegate](/images/restake/delegate/delegatesign.png) - -- If successful, you should see the following confirmation toast notification: - -![PolkadotJS Delegate Success](/images/restake/delegate/delegateinblock.png) diff --git a/pages/restake/how_to_restake/how_to_restake_polkadotjs/deposit.mdx b/pages/restake/how_to_restake/how_to_restake_polkadotjs/deposit.mdx deleted file mode 100644 index b7d62bba..00000000 --- a/pages/restake/how_to_restake/how_to_restake_polkadotjs/deposit.mdx +++ /dev/null @@ -1,33 +0,0 @@ -## Deposit Using PolkadotJS - -Depositing is the process of allocating assets to the multiasset delegation vault. Deposits are required to participate in restaking (delegate). - -Users can deposit LST assets to the multiasset delegation vault. - -### Step 1: Access the PolkadotJS Interface - -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle Network by selecting the appropriate network from the dropdown list. - -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: - -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer - -### Step 2: Deposit Assets - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. - -![PolkadotJS Extrinsics](/images/extrinsic.png) - -- Under the **multiAssetDelegation** section, select **deposit(...)**, enter the asset ID and the amount of assets to deposit. - -![PolkadotJS Deposit](/images/restake/deposit/deposit.png) - -- Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee. 
- -![PolkadotJS Transaction](/images/restake/deposit/depositsign.png) - -- If successful, you should see the following confirmation toast notification: - -![PolkadotJS Deposit Success](/images/restake/deposit/depositinblock.png) diff --git a/pages/restake/how_to_restake/how_to_restake_polkadotjs/unstake.mdx b/pages/restake/how_to_restake/how_to_restake_polkadotjs/unstake.mdx deleted file mode 100644 index 10865564..00000000 --- a/pages/restake/how_to_restake/how_to_restake_polkadotjs/unstake.mdx +++ /dev/null @@ -1,89 +0,0 @@ -## Unstake Using PolkadotJS - -The first step to exit restake is to unstake your tokens. This is done by calling the `unstake` function, which releases the locked assets and returns them to the deposit vault. - -Then, you can withdraw your assets from the deposit vault. See the [Withdraw page](./withdraw.mdx) for more information. - -Unstaking is a two step process: - -1. Call the `schedule_unstake` function to schedule the unstake request. -2. Call the `execute_unstake` function to execute the unstake request and release the funds after the unstake period has elapsed. - -### Schedule Unstake - -To unstake, you need to call the `schedule_unstake` function. This function schedules the unstake. - -### Step 1: Access the PolkadotJS Interface - -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle Network by selecting the appropriate network from the dropdown list. - -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: - -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer - -### Step 2: Schedule Unstake - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. - -![PolkadotJS Extrinsics](/images/extrinsic.png) - -- Under the **MultiAssetDelegation** section, select **ScheduleUnstake** and enter the amount of assets to unstake. - -![PolkadotJS Schedule Unstake](/images/restake/delegate/scheduleunstake.png) - -Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee. - -### Cancel Delegator Unstake - -To cancel the unstake, you need to call the `cancel_delegator_unstake` function. This function cancels the unstake. - -### Step 1: Access the PolkadotJS Interface - -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle Network by selecting the appropriate network from the dropdown list. - -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: - -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer - -### Step 2: Cancel Delegator Unstake - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. - -![PolkadotJS Extrinsics](/images/extrinsic.png) - -- Under the **MultiAssetDelegation** section, select **CancelDelegatorUnstake** and enter the amount of assets to unstake. - -![PolkadotJS Cancel Delegator Unstake](/images/restake/delegate/canceldelegatorunstake.png) - -Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee. - -### Execute Delegator Unstake - -To execute the unstake, you need to call the `execute_unstake` function. This function executes the unstake. This function can only be called after the unstake period. 
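For completeness, a hedged @polkadot/api sketch of the two-step flow is shown below. The call names (`scheduleDelegatorUnstake`, `executeDelegatorUnstake`) and the parameter list are inferred from the extrinsic labels in this guide and from the earlier `delegate(...)` call; verify them against the chain metadata before use.

```typescript
import { ApiPromise, WsProvider } from '@polkadot/api';
import { Keyring } from '@polkadot/keyring';

// Assumed call names and parameters (operator, asset ID, amount, mirroring
// delegate(...)); confirm both against the live chain metadata before signing.
async function connect() {
  const api = await ApiPromise.create({ provider: new WsProvider('wss://testnet-rpc.tangle.tools') });
  const signer = new Keyring({ type: 'sr25519' }).addFromUri('//Alice'); // placeholder dev account
  return { api, signer };
}

export async function scheduleUnstake(operator: string, assetId: number, amount: bigint) {
  const { api, signer } = await connect();
  // Step 1: schedule the unstake request.
  await api.tx.multiAssetDelegation.scheduleDelegatorUnstake(operator, assetId, amount).signAndSend(signer);
}

export async function executeUnstake() {
  const { api, signer } = await connect();
  // Step 2, in a later transaction once the unstake period has elapsed:
  // executes *all* matured unstake requests, so no parameters are needed.
  await api.tx.multiAssetDelegation.executeDelegatorUnstake().signAndSend(signer);
}
```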
- -### Step 1: Access the PolkadotJS Interface - -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle network by selecting the appropriate network from the dropdown list. - -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: - -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer - -### Step 2: Execute Delegator Unstake - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. - -![PolkadotJS Extrinsics](/images/extrinsic.png) - -- Under the **multiAssetDelegation** section, select **executeDelegatorUnstake()**. -- Notice that there aren't any inputs for this function. This is because calling the function will execute **all** pending unstake requests that have reached their maturity. - -![PolkadotJS Execute Delegator Unstake](/images/restake/delegate/executedelegatorUnstake.png) - -- Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee. diff --git a/pages/restake/how_to_restake/how_to_restake_polkadotjs/withdraw.mdx b/pages/restake/how_to_restake/how_to_restake_polkadotjs/withdraw.mdx deleted file mode 100644 index 30f22b43..00000000 --- a/pages/restake/how_to_restake/how_to_restake_polkadotjs/withdraw.mdx +++ /dev/null @@ -1,53 +0,0 @@ -import Callout from "/components/Callout"; - -## Withdraw Using PolkadotJS - - -You can only withdraw your "undelegated" (unstaked) assets. Make sure you have undelegated your assets before withdrawing. See the [Unstake page](./unstake.mdx) for more information. - - -Withdrawing is the process of releasing assets from the multiasset delegation vault. - -Similar to unstaking, it is composed of two steps: - -1. A withdrawal request is **scheduled**, which will be available for execution after its unstake period. -2. After the unstake period, you can **execute** the withdrawal to actually release the assets. - -### Accessing the PolkadotJS Interface - -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle network by selecting the appropriate network from the dropdown list. - -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: - -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer - -### Schedule Withdraw - -The first step to complete a withdraw is to schedule a withdrawal. This is done by calling the `schedule_withdraw` function. - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. - -![PolkadotJS Extrinsics](/images/extrinsic.png) - -- Under the **multiAssetDelegation** section, select **scheduleWithdraw(...)** and enter the asset ID along with the amount of assets to withdraw. - -![PolkadotJS ScheduleWithdraw](/images/restake/how-to-restake-polkadotjs/schedulewithdraw.png) - -- Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee. - -### Execute Withdraw - -The second step to complete a withdraw is to execute the withdrawal. This is done by calling the `execute_withdraw` function. - -- Navigate to the **Developer** → **Extrinsics** tab on PolkadotJS. - -![PolkadotJS Extrinsics](/images/extrinsic.png) - -- Under the **multiAssetDelegation** section, select **executeWithdraw()**. 
-- Notice that there aren't any inputs for this function. This is because calling the function will execute **all** pending withdraw requests that have reached their maturity. - -![PolkadotJS ExecuteWithdraw](/images/restake/how-to-restake-polkadotjs/executewithdraw.png) - -- Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee. diff --git a/pages/restake/how_to_restake/how_to_restake_tangle/_meta.ts b/pages/restake/how_to_restake/how_to_restake_tangle/_meta.ts deleted file mode 100644 index 436cb6f2..00000000 --- a/pages/restake/how_to_restake/how_to_restake_tangle/_meta.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - deposit: "Deposit", - delegate: "Delegate", - unstake: "Unstake", - withdraw: "Withdraw", -}; - -export default meta; diff --git a/pages/restake/incentives/configs.mdx b/pages/restake/incentives/configs.mdx deleted file mode 100644 index a387bf87..00000000 --- a/pages/restake/incentives/configs.mdx +++ /dev/null @@ -1,26 +0,0 @@ ---- -title: Restaking Incentives -description: Learn about the restaking system in the Tangle Network, which encourages operators to execute multi-party computation (MPC) service roles, and understand the reward mechanisms and benefits for operators and delegators. ---- - -# Restaking Rewards Configs - -## Setting Incentive APY and Cap - -The `set_incentive_apy_and_cap` function allows the network (democracy) to configure the APY and cap for a specific asset Vault, ensuring that stakers know the potential rewards and the upper limits of participation for rewards. - -- **Function:** `set_incentive_apy_and_cap` -- **Description:** Sets the APY (Annual Percentage Yield) and cap for a specific asset Vault. -- **Inputs:** - - `Vault_id`: The ID of the Vault for which the APY and cap are being set. - - `apy`: The APY (percent) to be applied to the Vault. - - `cap`: The maximum amount of staked assets eligible for rewards in this Vault. - -Once set, the APY and cap are used to calculate the total rewards distributed among operators and delegators based on their staked amounts. - -## Whitelisting Blueprints for Rewards - -The network allows developers to create blueprints for restaking services. These blueprints can be whitelisted for rewards, meaning services built using these blueprints can attract incentivized staking. - -- **Function:** `whitelist_blueprint_for_rewards` -- **Description:** Whitelists a blueprint, allowing services using this blueprint to be eligible for reward distribution. diff --git a/pages/restake/incentives/how_rewards_work.mdx b/pages/restake/incentives/how_rewards_work.mdx deleted file mode 100644 index e892213c..00000000 --- a/pages/restake/incentives/how_rewards_work.mdx +++ /dev/null @@ -1,39 +0,0 @@ ---- -title: Restaking Incentives -description: Learn about the restaking system in the Tangle Network, which encourages operators to execute multi-party computation (MPC) service roles, and understand the reward mechanisms and benefits for operators and delegators. ---- - -# Restaking Rewards Overview - -Restaking rewards in the Tangle Network are designed to incentivize both operators and delegators through a structured system of rewards, which are distributed based on staked assets and specific reward configurations. Below is a detailed explanation of how the restaking rewards mechanism works, including setting APY and caps, distributing rewards, and managing assets within reward Vaults. - -## Key Concepts - -1. 
**APY (Annual Percentage Yield):** A percentage that determines the rewards distributed to stakers based on their contribution to the Vault. -2. **Cap:** The maximum amount of staked assets that can earn rewards within a given Vault. -3. **Reward Vaults:** Asset Vaults to which restaked tokens are assigned for reward distribution. -4. **Delegators:** Users who delegate their tokens to operators, sharing in the rewards generated by the operators' activities. -5. **Operators:** Validators who restake their tokens and provide services, earning rewards through their participation in the restaking protocol. -6. **Lock Duration Multipliers:** Enhanced rewards for longer lock periods (1.1x to 1.6x multipliers). -7. **Service-Specific Rewards:** Rewards tied to specific service blueprints and instances. - -## Whitelisting Blueprints for Rewards - -The network allows developers to create blueprints for restaking services. These blueprints can be whitelisted for rewards, meaning services built using these blueprints can attract incentivized staking. - -- **Function:** `whitelist_blueprint_for_rewards` -- **Description:** Whitelists a blueprint, allowing services using this blueprint to be eligible for reward distribution. - -## Distributing Rewards - -Rewards are distributed to delegators based on the total amount staked in a reward Vault and the Vault's APY and cap configuration. The `distribute_rewards` function handles the distribution process, ensuring that all delegators receive their fair share of rewards based on the assets they have staked. - -### Reward Calculation - -- **Total Reward Calculation:** The total reward is calculated by multiplying the APY by the total staked amount, capped at the Vault's limit. -- **Individual Reward Calculation:** Each delegator's reward is calculated as a percentage of the total reward based on the delegator’s stake in relation to the cap. - -### Example: - -- If a Vault has a cap of 1000 tokens and an APY of 10%, the total reward distributed will be 100 tokens (10% of 1000). -- If a delegator staked 100 tokens, they will receive 10 tokens as a reward (10% of the total reward). diff --git a/pages/restake/incentives/vaults.mdx b/pages/restake/incentives/vaults.mdx deleted file mode 100644 index a58aaaa1..00000000 --- a/pages/restake/incentives/vaults.mdx +++ /dev/null @@ -1,49 +0,0 @@ ---- -title: Restaking Incentives -description: Learn about the restaking system in the Tangle Network, which encourages operators to execute multi-party computation (MPC) service roles, and understand the reward mechanisms and benefits for operators and delegators. ---- - -# What are Vaults? - -Vaults are used to store and whitelist restaked tokens in the Tangle Network. They are used to distribute rewards to operators and delegators. - -## Whitelisting Blueprints for Rewards - -The network allows developers to create blueprints for restaking services. These blueprints can be whitelisted for rewards, meaning services built using these blueprints can attract incentivized staking. - -- **Function:** `whitelist_blueprint_for_rewards` -- **Description:** Whitelists a blueprint, allowing services using this blueprint to be eligible for reward distribution. - -### Reward Calculation - -- **Total Reward Calculation:** The total reward is calculated by multiplying the APY by the total staked amount, capped at the Vault's limit. 
-- **Individual Reward Calculation:** Each delegator's reward is calculated as a percentage of the total reward based on the delegator’s stake in relation to the cap. - -### Example: - -- If a Vault has a cap of 1000 tokens and an APY of 10%, the total reward distributed will be 100 tokens (10% of 1000). -- If a delegator staked 100 tokens, they will receive 10 tokens as a reward (10% of the total reward). - -## Managing Assets in Vaults - -Assets can be added or removed from reward Vaults using the following functions: - -### Adding an Asset to a Vault - -- **Function:** `add_asset_to_Vault` -- **Description:** Adds a new asset to an existing reward Vault. -- **Inputs:** - - `Vault_id`: The ID of the Vault to which the asset is being added. - - `asset_id`: The ID of the asset being added to the Vault. - -This function ensures that the asset is not already associated with a Vault before adding it. - -### Removing an Asset from a Vault - -- **Function:** `remove_asset_from_Vault` -- **Description:** Removes an asset from a reward Vault. -- **Inputs:** - - `Vault_id`: The ID of the Vault from which the asset is being removed. - - `asset_id`: The ID of the asset being removed. - -This function ensures that the asset is part of the specified Vault before removing it. diff --git a/pages/restake/introduction.mdx b/pages/restake/introduction.mdx deleted file mode 100644 index 40f76410..00000000 --- a/pages/restake/introduction.mdx +++ /dev/null @@ -1,53 +0,0 @@ -import CardGrid from "../../components/CardGrid"; - -# Staking & Restaking - -Tangle's core staking infrastructure is composed of three major pieces. The first is the base nominator proof of stake (NPos) mechanism for validator selection. The second is a native liquid staking protocol for validator specific staking operations and liquid staked tokens (LSTs). The third is the shared security restaking infrastructure for Tangle Blueprints that leverages any asset, especially the LSTs. - - - -## Nominated Staking - -The NPoS system is a validator selection and reward system. Users stake TNT on validators to select the validators that will secure the base layer and produce and finalize blocks. Validators are expected to adhere to the rules of the system's consensus protocol and are subject to slashing if they fail to provide validation operations. Stakers earn rewards proportional to their nominated stake on a validator. - -The NPoS staking system also determines one's eligibility for membership into Tangle's operator set under the restaking infrastructure. An operator must be a Tangle validator with some minimum nominated stake in the base staking system in order to be a valid operator in the restaking system and to be eligible for Blueprint registrations and service requests. - -## Restaking - -Tangle provides permissionless and asset-configurable restaking for Blueprints. Any asset created on and bridged to Tangle can be used as collateral to stake on operators. These restaked assets, commonly in the form of LSTs, act as security collateral for service instances that are requested on-demand. The restaking providers (the restakers) earn rewards proportional to the rewards issued to the services and Blueprints on Tangle, depending on the usage and utility of the services themselves. - -The restaking infrastructure divides assets into pools, which can be created to represent a single asset or a basket of similarly valued assets. Pools of assets are used to secure Tangle Blueprint service instances and are rewarded collectively as pools. 
This is beneficial when integrating many liquid staked tokens of a single protocol, such as validator-specific liquid staking protocols, or when bundling many different LSTs from a single ecosystem, such as a basket of ETH LSTs. - -Users deposit assets into the restaking infrastructure by depositing into a pool. The user then stakes (equivalently, delegates) their assets on an operator, who leverages these assets to provide shared security to their service-providing operations. If a validator misbehaves or fails to provide a service as outlined by their Blueprint specification, the user's assets will be liable to be slashed. - -## Liquid Staking - -Tangle includes a variety of liquid staking protocol implementations for partner projects and blockchain ecosystems, providing the restaking infrastructure with unique liquid staking tokens to be used in securing new services. The tokens Tangle takes an active part in developing and leveraging bear the prefix `tg...`. The `tg` LST protocols are stake-operation-specific liquid staking protocols. By stake operation, we mean a unique staking action that exists separately from another, such as staking on Validator A versus Validator B, or staking on Vault A versus Vault B for an arbitrary staking protocol. These examples would create `tgXYZ_A` and `tgXYZ_B` liquid staked tokens that are not fungible with one another. - -In doing so, Tangle's LST protocol creates a plethora of new LSTs and a well-defined pool in the restaking infrastructure. Validators, node operators, and vaults of various protocols have unique assets to represent their operations, and Tangle's liquid staking tokens provide these communities with additional product opportunities for leveraging those assets and actions. - -## Liquid Restaking - -Liquid Restaking Tokens (LRTs) are used to restake assets into the restaking infrastructure. -In a nutshell, an LRT lets you take an asset (usually an already staked asset, like stETH), stake it again by depositing it into the Tangle restaking infrastructure, and earn extra rewards for doing so. diff --git a/pages/restake/join_a_pool/_meta.ts b/pages/restake/join_a_pool/_meta.ts deleted file mode 100644 index ad3cf895..00000000 --- a/pages/restake/join_a_pool/_meta.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - tangle: "Join using Tangle DApp", - polkadotjs: "Join using PolkadotJS", -}; - -export default meta; diff --git a/pages/restake/join_a_pool/polkadotjs.mdx b/pages/restake/join_a_pool/polkadotjs.mdx deleted file mode 100644 index eb531df2..00000000 --- a/pages/restake/join_a_pool/polkadotjs.mdx +++ /dev/null @@ -1,52 +0,0 @@ -## How to Join a Liquid Staking Pool Using PolkadotJS - -### Step 1: Access the PolkadotJS Interface - -- Open [PolkadotJS Apps](https://polkadot.js.org/apps/). -- Connect to the Tangle Network by selecting the appropriate network from the dropdown list. - -For convenience, here are the PolkadotJS direct links for Tangle Testnet and Mainnet: - -- Tangle Testnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Ftestnet-rpc.tangle.tools#/explorer -- Tangle Mainnet: https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.tangle.tools#/explorer - -### Step 2: Navigate to LST Tab - -- Click on the **Developer** → **Extrinsics** tab from the top menu. - -![PolkadotJS Extrinsics](/images/extrinsic.png) - -- Select the **lst** section where you will see the `join` option.
- -![PolkadotJS Join](/images/liquid-staking/join-pool-polkadotjs/join.png) - -### Step 3: Choose a Pool - -- Review the list of available staking pools, including details such as performance, commission rates, and validator selections. -- Select a pool that suits your preferences based on performance, operator reputation, and commission fees. - -You can view all pools by clicking on the **Developer** → **Chain state** tab and navigating to the **lst** section and selecting **bondedPools**. - -![PolkadotJS Bonded Pools](/images/liquid-staking/join-pool-polkadotjs/bondedpools.png) - -### Step 4: Join the Pool - -- Enter the amount of tokens you wish to stake in the pool and the pool ID. - -![PolkadotJS Join Pool](/images/liquid-staking/join-pool-polkadotjs/lst-join.png) - -- Confirm the transaction by signing it with your wallet, and your tokens will be bonded to the pool. - -![PolkadotJS Sign](/images/liquid-staking/join-pool-polkadotjs/sign.png) - -### Step 5: Confirmation - -- If successful, you should see the following confirmation toast notification: - -![PolkadotJS Pool Joined](/images/liquid-staking/join-pool-polkadotjs/success.png) - -- You can navigate to the **Network** tab to view the events. - -![PolkadotJS Events](/images/liquid-staking/join-pool-polkadotjs/events.png) - -- Finally, you should have the LST tokens in your account. diff --git a/pages/restake/join_a_pool/tangle.mdx b/pages/restake/join_a_pool/tangle.mdx deleted file mode 100644 index 6bcb7fe6..00000000 --- a/pages/restake/join_a_pool/tangle.mdx +++ /dev/null @@ -1,32 +0,0 @@ -## How to Join a Liquid Staking Pool Using Tangle DApp - -### Step 1: Access Tangle DApp & Connect Wallet - -- Open [Tangle DApp's Liquid Staking page](https://app.tangle.tools/liquid-staking). -- Connect your wallet to the DApp by clicking on the **Connect Wallet** button on the top right and selecting your preferred wallet provider. -- Connect to the desired Tangle network by selecting the appropriate network from the dropdown list. Note that liquid staking pools are only available on the networks in which they are created--If you don't see a specific pool that you're looking for, make sure you're connected to the correct network. - -![Selecting a Liquid Staking Network](/images/liquid-staking/select-ls-network.png) - -### Step 2: Enter a Deposit Amount & Select a Pool - -- Enter the amount of tokens you wish to deposit into the pool. You can view your available balance next to the wallet icon. - -![Entering Deposit Amount](/images/liquid-staking/input-amount-select-tab.png) - -- Select a pool by clicking on the **All Pools** tab, clicking on a protocol to expand its list of pools. -- The table also lists key details such as APY and Total Value Locked (TVL) for each pool. These metrics can help you evaluate the pool's performance and popularity. - -### Step 3: Join Pool & Stake - -- The lower input field displays the total amount of the selected pool's tokens that you'll receive from your deposit. This amount is calculated using the exchange rate and also includes the liquid staking fee, if applicable. -- Once you've double-checked all the transaction details, click on the **Join Pool & Stake** button to deposit your tokens into the selected pool, and proceed to confirm the transaction with your wallet. - -![Select a Pool and Click Stake](/images/liquid-staking/select-pool-and-click-stake.png) - -### Step 4: Monitor Your Stake - -- Use the **My Pools** tab to manage & monitor the pools and stake in which you're participating. 
Here, you can view your staked amount, rewards, and the pool's performance. -- You can also increase your stake and unstake a portion of your tokens. - -![My Pools Tab](/images/liquid-staking/my-pools.png) diff --git a/pages/restake/lrt-concepts.mdx b/pages/restake/lrt-concepts.mdx deleted file mode 100644 index 477acf38..00000000 --- a/pages/restake/lrt-concepts.mdx +++ /dev/null @@ -1,53 +0,0 @@ -# Liquid Restaking on Tangle Network - -## Introduction - -Liquid restaking allows users to participate in restaking while simultaneously maintaining liquidity of their assets. -This mechanism is implemented through smart contracts that define their own liquid restaking mechanics while proxying calls to the underlying staking system. This allows users to stake their tokens, earn rewards, and maintain liquidity without being subject to traditional staking lock-up periods. - -## Core Concepts - -### Delegation Vaults - -Delegation vaults are the backbone of liquid restaking on Tangle Network. These vaults: - -- Allow users to deposit tokens which are then delegated to Tangle operators running blueprint services -- Issue shares (liquid tokens) representing the user's deposited assets -- Handle the complexities of managing deposits, unstaking, and withdrawals -- Rewards distribution and claiming. - -### Liquid Tokens - -When users deposit assets into a liquid restaking vault, they receive shares in return. These shares: - -- Represent ownership of the underlying staked assets -- Can be transferred or traded while the underlying assets remain staked -- Serve as a claim ticket for both the original deposit and earned rewards -- Do not automatically increase in value like traditional LSTs but instead provide access to claim rewards - -### Multi-step Withdrawal Process - -The withdrawal process in liquid restaking involves multiple steps: - -1. **Schedule Unstake**: User initiates the unstaking process from the operator -2. **Execute Unstake**: The unstaking request is processed after the unbonding period -3. **Schedule Withdrawal**: User requests to withdraw their assets from the vault -4. **Execute Withdrawal**: The final withdrawal is processed, returning assets to the user - -This process ensures orderly exits while maintaining the security of the network. 
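To make the ordering concrete, here is a minimal sketch of how a contract that holds a delegation (such as a vault) might drive these four steps through the MultiAssetDelegation precompile documented later in this section at `0x0000000000000000000000000000000000000822`. The contract and function names below are illustrative only, and in practice the unbonding and withdrawal delays must elapse between the "schedule" and "execute" steps, so they cannot be called back to back in a single transaction.

```solidity
// SPDX-License-Identifier: GPL-3.0-only
pragma solidity ^0.8.20;

/// Minimal subset of the MultiAssetDelegation precompile interface
/// (see the developer pages later in this section) used for the exit flow.
interface IMultiAssetDelegationExit {
    function scheduleDelegatorUnstake(bytes32 operator, uint256 assetId, uint256 amount) external returns (uint8);
    function executeDelegatorUnstake() external returns (uint8);
    function scheduleWithdraw(uint256 assetId, uint256 amount) external returns (uint8);
    function executeWithdraw() external returns (uint8);
}

/// Illustrative sketch of the four-step exit flow. A real vault performs
/// these calls on behalf of its depositors and waits for the required
/// delays between the scheduled and executed steps.
contract ExitFlowSketch {
    IMultiAssetDelegationExit constant MAD =
        IMultiAssetDelegationExit(0x0000000000000000000000000000000000000822);

    // Step 1: schedule an unstake of `amount` of `assetId` from `operator`.
    function scheduleUnstake(bytes32 operator, uint256 assetId, uint256 amount) external {
        require(MAD.scheduleDelegatorUnstake(operator, assetId, amount) == 0, "schedule unstake failed");
    }

    // Step 2: after the unbonding period, finalize the unstake.
    function executeUnstake() external {
        require(MAD.executeDelegatorUnstake() == 0, "execute unstake failed");
    }

    // Step 3: schedule withdrawal of the now-unstaked assets from the restaking system.
    function scheduleWithdrawal(uint256 assetId, uint256 amount) external {
        require(MAD.scheduleWithdraw(assetId, amount) == 0, "schedule withdraw failed");
    }

    // Step 4: after the withdrawal delay, execute the withdrawal.
    function executeWithdrawal() external {
        require(MAD.executeWithdraw() == 0, "execute withdraw failed");
    }
}
```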
- -### Rewards Distribution - -Liquid restaking vaults use an accumulator-based system for tracking and distributing rewards: - -- Each reward token has a global accumulator tracking rewards-per-share over time -- User positions are tracked by snapshots recording their share balance and last claim point -- Rewards are calculated based on the difference between current and last-seen accumulator values -- This system ensures fair distribution regardless of when users entered the pool or how many shares they hold - -### Operator Delegation - -Vaults will automatically delegate deposited assets to selected Tangle operators who: - -- Run blueprint services on the network -- Generate rewards through their participation diff --git a/pages/restake/lrt-vs-lst.mdx b/pages/restake/lrt-vs-lst.mdx deleted file mode 100644 index 8699544b..00000000 --- a/pages/restake/lrt-vs-lst.mdx +++ /dev/null @@ -1,8 +0,0 @@ -## Liquid Restaking Tokens (LRT) vs Liquid Staking Tokens (LST) - -| Feature | LRT (Tangle Network) | LST (Traditional) | -| ---------------- | -------------------------------------------- | ------------------------------------ | -| Primary Function | Enables restaking across multiple Blueprints | Provides liquidity for staked assets | -| Risk Profile | Shared security across services | Single network exposure | -| Reward Sources | Multiple Services rewards + staking yields | Base chain staking rewards | -| Use Cases | Cross-service validation, Blueprint creation | Trading, lending, DeFi integrations | diff --git a/pages/restake/lrt_developers/_meta.ts b/pages/restake/lrt_developers/_meta.ts deleted file mode 100644 index be73dbf5..00000000 --- a/pages/restake/lrt_developers/_meta.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - intro: "Introduction", - "lrt-vault": "Creating a LRT on Tangle", -}; - -export default meta; diff --git a/pages/restake/lrt_developers/intro.mdx b/pages/restake/lrt_developers/intro.mdx deleted file mode 100644 index f9ccc994..00000000 --- a/pages/restake/lrt_developers/intro.mdx +++ /dev/null @@ -1,11 +0,0 @@ -# Building LRT on Tangle Network - -Tangle Multi-Asset Delegations (MADs) pallet precompile provides information about the current state of the Tangle network restaking functionality. -A precompiled contract is native Substrate code that has an Ethereum-style address and can be called using the Ethereum API, like any other smart contract. -The precompiles allow you to call the Substrate runtime directly which is not normally accessible from the Ethereum side of Tangle. - -### How to use the precompile - -We have a detailed page about the [Multi-Asset Delegation Precompile](../../developers/precompiles/features/multi-asset-delegation.mdx). - -In the next page, we will have a tutorial about how to use this precompile to build a LRT on Tangle. diff --git a/pages/restake/lrt_developers/lrt-vault.mdx b/pages/restake/lrt_developers/lrt-vault.mdx deleted file mode 100644 index d9c650c1..00000000 --- a/pages/restake/lrt_developers/lrt-vault.mdx +++ /dev/null @@ -1,230 +0,0 @@ -import GithubFileReaderDisplay from "../../../components/GithubFileReaderDisplay"; - -# Creating a LRT Vault on Tangle - -## Overview - -This tutorial walks through creating a Liquid Restaking Token (LRT) Vault on Tangle Network using the reference implementation from the [tangle-lrt](https://github.com/tangle-network/lrt) repository. 
LRT vaults allow users to receive liquid tokens representing their staked assets while participating in Tangle's restaking mechanism. - -## Prerequisites - -- Basic knowledge of Solidity and EVM development -- [Foundry](https://book.getfoundry.sh/) installed for smart contract development -- MetaMask wallet connected to Tangle Network -- Some test tokens for deployment (on testnet) - -Install Foundry: - -```bash -curl -L https://foundry.paradigm.xyz | bash -foundryup -``` - -## Understanding the Components - -The Tangle Liquid Restaking implementation consists of the following key components: - -1. **Vault Contract (TangleLiquidRestakingVault)**: An ERC4626-compliant vault that: - - - Manages deposits and withdrawals - - Implements reward distribution with index-based accounting - - Handles delegation through the MultiAssetDelegation precompile - - Provides liquid token representation of staked assets - -2. **MultiAssetDelegation Wrapper**: Interfaces with Tangle's MultiAssetDelegation precompile at `0x0000000000000000000000000000000000000822` -3. **Rewards Wrapper**: Interfaces with Tangle's Rewards precompile at `0x0000000000000000000000000000000000000825` - -Core features include: - - - -## Step 1: Setting Up the Project - -First, clone the reference implementation: - -```bash -git clone https://github.com/tangle-network/lrt -cd lrt -forge soldeer update -d -``` - -## Step 2: Core Implementation Details - -### Reward Distribution System - -The vault implements a sophisticated reward distribution system using index-based accounting: - - - -### Delegation Management - -The vault handles delegation through the MultiAssetDelegation precompile, managing deposits and withdrawals: - - - -## Step 3: Testing the Implementation - -Create test files in the `test` directory using Foundry's Solidity testing framework. Here's an example test structure: - -```solidity -// SPDX-License-Identifier: UNLICENSED -pragma solidity ^0.8.20; - -import "forge-std/Test.sol"; -import "../src/TangleLiquidRestakingVault.sol"; - -contract TangleLiquidRestakingVaultTest is Test { - TangleLiquidRestakingVault vault; - address baseToken; - bytes32 operator; - uint64[] blueprintSelection; - - function setUp() public { - // Setup test environment - baseToken = address(new ERC20("Test Token", "TEST")); - operator = bytes32(uint256(1)); - blueprintSelection = new uint64[](1); - blueprintSelection[0] = 1; - - // Deploy vault - vault = new TangleLiquidRestakingVault( - baseToken, - operator, - blueprintSelection, - MULTI_ASSET_DELEGATION_CONTRACT, - "Liquid Restaked Test", - "lrTEST" - ); - } - - function testDeposit() public { - uint256 amount = 1000e18; - deal(baseToken, address(this), amount); - - ERC20(baseToken).approve(address(vault), amount); - vault.deposit(amount, address(this)); - - assertEq(vault.balanceOf(address(this)), amount); - } -} -``` - -Run the tests using Forge: - -```bash -forge test -vv -``` - -## Step 4: Deployment - -The vault can be deployed using Forge's deployment capabilities: - -1. 
Create a deployment script in `script/DeployVault.s.sol`: - -```solidity -// SPDX-License-Identifier: UNLICENSED -pragma solidity ^0.8.20; - -import "forge-std/Script.sol"; -import "../src/TangleLiquidRestakingVault.sol"; - -contract DeployVault is Script { - function run() external { - uint256 deployerPrivateKey = vm.envUint("PRIVATE_KEY"); - vm.startBroadcast(deployerPrivateKey); - - // Deploy vault - bytes32 operator = bytes32(uint256(vm.envUint("OPERATOR_ID"))); - uint64[] memory blueprintSelection = new uint64[](1); - blueprintSelection[0] = uint64(vm.envUint("BLUEPRINT_ID")); - - TangleLiquidRestakingVault vault = new TangleLiquidRestakingVault( - vm.envAddress("BASE_TOKEN"), - operator, - blueprintSelection, - MULTI_ASSET_DELEGATION_CONTRACT, - "Liquid Restaked Token", - "LRT" - ); - - vm.stopBroadcast(); - } -} -``` - -2. Configure deployment variables in `.env`: - -```env -TANGLE_RPC_URL="https://testnet-rpc.tangle.tools" # or mainnet -PRIVATE_KEY="your-private-key" -BASE_TOKEN="0x..." -OPERATOR_ID="1" # your operator ID -BLUEPRINT_ID="1" # your blueprint ID -``` - -3. Deploy using Forge: - -```bash -forge script script/DeployVault.s.sol:DeployVault --rpc-url $TANGLE_RPC_URL --broadcast -``` - -## Step 5: Interacting with the Vault - -The vault exposes several key functions for user interaction: - -### Deposits and Withdrawals - - - -### Reward Management - - - -## Security Considerations - -The vault implements several security measures: - -1. **Access Control**: Uses Solmate's `Owned` for admin functions -2. **Reward Accounting**: Index-based accounting prevents double-claiming -3. **Withdrawal Process**: Two-step withdrawal process with unstaking period -4. **Math Safety**: Uses `FixedPointMathLib` for safe calculations - -Key security features from the contract: - -```solidity -/// @notice Scale factor for reward index calculations -uint256 private constant REWARD_FACTOR = 1e18; - -/// @notice Uses mulDivUp for final reward calculation -uint256 newRewards = snapshot.shareBalance.mulDivUp(indexDelta, REWARD_FACTOR); - -/// @notice Validates withdrawal amounts -if (scheduledWithdrawAmount[owner] < assets) revert NoScheduledAmount(); -``` - -For more information on the MultiAssetDelegation precompile and its features, see the [precompile documentation](../../developers/precompiles/features/multi-asset-delegation.mdx). diff --git a/pages/restake/lst-assets.mdx b/pages/restake/lst-assets.mdx deleted file mode 100644 index a4449e60..00000000 --- a/pages/restake/lst-assets.mdx +++ /dev/null @@ -1,21 +0,0 @@ -import Callout from "/components/Callout"; - -# Supported Liquid Staking Assets - - -When performing liquid staking operations, ensure that you are using the correct network for the asset you are working with. - - -There are various assets supported for liquid staking operations. The important thing is that each asset may only be available on a specific network. For example, TNT is available on the Tangle Mainnet, while DOT will be available on the Tangle Parachain. - -Support for additional assets may be added through on-chain governance, so join our Discord and Telegram channels to stay tuned for updates. If you happen to have suggestions for new assets, feel free to reach out! 
- -### Supported assets on Tangle Mainnet: - -- TNT - -### Supported assets on Tangle Parachain (Coming Soon): - -- DOT (Polkadot) -- BFC (Bifrost) -- PHA (Phala) diff --git a/pages/restake/lst-concepts.mdx b/pages/restake/lst-concepts.mdx deleted file mode 100644 index dc22ff21..00000000 --- a/pages/restake/lst-concepts.mdx +++ /dev/null @@ -1,57 +0,0 @@ -# Liquid Staking on Tangle Network - -## Introduction - -Liquid staking on Tangle allows users to participate in staking while still maintaining liquidity of their staked assets. This mechanism is implemented via delegation pools, enabling participants to stake their tokens in a pool, earn rewards, and retain the ability to unbond or transfer staked assets _without_ waiting for the traditional staking lock-up periods. - -![Liquid Staking Introduction](/images/liquid-staking/lst-intro.png) - -## Key Components of Liquid Staking - -### 1. **Pools and Roles** - -- **Pools**: The fundamental unit of liquid staking on Tangle. Pools are managed by specific roles: -- **Root**: The administrator who has full control over the pool and can update roles. -- **Nominator**: An account responsible for nominating validators on behalf of the pool. -- **Bouncer**: An account that can change the state of the pool (eg. open or close it, limiting whether new users can join the pool). -- **Depositor**: The user who creates the pool and deposits the initial bond. - -### 2. **Bonding and Staking** - -- Users can **bond** their assets to a pool using the `join` function, effectively staking their tokens in exchange for pool rewards. The bond amount is transferred to the pool's account increasing the pool's total bond and the user receives pool tokens (LST) in return. -- Additional funds can be **bonded** using the `bond_extra` function. This allows users to add more tokens to their existing stake, either from free balance or pending rewards. - -### 3. **Unbonding** - -- Users can **unbond** their staked tokens with the `unbond` function, which allows them to withdraw up to a specified amount of their staked assets. When unbonding, users submit their LST tokens, which are then burned. The system ensures that the initial deposit plus any rewards are automatically collected and returned during the withdraw unbonding process. -- **Permissionless unbonding**: Under specific conditions, such as when a pool is blocked or destroying, users can unbond their tokens permissionlessly. - -### 4. **Withdrawals** - -- **Withdraw Unbonded**: Once tokens are unbonded, users can withdraw their funds using the `withdraw_unbonded` function. This function can also be used permissionlessly if certain conditions are met (e.g., the pool is destroying, or the caller is the depositor and the only member left). - -### 5. **Pool Management** - -- **Create Pools**: Users can create new delegation pools with the `create` function. A pool requires an initial deposit, and the creator can set the roles for root, nominator, and bouncer. Pools can also be created with a previously used pool ID using the `create_with_pool_id` function. -- **Pool State Management**: The state of a pool can be modified using the `set_state` function. Pools can have various states, such as open, blocked, or destroying. Once a pool enters the destroying state, it cannot change its state again. - -### 6. **Rewards and Commission** - -- **Commission**: Pools can set a commission percentage using the `set_commission` function. 
This determines the amount of rewards that are paid to the pool's commission account from the total rewards. -- **Claiming Commission**: Pending commission can be claimed using the `claim_commission` function, which adds the claimed amount to the total claimed commission and resets pending commission to zero. -- **Maximum Commission**: The maximum allowable commission can be set with the `set_commission_max` function, ensuring that future commissions are bound by this limit. -- **Commission Change Rate**: The rate at which the commission can change is controlled using the `set_commission_change_rate` function. - -### 7. **Adjusting Pool Deposits** - -- Pools must maintain an existential deposit (ED) in the reward account to ensure proper functionality. The `adjust_pool_deposit` function allows users to either top up the deficit or withdraw any excess deposit from the pool. - -### 8. **Claim Permissions** - -- The ability to claim pending commission is controlled by the `set_commission_claim_permission` function, which determines who can claim the pool's pending commission. This allows the pool owner to delegate the commission claim to a different account. - -### What is commission change rate? - -The commission change rate is the maximum increase allowed in a single commission update. Once set, the pool admin can only lower it. When setting the change rate, the admin can also set a minimum delay, measured in blocks since the last commission update, before the commission can be changed again (i.e. the minimum delay between commission updates). Once set, this minimum delay can only be increased. - -Max Commission and Change Rate do not have to be set; it is up to the pool admin to set these parameters and provide transparency to the pool members about the pool's commission policy. diff --git a/pages/restake/lst-rewards.mdx b/pages/restake/lst-rewards.mdx deleted file mode 100644 index 86bf9c44..00000000 --- a/pages/restake/lst-rewards.mdx +++ /dev/null @@ -1,11 +0,0 @@ -# Tangle Liquid Staking Rewards - -### 1. **Earning Staking Rewards with Liquidity** - -- **Benefit:** Liquid staking allows participants to earn staking rewards without locking their assets for extended periods. Users can continue to receive rewards while maintaining the ability to transfer or utilize their staked tokens elsewhere in the ecosystem. -- **Incentive:** This flexibility makes liquid staking appealing to users who want to maximize their asset efficiency without sacrificing liquidity. They can participate in DeFi activities, trade, or provide liquidity on decentralized exchanges, all while earning staking rewards. - -### 2. **Reward Compounding** - -- **Benefit:** Liquid staking offers the opportunity for users to easily compound their rewards by restaking their earnings or investing them in other protocols. -- **Incentive:** Participants can maximize their returns by reinvesting their staking rewards or using them in various DeFi protocols, creating a feedback loop of earning and compounding. This flexibility is especially appealing for users focused on maximizing their overall yield.
diff --git a/pages/restake/lst-working.mdx b/pages/restake/lst-working.mdx deleted file mode 100644 index ba7531cc..00000000 --- a/pages/restake/lst-working.mdx +++ /dev/null @@ -1,51 +0,0 @@ -import ExpandableImage from "../../components/ExpandableImage"; - -# Liquid Staking on Tangle Network - -## Liquid Staking Workflow - - - -### **Creating a Pool** - -The first step is to create a pool, this is permissionless and can be done by anyone. The pool is created with an initial deposit, which can be any amount of tokens, but must be greater than the minimum bond amount. - -If you are simply looking to liquid stake your assets, you don't have to create a pool. Instead, you can join an existing pool by using its unique id when bonding. - -More detailed instructions can be found at the [How to Create a Liquid Staking Pool](./create_a_pool/lst-pool-create-tangle.mdx) page. - -#### Who can create a pool? - -Pool creation is a permissionless process, which means that anyone can create a pool. The only requirement is that the initial deposit must be greater than the minimum bond amount. - -#### Why should I create my own pool? - -- **Choose Your Own Validators**: You can create a pool to nominate validators on behalf of your users. By creating a pool, you are responsible for the rewards of the assets in the pool. Always remember to always do due diligence on the validators you nominate. -- **Ongoing Maintenance**: You will need to nominate validators and ensure that the pool users are getting the best possible returns. In return for this service, you can set a commission rate for the pool. This means that you may need to regularly monitor the performance of the validators and update nominations. -- **Earn Commission**: If your pool is providing competitive reward rates, it will naturally attract more users, leading to higher liquidity, and thus increasing your commission earnings. - -### **Joining a Pool** - -A user initiates liquid staking by joining an open pool using the `join` function. This transfers the specified bond amount to the pool and allows the user to start earning rewards. - -When you join a pool, you receive liquid staking tokens (LSTs) in return. The amount of LST tokens you receive is based on the pool's reward rate and the amount of tokens you bond. - -#### Are all LST tokens the same? - -No, all LST tokens are not the same. Each pool is a separate entity with an associated LST, and the LST token of a pool is only valid for the pool it was created in. This means that the LST token of one pool is not accepted by another pool. - -This also means rewards are captured by the pool, and are not shared with other pools. - -### **Bonding Extra** - -Users can add additional tokens to their stake using the `bond_extra` function. The amount to be bonded can originate from free balance or from pending rewards. - -### **Unbonding and Withdrawing** - -When a user wants to exit the pool or reduce their stake, they can use the `unbond` function to unbond a portion of their stake. - -At the time of unbonding, the LST tokens are burned. There is a 28 day unbonding period for all unbonds. During this period, the unbonding tokens are locked and will not earn rewards. - -At the end of the unbonding period, you can withdraw your unbonded tokens. This is done using the `withdraw_unbonded` function. - -Learn more about how to execute these functions at the [Join a Liquid Staking Pool](./join_a_pool/tangle.mdx) page. 
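For developers, the same join / bond extra / unbond / withdraw lifecycle can also be driven from Solidity through the TangleLst precompile covered on the following developer pages (address `0x0000000000000000000000000000000000000809`). The sketch below is illustrative only: the contract name is hypothetical, `extraType` `0` bonds from free balance, and `numSlashingSpans` is passed as `0` for simplicity.

```solidity
// SPDX-License-Identifier: GPL-3.0-only
pragma solidity ^0.8.20;

/// Subset of the TangleLst precompile interface documented on the next pages.
interface ITangleLstSubset {
    function join(uint256 amount, uint256 poolId) external returns (uint8);
    function bondExtra(uint256 poolId, uint8 extraType, uint256 extra) external returns (uint8);
    function unbond(bytes32 memberAccount, uint256 poolId, uint256 unbondingPoints) external returns (uint8);
    function withdrawUnbonded(bytes32 memberAccount, uint256 poolId, uint32 numSlashingSpans) external returns (uint8);
}

/// Hypothetical helper walking through the pool lifecycle described above.
contract LstLifecycleSketch {
    ITangleLstSubset constant LST =
        ITangleLstSubset(0x0000000000000000000000000000000000000809);

    /// Join an existing pool with `amount` tokens and receive that pool's LST.
    function joinPool(uint256 poolId, uint256 amount) external {
        require(LST.join(amount, poolId) == 0, "join failed");
    }

    /// Top up an existing stake from free balance (extraType 0).
    function addToStake(uint256 poolId, uint256 extra) external {
        require(LST.bondExtra(poolId, 0, extra) == 0, "bondExtra failed");
    }

    /// Start unbonding: the LST is burned and the 28-day unbonding period begins.
    function startUnbond(bytes32 member, uint256 poolId, uint256 points) external {
        require(LST.unbond(member, poolId, points) == 0, "unbond failed");
    }

    /// After the unbonding period, withdraw the unbonded tokens.
    function withdraw(bytes32 member, uint256 poolId) external {
        require(LST.withdrawUnbonded(member, poolId, 0) == 0, "withdrawUnbonded failed");
    }
}
```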
diff --git a/pages/restake/lst_developers/_meta.ts b/pages/restake/lst_developers/_meta.ts deleted file mode 100644 index 3515da03..00000000 --- a/pages/restake/lst_developers/_meta.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - intro: "Introduction", - lst_precompile: "LST Precompile", -}; - -export default meta; diff --git a/pages/restake/lst_developers/intro.mdx b/pages/restake/lst_developers/intro.mdx deleted file mode 100644 index 2d595b26..00000000 --- a/pages/restake/lst_developers/intro.mdx +++ /dev/null @@ -1,25 +0,0 @@ -# Using Tangle LST in your project - -The Tangle LST pallet precompile lets Solidity contracts interact with the Tangle network's liquid staking functionality. -A precompiled contract is native Substrate code that has an Ethereum-style address and can be called using the Ethereum API, like any other smart contract. -Precompiles allow you to call the Substrate runtime directly, which is not normally accessible from the Ethereum side of Tangle. - -### How to use the precompile - -The precompile can be used like any other Solidity interface. - -You can import the precompile in your Solidity project like this: - -```solidity -import "LSTPrecompile.sol"; -``` - -Then, to join a liquid staking pool, you can use the `join` function: - -```solidity -function join(uint256 amount, uint256 poolId) external returns (uint8); -``` - -This function bonds the specified amount of tokens to the given pool and mints the corresponding amount of that pool's LST to the caller. - -More information about the precompile can be found [here](./lst_precompile.mdx). diff --git a/pages/restake/lst_developers/lst_precompile.mdx b/pages/restake/lst_developers/lst_precompile.mdx deleted file mode 100644 index 9b5942d7..00000000 --- a/pages/restake/lst_developers/lst_precompile.mdx +++ /dev/null @@ -1,190 +0,0 @@ -### TangleLstPrecompile - -The `TangleLstPrecompile` is a precompiled contract that facilitates interaction with the Tangle network's pool management functionality. It provides a comprehensive interface for users to manage their pool operations. - -The latest version of the precompile can be found [here](https://github.com/tangle-network/tangle/blob/main/precompiles/tangle-lst/TangleLst.sol). - -#### Address - -- **Contract Address**: `0x0000000000000000000000000000000000000809` - -This interface is designed to be used by Solidity contracts to interact with the TangleLst pallet, enabling complex pool management operations on the Tangle network. - -#### Interface - -```solidity -// SPDX-License-Identifier: GPL-3.0-only -pragma solidity >=0.8.3; - -/// @dev The TangleLst contract's address. -address constant TANGLE_LST = 0x0000000000000000000000000000000000000809; - -/// @dev The TangleLst contract's instance. -TangleLst constant TANGLE_LST_CONTRACT = TangleLst(TANGLE_LST); - -/// @author The Tangle Team -/// @title Pallet TangleLst Interface -/// @title The interface through which solidity contracts will interact with the TangleLst pallet -/// @custom:address 0x0000000000000000000000000000000000000809 -interface TangleLst { - /// @dev Join a pool with a specified amount. - /// @param amount The amount to join with. - /// @param poolId The ID of the pool to join. - function join(uint256 amount, uint256 poolId) external returns (uint8); - - /// @dev Bond extra to a pool. - /// @param poolId The ID of the pool. - /// @param extraType The type of extra bond (0 for FreeBalance, 1 for Rewards). - /// @param extra The amount of extra bond.
- function bondExtra(uint256 poolId, uint8 extraType, uint256 extra) external returns (uint8); - - /// @dev Unbond from a pool. - /// @param memberAccount The account of the member. - /// @param poolId The ID of the pool. - /// @param unbondingPoints The amount of unbonding points. - function unbond(bytes32 memberAccount, uint256 poolId, uint256 unbondingPoints) external returns (uint8); - - /// @dev Withdraw unbonded funds from a pool. - /// @param poolId The ID of the pool. - /// @param numSlashingSpans The number of slashing spans. - function poolWithdrawUnbonded(uint256 poolId, uint32 numSlashingSpans) external returns (uint8); - - /// @dev Withdraw unbonded funds for a member. - /// @param memberAccount The account of the member. - /// @param poolId The ID of the pool. - /// @param numSlashingSpans The number of slashing spans. - function withdrawUnbonded(bytes32 memberAccount, uint256 poolId, uint32 numSlashingSpans) external returns (uint8); - - /// @dev Create a new pool. - /// @param amount The initial amount to create the pool with. - /// @param root The root account of the pool. - /// @param nominator The nominator account of the pool. - /// @param bouncer The bouncer account of the pool. - /// @param name The name of the pool. - /// @param icon The icon for the pool. - function create(uint256 amount, bytes32 root, bytes32 nominator, bytes32 bouncer, bytes calldata name, bytes calldata icon) external returns (uint8); - - /// @dev Create a new pool with a specific pool ID. - /// @param amount The initial amount to create the pool with. - /// @param root The root account of the pool. - /// @param nominator The nominator account of the pool. - /// @param bouncer The bouncer account of the pool. - /// @param poolId The desired pool ID. - /// @param name The name of the pool. - /// @param icon The icon for the pool. - function createWithPoolId(uint256 amount, bytes32 root, bytes32 nominator, bytes32 bouncer, uint256 poolId, bytes calldata name, bytes calldata icon) external returns (uint8); - - /// @dev Nominate validators for a pool. - /// @param poolId The ID of the pool. - /// @param validators An array of validator accounts to nominate. - function nominate(uint256 poolId, bytes32[] calldata validators) external returns (uint8); - - /// @dev Set the state of a pool. - /// @param poolId The ID of the pool. - /// @param state The new state (0 for Open, 1 for Blocked, 2 for Destroying). - function setState(uint256 poolId, uint8 state) external returns (uint8); - - /// @dev Set metadata for a pool. - /// @param poolId The ID of the pool. - /// @param metadata The metadata to set. - function setMetadata(uint256 poolId, bytes calldata metadata) external returns (uint8); - - /// @dev Chill a pool (stop nominating validators). - /// @param poolId The ID of the pool. - function chill(uint256 poolId) external returns (uint8); - - /// @dev Set commission for a pool. - /// @param poolId The ID of the pool. - /// @param newCommission The new commission rate. - function setCommission(uint256 poolId, uint32 newCommission) external returns (uint8); - - /// @dev Claim commission for a pool. - /// @param poolId The ID of the pool. - function claimCommission(uint256 poolId) external returns (uint8); - - /// @dev Update roles for a pool. - /// @param poolId The ID of the pool. - /// @param newRoot The new root account. - /// @param newNominator The new nominator account. - /// @param newBouncer The new bouncer account. 
- function updateRoles(uint256 poolId, bytes32 newRoot, bytes32 newNominator, bytes32 newBouncer) external returns (uint8); - - /// @dev Adjust pool deposit. - /// @param poolId The ID of the pool. - function adjustPoolDeposit(uint256 poolId) external returns (uint8); - - /// @dev Set global configurations (only callable by root). - /// @param minJoinBond The minimum bond required to join a pool (0 for no change). - /// @param minCreateBond The minimum bond required to create a pool (0 for no change). - /// @param maxPools The maximum number of pools (0 for no change). - /// @param globalMaxCommission The global maximum commission percentage (0 for no change). - function setConfigs(uint256 minJoinBond, uint256 minCreateBond, uint32 maxPools, uint32 globalMaxCommission) external returns (uint8); -} -``` - -#### Pool Roles - -Each pool has three distinct roles: - -- **Root**: Administrator with full control over pool settings -- **Nominator**: Manages validator selection and staking strategy -- **Bouncer**: Controls member access and pool state - -#### Commission System - -Pools can set commission rates on rewards: - -- Commission is set as a percentage (0-100) -- Pool operators can claim accumulated commissions -- Global maximum commission limits are enforced by the network - -#### Example - -```solidity -contract PoolOperationsExample { - address constant precompileAddress = 0x0000000000000000000000000000000000000809; - ITangleLstPrecompile precompile = ITangleLstPrecompile(precompileAddress); - - function joinPool(uint256 amount, uint256 poolId) public returns (uint8) { - // Call the join function on the precompile contract - uint8 statusCode = precompile.join(amount, poolId); - - // Handle the status code as needed - require(statusCode == 0, "Join pool failed"); - - return statusCode; - } - - function createNewPool( - uint256 amount, - bytes32 root, - bytes32 nominator, - bytes32 bouncer, - string memory poolName - ) public returns (uint8) { - // Create a new pool with custom name and icon - uint8 statusCode = precompile.create( - amount, - root, - nominator, - bouncer, - bytes(poolName), - "" // Empty icon - ); - - require(statusCode == 0, "Pool creation failed"); - return statusCode; - } - - function nominateValidators( - uint256 poolId, - bytes32[] memory validators - ) public returns (uint8) { - // Nominate validators for a pool - uint8 statusCode = precompile.nominate(poolId, validators); - - require(statusCode == 0, "Nomination failed"); - return statusCode; - } -} -``` diff --git a/pages/restake/nominator.mdx b/pages/restake/nominator.mdx deleted file mode 100644 index aac104e9..00000000 --- a/pages/restake/nominator.mdx +++ /dev/null @@ -1,66 +0,0 @@ -# Nominating with Polkadot-JS - -## Bond Your Tokens - -1. Navigate to the "Staking" tab within the "Network" menu on the Polkadot-JS UI. -2. In the "Account actions" subsection (link), click the "+ Nominator" button. -3. Enter a "value bonded" less than your total TNT balance to cover transaction fees (at least 0.01 TNT). Keep a minimum of 0.1 TNT in your account to avoid the reaping threshold. -4. Select a payment destination for rewards, such as "Stash account (increase amount at stake)" to compound your earnings. - -## Nominate Validators - -1. Click "Nominate" on your bonded account. -2. Choose up to 24 validators to nominate. Be cautious, as you may be slashed if your validator misbehaves. -3. Confirm the transaction. Your nominations will become active in the next era (lasting six hours on the Tangle Network). -4. 
To claim rewards, you or your validator must submit a transaction specifying the validator ID and era index. Rewards are distributed automatically among the top nominators for that era. - -## Adjust Nominations - -To change your nominations, simply update your validator selections using the above methods. - -## Stop Nominating - -You can choose to unbond (unstake) your tokens and stop being a nominator at any time. However, there is an unbonding period that serves as a cooldown, during which you won't receive rewards. Your tokens will become transferable again after this period. - -### Step 1: Stop Nominating (Chill) - -1. Navigate to the "Network" > "Staking" > "Accounts" page on the Polkadot-JS UI. -2. Click the "Stop" button next to your account to stop nominating validators. - -If you don't see the "Stop" button, you're not currently nominating. - -### Step 2: Unbond Tokens - -- After chilling your account, click the three dots next to it and select "Unbond funds." You will need to wait out the unbonding period. - -### Step 3: Withdraw Unbonded Tokens - -Once the unbonding period is over, you can withdraw your unbonded tokens to make them transferable. - -- Click "Withdraw Unbonded" in the same menu as before, or click the blue padlock icon next to the "redeemable" balance. - -Your transferable balance will increase by the number of unbonded tokens. - -**Ensure your controller or staking proxy account has enough transferable balance to pay for transaction fees when unbonding. Otherwise, you may encounter an "InsufficientBalance" error. Transfer a small amount if needed to cover fees without dropping below the existential deposit.** - -## Claiming Rewards - -To claim pending payouts: - -1. Navigate to the "Payouts" tab under "Staking" on the Polkadot-JS UI. -2. Click "Payout all" and select your stash accounts. -3. Sign and submit the transaction to complete the payout process. - -## Using the Command-Line Interface (CLI) - -You can also manage your nominations using the @polkadot/api-cli package: - -1. Install the package globally with `npm install -g @polkadot/api-cli`. -2. Bond tokens: - `polkadot-js-api --seed "MNEMONIC_PHRASE" tx.staking.bond STASH_ADDRESS NUMBER_OF_TOKENS REWARD_DESTINATION --ws WEBSOCKET_ENDPOINT` -3. Nominate validators: - `polkadot-js-api --seed "MNEMONIC_PHRASE" tx.staking.nominate '["VALIDATOR_ADDRESS_1","VALIDATOR_ADDRESS_2"]' --ws wss://rpc.tangle.tools` diff --git a/pages/restake/restake-concepts.mdx b/pages/restake/restake-concepts.mdx deleted file mode 100644 index fce6edcf..00000000 --- a/pages/restake/restake-concepts.mdx +++ /dev/null @@ -1,76 +0,0 @@ -# Tangle Network Concepts - -This document introduces the core concepts necessary for understanding restaking and Actively Validated Services (AVS) in the Tangle Network ecosystem. It assumes the reader has a basic understanding of staking in a Proof-of-Stake (PoS) system. - -## Introduction - -Tangle Network is a decentralized infrastructure platform that enables the creation and deployment of secure, actively validated services called Blueprints. The network is powered by a unique restaking mechanism, which allows operators to stake their assets and earn rewards for providing computing resources and ensuring the security of the network. - -At its core, Tangle Network consists of three main components: - -1.
**Blueprints**: Developers create Blueprints, which are specifications for decentralized services. Blueprints define the functionality, requirements, and incentive structure of a service. - -2. **Restaking**: Operators stake their assets on Blueprints to participate in the network and earn rewards. Delegators can also restake their assets with operators to share in the rewards. - -3. **Service Instances**: Users can request the deployment of live service instances based on Blueprints. Operators are then selected to run these instances based on their staked assets and other criteria defined by the requester. - -Tangle Network's modular architecture and restaking mechanism create a powerful incentive system that encourages the development of valuable services, ensures the security of the network, and enables users to access a wide range of decentralized applications. - -## Restaking - -Restaking is the process of automatically reinvesting staking rewards earned by operators and nominators back into staking. This mechanism helps to compound staking returns over time, as the reinvested rewards increase the staked amount and, consequently, the potential future rewards. - -Key points about restaking: - -1. In Tangle, noderunners who have restaked tokens delegated to them or restake their own tokens may become Operators who can complete jobs related to deployed blueprints. -1. Restaking is optional and can be enabled or disabled by operators and delegators, who delegate their restaked tokens to an operator. -1. When enabled, a specified percentage of staking rewards are automatically added to the staked amount at the end of each era. -1. Restaked rewards are subject to the same unbonding period as regular staked funds when a operator or nominator chooses to withdraw them. - -Restaking encourages long-term commitment to the network and helps to maintain a stable and predictable staking participation rate. - -## Blueprints - -Blueprints are specifications that define a service, similar to an actively validated service (AVS). However, Blueprints themselves are not live service instances. Developers create Blueprints by specifying a "gadget" binary, the jobs involved, a set of smart contracts for registration and requesting instances, and additional metadata. - -Key aspects of Blueprints: - -1. Blueprints are meant to be leveraged infinitely many times, allowing developers to define a service once and have it instantiated multiple times by different users with varying operator requirements. -2. Operators restake their assets on Blueprints to participate in running service instances, ensuring the security of the services. -3. Users can deploy live service instances using a Blueprint by specifying criteria such as the number of operators or other operator attributes. -4. Service instances are not guaranteed to involve all restaking operators; the service requester may only require a threshold of participants or participants satisfying certain registration criteria. - -Examples of services that can be built using Blueprints include: - -1. Oracle services for providing external data to smart contracts. -2. Privacy-preserving computation services for executing sensitive business logic. -3. Specialized data feeds or APIs for specific use cases. - -## Actively Validated Services (AVS) Instances - -Actively Validated Services (AVS) are a unique feature of the Tangle Network that allows operators to offer additional services beyond block production and finalization. 
These services are "actively validated," meaning they are executed by the operator nodes and the results are included in the blockchain's state. - -Examples of AVS include: - -1. Oracle services for providing external data to smart contracts. -2. Privacy-preserving computation services for executing sensitive business logic. -3. Specialized data feeds or APIs for specific use cases. - -Key aspects of AVS: - -1. Operators can choose to offer one or more AVS, depending on their technical capabilities and business interests. -2. AVS providers can charge fees for their services, creating additional revenue streams beyond staking rewards. -3. The security and correctness of AVS are enforced by the same consensus mechanism that secures the Tangle Network blockchain. -4. AVS expand the utility and versatility of the Tangle Network, enabling a wider range of applications and use cases. - -Operators interested in providing AVS should carefully assess the technical requirements, market demand, and potential risks associated with offering such services. - -## Interaction between Restaking and AVS - -Restaking and AVS are complementary mechanisms that can enhance the overall sustainability and growth of the Tangle Network ecosystem: - -1. Operators that provide valuable AVS can attract more staking support from nominators, leading to higher staking rewards and more funds available for restaking. -2. Restaking helps operators to accumulate more stake over time, increasing their chances of being selected for block production and AVS execution. -3. The additional revenue from AVS fees can be partially restaked, further compounding the operator's staking returns. - -By understanding and leveraging the synergies between restaking and AVS, operators can maximize their participation in the Tangle Network and contribute to its long-term success. diff --git a/pages/restake/restake-introduction.mdx b/pages/restake/restake-introduction.mdx deleted file mode 100644 index 1b169c48..00000000 --- a/pages/restake/restake-introduction.mdx +++ /dev/null @@ -1,35 +0,0 @@ -import ExpandableImage from "../../components/ExpandableImage"; - -# Introduction to Tangle Restaking - -Restaking is an innovative concept in blockchain technology that allows validators and token holders to reuse their staked tokens to secure additional services and earn rewards without unstaking from the original network. This enhances the efficiency and utility of staked assets. Restaking can be provided through native staking mechanisms or through staking of existing liquid staked tokens, exposing the stake to additional rewards and slashing conditions. - -Tangle provides permissionless asset restaking customers deploying Blueprint service instances. Any asset created on or bridged to Tangle can be used as collateral to stake in our restaking infrastructure. These restaked assets, commonly in the form of LSTs, act as security collateral for service instances that are requested on-demand. The liquidity providers (the restakers) earn rewards proportional to the rewards issued to the services and Blueprints on Tangle, depending on the usage and utility of the services themselves. 
- -## Benefits of Tangle Restaking include: - -- Restaking any asset -- Securing on-demand service instances with unique assets -- Increased efficiency of staked capital by sharing it across instances -- Additional revenue streams for stakers and operators -- Boosted security for protocols leveraging new assets as security capital -- Innovation in new blockchain services by harnessing decentralized resources - -## How Tangle Network Uses Restaking - - - -Tangle Network has implemented a unique restaking system to allow its validator set to provide Actively Validated Services (AVS) to power advanced decentralized applications. Users can restake their TNT tokens to run service instances based on blueprints created by developers. - -Tangle noderunners can opt-in to restake a portion of their staked TNT tokens to provide AVS instances such as oracles, privacy-preserving computation, and more. In return, they earn service fees and additional inflation rewards on top of their base validation rewards. - -### Lifecycle of an AVS Instance - -1. Developers create blueprints that define the specifications and requirements for AVS instances. -2. Operators create a restaking profile and register to blueprints they want to operate. -3. Users instance Tangle Blueprints and select from the set of registered operators and restaked assets. -4. Operators then execute the AVS instances they approve and earn rewards. Failure to do so may result in penalties or reduced rewards. - -This restaking model allows Tangle to offer unique AVS instances powered by its decentralized validator and operator set. Developers can leverage these services to easily deploy advanced decentralized applications like trustless cross-chain bridges, privacy solutions, identity systems, and more. - -By restaking, Tangle operators gain additional revenue, the network gains efficiency from its staked supply, and the ecosystem gains access to powerful new primitives to fuel innovation. Restaking helps align incentives and harness the security of the underlying proof-of-stake blockchain for exciting new use cases. diff --git a/pages/restake/restake_developers/_meta.ts b/pages/restake/restake_developers/_meta.ts deleted file mode 100644 index 74b01e2e..00000000 --- a/pages/restake/restake_developers/_meta.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Meta } from "nextra"; - -const meta: Meta = { - integration: "Integration", - restake_precompile: "Restake Precompile", - services_precompile: "Services Precompile", -}; - -export default meta; diff --git a/pages/restake/restake_developers/integration.mdx b/pages/restake/restake_developers/integration.mdx deleted file mode 100644 index 9ca98d16..00000000 --- a/pages/restake/restake_developers/integration.mdx +++ /dev/null @@ -1,29 +0,0 @@ -# Integrating with Tangle Restaking - -To integrate with Tangle Restaking for the purposes of building a DApp or interacting with the restake functionality, you can use the Tangle MultiAssetDelegation and Services precompiles. These contracts provide logic to build new liquid restaking token pools and UIs over Tangle Restaking. - -Tangle MultiAssetDelegation data provides information about the current state of the Tangle network restake functionality. -If you are looking to use this restake data in your project or build your DAPP, you can use the tangle-restake-precompile library to interact with the restake functionality. 
A precompiled contract is native Substrate code that has an Ethereum-style address and can be called using the Ethereum API, like any other smart contract. -The precompiles allow you to call the Substrate runtime directly, which is not normally accessible from the Ethereum side of Tangle. - -### How to use the precompile - -The precompile can be used like any other Solidity interface. - -You can import the precompile in your Solidity project like this: - -```solidity -import "MultiAssetDelegationPrecompile.sol"; -``` - -Then, to deposit tokens into the restaking system, you can use the `deposit` function: - -```solidity -function deposit(uint256 assetId, uint256 amount) external returns (uint8); -``` - -This function deposits the specified amount of the given asset into the restaking system for the caller, which can then be delegated to an operator. - -More information about the precompile can be found [here](./restake_precompile.mdx). diff --git a/pages/restake/restake_developers/restake_precompile.mdx b/pages/restake/restake_developers/restake_precompile.mdx deleted file mode 100644 index a4a4308e..00000000 --- a/pages/restake/restake_developers/restake_precompile.mdx +++ /dev/null @@ -1,121 +0,0 @@ -### MultiAssetDelegationPrecompile - -The `MultiAssetDelegationPrecompile` is a precompiled contract that facilitates interaction with the Tangle network's restake functionality. It provides a comprehensive interface for operators and delegators to manage their assets and staking operations. - -The latest version of the precompile can be found [here](https://github.com/tangle-network/tangle/blob/main/precompiles/multi-asset-delegation/MultiAssetDelegation.sol). - -#### Address - -- **Contract Address**: `0x0000000000000000000000000000000000000822` - -This interface is designed to be used by Solidity contracts to interact with the MultiAssetDelegation pallet, enabling complex asset management and staking operations on the Tangle network. - -#### Interface - -```solidity -// SPDX-License-Identifier: GPL-3.0-only -pragma solidity >=0.8.3; - -/// @dev The MultiAssetDelegation contract's address. -address constant MULTI_ASSET_DELEGATION = 0x0000000000000000000000000000000000000822; - -/// @dev The MultiAssetDelegation contract's instance. -MultiAssetDelegation constant MULTI_ASSET_DELEGATION_CONTRACT = MultiAssetDelegation(MULTI_ASSET_DELEGATION); - -/// @author The Tangle Team -/// @title Pallet MultiAssetDelegation Interface -/// @title The interface through which solidity contracts will interact with the MultiAssetDelegation pallet -/// @custom:address 0x0000000000000000000000000000000000000822 -interface MultiAssetDelegation { - /// @dev Join as an operator with a bond amount. - /// @param bondAmount The amount to bond as an operator. - function joinOperators(uint256 bondAmount) external returns (uint8); - - /// @dev Schedule to leave as an operator. - function scheduleLeaveOperators() external returns (uint8); - - /// @dev Cancel the scheduled leave as an operator. - function cancelLeaveOperators() external returns (uint8); - - /// @dev Execute the leave as an operator. - function executeLeaveOperators() external returns (uint8); - - /// @dev Bond more as an operator. - /// @param additionalBond The additional amount to bond. - function operatorBondMore(uint256 additionalBond) external returns (uint8); - - /// @dev Schedule to unstake as an operator. - /// @param unstakeAmount The amount to unstake.
- function scheduleOperatorUnstake(uint256 unstakeAmount) external returns (uint8); - - /// @dev Execute the unstake as an operator. - function executeOperatorUnstake() external returns (uint8); - - /// @dev Cancel the scheduled unstake as an operator. - function cancelOperatorUnstake() external returns (uint8); - - /// @dev Go offline as an operator. - function goOffline() external returns (uint8); - - /// @dev Go online as an operator. - function goOnline() external returns (uint8); - - /// @dev Deposit an amount of an asset. - /// @param assetId The ID of the asset. - /// @param amount The amount to deposit. - function deposit(uint256 assetId, uint256 amount) external returns (uint8); - - /// @dev Schedule a withdrawal of an amount of an asset. - /// @param assetId The ID of the asset. - /// @param amount The amount to withdraw. - function scheduleWithdraw(uint256 assetId, uint256 amount) external returns (uint8); - - /// @dev Execute the scheduled withdrawal. - function executeWithdraw() external returns (uint8); - - /// @dev Cancel the scheduled withdrawal. - /// @param assetId The ID of the asset. - /// @param amount The amount to cancel withdrawal. - function cancelWithdraw(uint256 assetId, uint256 amount) external returns (uint8); - - /// @dev Delegate an amount of an asset to an operator. - /// @param operator The address of the operator. - /// @param assetId The ID of the asset. - /// @param amount The amount to delegate. - function delegate(bytes32 operator, uint256 assetId, uint256 amount) external returns (uint8); - - /// @dev Schedule an unstake of an amount of an asset as a delegator. - /// @param operator The address of the operator. - /// @param assetId The ID of the asset. - /// @param amount The amount to unstake. - function scheduleDelegatorUnstake(bytes32 operator, uint256 assetId, uint256 amount) external returns (uint8); - - /// @dev Execute the scheduled unstake as a delegator. - function executeDelegatorUnstake() external returns (uint8); - - /// @dev Cancel the scheduled unstake as a delegator. - /// @param operator The address of the operator. - /// @param assetId The ID of the asset. - /// @param amount The amount to cancel unstake. 
- function cancelDelegatorUnstake(bytes32 operator, uint256 assetId, uint256 amount) external returns (uint8); -} -``` - -#### Example - -```solidity -contract DepositExample { - address constant precompileAddress = 0x0000000000000000000000000000000000000822; - IMultiAssetDelegationPrecompile precompile = IMultiAssetDelegationPrecompile(precompileAddress); - - function depositAsset(uint256 assetId, uint256 amount) public returns (uint256) { - // Call the deposit function on the precompile contract - uint256 statusCode = precompile.deposit(assetId, amount); - - // Handle the status code as needed - require(statusCode == 0, "Deposit failed"); - - return statusCode; - } -} -``` diff --git a/pages/restake/restake_developers/services_precompile.mdx b/pages/restake/restake_developers/services_precompile.mdx deleted file mode 100644 index bf62193e..00000000 --- a/pages/restake/restake_developers/services_precompile.mdx +++ /dev/null @@ -1 +0,0 @@ -https://github.com/tangle-network/tangle/blob/main/precompiles/services/Services.sol diff --git a/pages/restake/staking-intro.mdx b/pages/restake/staking-intro.mdx deleted file mode 100644 index 27774381..00000000 --- a/pages/restake/staking-intro.mdx +++ /dev/null @@ -1,148 +0,0 @@ -# Introduction to Staking - -Staking is a fundamental aspect of blockchain networks that employ a Proof-of-Stake (PoS) consensus mechanism. It involves participants locking up tokens to secure the network, validate transactions, and, in return, earn rewards. This process is pivotal for maintaining network integrity, security, and continuity. By engaging in staking, token holders contribute to the network's resilience against attacks and improve the functionality of governance and new services on Tangle like multi-party computation (MPC.) - -## Proof-of-Stake (PoS) Explained - -Proof-of-Work (PoW) and Proof-of-Stake (PoS) are consensus mechanisms used to validate transactions and add new blocks to the blockchain in a decentralized manner. - -In PoW networks like Bitcoin, miners compete to solve complex mathematical puzzles. The first miner to solve the puzzle validates the block and is rewarded with cryptocurrency. However, this process requires vast amounts of computing power and electricity, making it energy-intensive and environmentally taxing. - -PoS networks like Tangle Network use a different approach to validate transactions while consuming far less energy. Instead of miners, PoS networks have validators who stake their coins to participate in the validation process. The more coins a validator stakes, the greater their chance of being selected to validate the next block. Validators are rewarded for honestly validating transactions. - -If validators act dishonestly, their staked coins can be partially or fully slashed, disincentivizing malicious behavior. The economic risks of losing their stake encourage validators to follow the protocol rules. This aligns the interests of validators with the stability and security of the network. - -In summary, PoS relies on validators with skin in the game to validate transactions through staking instead of PoW's energy-intensive mining puzzles. By tying validation to economic stakes, PoS promotes network security in an energy-efficient way. - -## Nominated Proof-of-Stake (nPoS) - -Tangle Network uses Nominated Proof-of-Stake (NPoS) to select validators for its consensus protocol in a novel way that enhances network security. - -NPoS incentivizes TNT holders to participate as nominators who can back validator candidates. 
Validator candidates signal their interest publicly. Nominators then submit a list of up to 16 candidates they want to support. - -The network distributes stakes evenly among the validator candidates to maximize economic security. The candidates with the most TNT backing are elected validators for the next era. - -Both nominators' and validators' stakes can be slashed for misbehavior, disincentivizing malicious actions, and encouraging nominators to support quality validators. At the same time, nominators also participate in the rewards distributed to the validator, typically 90-99% of the block reward earned by the validator is then broken up among a validator's set of nominators. - -Tangle Network uses advanced election algorithms based on the concept of Proportional Justified Representation to ensure fair validator selection and prevent centralized power. For details on the election algorithm that selects which validators participate in block production, see the [Phragmen Election Algorithm.](https://wiki.polkadot.network/docs/learn-phragmen#what-is-the-sequential-phragm%C3%A9n-method) - -In summary, NPoS cleverly leverages stakeholder participation and incentives to secure Tangle Network in a decentralized, trustless manner. - -### Roles in the Staking Ecosystem - -**Validators**: These are the network participants who lock up tokens as collateral to validate transactions and create new blocks. Validators are chosen based on the size of their stake and their reliability. They earn rewards for their service, which are typically shared with their nominators. - -**Nominators**: Nominators support validators by delegating their tokens to a validator's stake. This increases the validator's chance of being selected to validate transactions and, in turn, earns nominators a share of the rewards. -Staking Rewards and Risks - -Rewards are distributed to validators and their nominators as an incentive for their participation and the risks they take. These rewards are typically derived from transaction fees and inflationary mechanisms designed to encourage staking. However, staking is not without its risks. Participants can face "slashing", a penalty where a portion of the staked tokens is removed if the validator they support acts maliciously or fails to fulfill their validation duties properly. - -### Selecting and Nominating Validators - -Nominating validators on Tangle Network involves two key steps: - -1. Locking up TNT tokens on-chain through a process called bonding. -2. Selecting up to 16 validator candidates to back with your bonded tokens. - -#### Selecting Validators - -Choosing validators requires balancing rewards and risks to maximize your reward-to-risk ratio. Key criteria to evaluate include: - -- **Recent era points earned by the validator**: Above average signals recent activity. -- **Size of validator's self-stake**: this should be high to ensure they have a stake in the game and are aligned. -- **Total stake backing the validator:** If this is below the average validator stake, they will pay out more rewards. -- **Commission fee charged to nominators:** Typically lower is better, but a 1% minimum is common. -- **Verified identity status on-chain:** Tangle Network supports on-chain contact details for building trust. -- **Previous slash history:** Ideally they have not been slashed, showing their on-chain evidence of reliability. - -Paying attention to these factors helps nominators back validators that will maximize rewards while minimizing risks. 
- -Additionally, Validators should run nodes on robust infrastructure that meets software, hardware, and bandwidth requirements. Most validators use cloud hosting services that provide high availability and connectivity. - -Keep in mind active validators must stay online to avoid slashing penalties. If over 10% of active validators go offline, slashing is triggered. So beware of nominating multiple validators hosted by one provider - an outage could slash your stake. - -#### Nominating Validators - -Once validators are nominated, Tangle Network automatically allocates your bonded tokens to active validators each era. This provides flexibility compared to systems that lock you into a single validator. Tangle Network's nomination and automated allocation mechanisms optimize security through decentralized elections. - -Nominators on Tangle Network should carefully evaluate validators across these several criteria before nomination to maximize rewards and minimize risks. The system then provides ongoing flexibility in backing active validators. - -### Staking Rewards - -Validators who produce blocks earn token rewards, which they share with their nominators. Both validators and nominators receive staking rewards for locking up their TNT on-chain at the end of each era. - -Tangle Network pays equal rewards to all validators regardless of their staked amount. This prevents centralized power in a few large validators. There is a probabilistic component in calculating rewards, so they are not exactly equal for all validators. Validators can earn more era points and higher rewards by being active on-chain. - -After the validator's commission, staking rewards are distributed pro-rata amongst a validator's nominators based on each nominator's stake. - -### Slashing for Accountability - -Slashing is a penalty imposed on validators and their nominators for malicious behavior that threatens network security. Examples include double-signing transactions, censorship, and downtime. - -Slashing confiscates a portion of the validator's stake, disincentivizing attacks. The slashed tokens go to the network treasury rather than being burned or distributed as rewards. This allows the possibility of governance reversing improper slashes. - -The severity of the slash depends on the offense's impact on the network. Milder slashing applies to isolated unresponsiveness or equivocation. More severe slashing happens for misconduct with serious security or monetary risks like mass collusion. - -Validators with more backing get slashed more harshly to discourage nominators from over-concentrating with popular validators. Each validator is treated separately for slashing, so spreading nominations reduces potential losses. - -Slashing only applies to a nominator's active stake in a misbehaving validator. Inactive nominations are unaffected. The percentage slashed is proportional to the nominator's fraction of the total stake in that validator. - -There is a grace period after a slash where governance can reverse improper slashes before they take effect. Slashes become active after this period passes. - -In summary, slashing mainly punishes actual misbehavior rather than mistakes, while still disincentivizing actions that weaken network security and decentralization. The threat of losing stake keeps validators and nominators honest. - -### Monitor your Validators - -Nominating validators is not "set and forget." The system dynamics require nominators to periodically review validator performance and reputation. 
Failure to monitor could mean missed rewards or slashes. - -Criteria like era points will vary from era to era. One low-era point era does not necessitate switching validators. However consistent underperformance on era points or other issues may warrant nominating a better validator for returns and network health. - -Factors like identity, slash history, and commission rates are more stable. Optimize these for predictable rewards. The Polkadot Apps UI and the Tangle Network staking dashboard can guide you. - -### Claiming Your Staking Rewards - -Staking rewards accrue each era and must be claimed - they are not paid out automatically. Typically validators trigger payouts, but anyone can initiate reward distribution. - -Rewards remain claimable for a substantial number of eras - long enough that normal stakers need not worry about missing the window. Check the target network's specifics for the exact length. - -You can view and trigger unclaimed reward payouts for your nominated validators on the Staking Payouts page. Note that if anyone claims rewards for a validator you backed, you receive the payout even if you didn't personally trigger it. - -If no one claims your rewards before the expiration, or your validator fully unbonds, you lose the unclaimed rewards. Given unbonding takes multiple eras, check for unclaimed rewards at least that often. - -Rewards can go to the payout account or any other account. You can also top-up your bonded stake from rewards without fully unbonding. - -### Staking Securely with Stash and Proxy Accounts - -While many nominators use one account, a Stash account, Stakers can use two accounts for added security: - -**Stash account:** Holds the staked funds but delegates actions to a proxy. Can be kept offline (e.g. cold wallet) for security. Avoiding transactions here keeps history clean. -**Proxy account:** Acts on behalf of the stash, handling nominating and validating. Sets preferences like commission rates and reward payout accounts. Only needs funds for transaction fees. - -Benefits of this setup: - -- Proxy handles staking without accessing cold stash. No need to constantly use a hardware wallet. -- Stash transaction history stays clean as the proxy handles staking actions. -- Earned rewards can be automatically re-staked on stash for compounding. -- Caution: Never leave large balances on proxy accounts. As hot wallets, they are more exposed to hacks. Use stash or cold storage for funds not needed for staking transactions. - -### Unbonding and Withdrawing Stakes - -Tokens staked in the network are not immediately liquid. There's an "unbonding" period during which tokens cannot be transferred or used. To see the unbonding period, check the Parameters page. - -## Why Participate in Staking? - -Staking offers several benefits: - -- Rewards: Participants earn rewards, contributing to the potential for passive income. -- Network Security: Staking tokens help secure the network, making it more resilient against attacks. -- Governance: Stakers often have a say in network governance, influencing the future direction of the blockchain. - -Why Might One Hesitate to Stake? - -- Liquidity: Staked tokens are locked and cannot be traded or used until unbonded. -- Slashing Risks: Validators and nominators risk losing a portion of their stake if the validator behaves maliciously or negligently. -- Complexity: The process can be complex, especially for beginners. 
- -### Conclusion - -Staking is a critical mechanism for nPoS blockchain networks, offering a way to participate in and benefit from the network beyond mere token holding. It strengthens network security and decentralization while providing rewards to those who participate. diff --git a/pages/staking/_meta.ts b/pages/staking/_meta.ts new file mode 100644 index 00000000..87de3f61 --- /dev/null +++ b/pages/staking/_meta.ts @@ -0,0 +1,24 @@ +import { Meta } from "nextra"; + +const meta: Meta = { + "-- introduction": { + type: "separator", + title: "Introduction", + }, + introduction: "Staking Basics", + "staking-concepts": "Core Concepts", + "-- staking": { + type: "separator", + title: "Staking", + }, + "how-staking-works": "How Staking Works", + incentives: "Incentives", + how_to_stake: "How to Stake on Tangle", + "-- liquid-staking": { + type: "separator", + title: "Liquid Staking", + }, + "liquid-staking": "Liquid Delegation Vaults", +}; + +export default meta; diff --git a/pages/staking/how-staking-works.mdx b/pages/staking/how-staking-works.mdx new file mode 100644 index 00000000..ff1b98f0 --- /dev/null +++ b/pages/staking/how-staking-works.mdx @@ -0,0 +1,30 @@ +# How Staking Works + +Staking lets participants delegate assets to operators who run Blueprint services. Those assets back service execution and can be slashed if operators violate service requirements. + +Tangle provides permissionless, asset-configurable staking for Blueprint service instances. Any asset created on or bridged to Tangle can be used as collateral to stake on operators. Stakers earn rewards proportional to service usage and protocol incentive budgets. + +## Benefits of Tangle Staking include: + +- Staking any asset +- Securing on-demand service instances with unique assets +- Increased efficiency of staked capital by sharing it across instances +- Additional revenue streams for stakers and operators +- Boosted security for protocols leveraging new assets as security capital +- Innovation in new blockchain services by harnessing decentralized resources + +## How Staking Works on Tangle + +1. Developers publish Blueprints that define service requirements. +2. Operators register and opt into the Blueprints they want to run. +3. Stakers delegate assets to operators to provide economic security. +4. Customers instantiate services and select operators based on pricing and commitments. +5. Operators execute services and earn service fees; stakers share in rewards. + +Staking aligns operators, developers, and stakers around service reliability and makes it possible to secure on-demand services without a consensus-layer dependency. + +## Liquid Staking Option + +If you need a transferable staking position, use liquid delegation vaults. Vault shares are ERC20 tokens backed by a fixed operator + asset + blueprint selection. + +See [Liquid Staking](/staking/liquid-staking/introduction) for the detailed flow. 
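+
+As a closing illustration of the core (non-liquid) path, the sketch below shows roughly how steps 1 through 5 above map onto contract calls from a staker's side. It is a hypothetical example only: the interface name, function names, and parameter types are assumptions for illustration, not the actual `MultiAssetDelegation` ABI, and token approvals are omitted.
+
+```solidity
+pragma solidity ^0.8.20;
+
+// Hypothetical interface for illustration only; the real MultiAssetDelegation
+// contract may use different function names and parameter types.
+interface IMultiAssetDelegationLike {
+    function deposit(address asset, uint256 amount) external;
+    function delegate(address operator, address asset, uint256 amount) external;
+}
+
+contract StakeExample {
+    IMultiAssetDelegationLike public immutable staking;
+
+    constructor(address staking_) {
+        staking = IMultiAssetDelegationLike(staking_);
+    }
+
+    // Step 3 above: deposit an asset, then delegate it to an operator.
+    // Assumes this contract already holds the asset and has approved the staking contract.
+    function stakeWithOperator(address asset, uint256 amount, address operator) external {
+        staking.deposit(asset, amount);
+        staking.delegate(operator, asset, amount);
+    }
+}
+```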
diff --git a/pages/developers/precompiles/features/_meta.ts b/pages/staking/how_to_stake/_meta.ts similarity index 60% rename from pages/developers/precompiles/features/_meta.ts rename to pages/staking/how_to_stake/_meta.ts index 0fa02cf2..76e5aff8 100644 --- a/pages/developers/precompiles/features/_meta.ts +++ b/pages/staking/how_to_stake/_meta.ts @@ -1,7 +1,7 @@ import { Meta } from "nextra"; const meta: Meta = { - governance: "Governance Related", + how_to_stake_tangle: "How to Stake: Tangle DApp", }; export default meta; diff --git a/pages/restake/how_to_restake/how_to_restake_polkadotjs/_meta.ts b/pages/staking/how_to_stake/how_to_stake_tangle/_meta.ts similarity index 100% rename from pages/restake/how_to_restake/how_to_restake_polkadotjs/_meta.ts rename to pages/staking/how_to_stake/how_to_stake_tangle/_meta.ts diff --git a/pages/restake/how_to_restake/how_to_restake_tangle/delegate.mdx b/pages/staking/how_to_stake/how_to_stake_tangle/delegate.mdx similarity index 63% rename from pages/restake/how_to_restake/how_to_restake_tangle/delegate.mdx rename to pages/staking/how_to_stake/how_to_stake_tangle/delegate.mdx index 4abd6a12..74ccaad3 100644 --- a/pages/restake/how_to_restake/how_to_restake_tangle/delegate.mdx +++ b/pages/staking/how_to_stake/how_to_stake_tangle/delegate.mdx @@ -3,31 +3,31 @@ import Callout from "/components/Callout"; ## Delegate Using Tangle DApp -You should have deposited your tokens to the multiasset delegation vault before you can delegate. See the [Deposit Using Tangle DApp page](./deposit-tangle.mdx) for more information. If you have already deposited **and** delegated your assets under the **Deposit** tab, you can skip this step. +You should have deposited your tokens before you can delegate. See the [Deposit Using Tangle DApp page](./deposit.mdx) for more information. If you have already deposited **and** delegated your assets under the **Deposit** tab, you can skip this step. -Delegators are similar to stakers in a consensus system like nominated proof of stake (NPoS), but they delegate their tokens to an operator, and participate in the rewards and risks similar to staking on a validator. +Delegators delegate assets to an operator and participate in rewards (and risks, including slashing) based on that operator’s performance and the blueprint’s rules. ### Step 1: Access Tangle DApp & Connect Wallet -- Open [Tangle DApp's Restaking: Delegate page](https://app.tangle.tools/restake/stake). +- Open [Tangle DApp's Staking: Delegate page](https://app.tangle.tools/staking/stake). - Connect your wallet to the DApp by clicking on the **Connect Wallet** button on the top right and selecting your preferred wallet provider. ### Step 2: Delegate -1. Switch to the **Stake** tab on the Restaking page. +1. Switch to the **Stake** tab on the Staking page. 2. Ensure that you're connected to the appropriate network. You can switch networks by using the dropdown located at the top right corner of the page. 3. Click on the **Asset** dropdown, and select an asset from the modal. If the asset that you're looking for is not listed, ensure that you have correctly performed the deposit operation for that asset. Please note that your asset balance might be lower than expected or not available if you have already deposited and delegated at once under the **Deposit** tab. 4. Click on the **Select Operator** dropdown, and choose an operator from the list. You can also search for specific operators by entering their address in the search bar. 5. 
Enter the amount of assets that you'd like to delegate to the operator. -6. Review fees, unstake period, and bond less delay before proceeding. +6. Review fees, unstake period, and any unstake delay before proceeding. 7. If all the information is correct, the **Delegate** button should be enabled. Click on it to initiate the transaction. -![Delegate Steps](/images/restake/delegate/dapp-steps.png) +![Delegate Steps](/images/staking/delegate/dapp-steps.png) ### Step 3: Sign and Submit the Transaction - Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee. - If successful, you should see the following confirmation toast notification: -![Delegate Transaction Confirmation Toast](/images/restake/delegate/delegate-tx-confirmation.png) +![Delegate Transaction Confirmation Toast](/images/staking/delegate/delegate-tx-confirmation.png) diff --git a/pages/restake/how_to_restake/how_to_restake_tangle/deposit.mdx b/pages/staking/how_to_stake/how_to_stake_tangle/deposit.mdx similarity index 69% rename from pages/restake/how_to_restake/how_to_restake_tangle/deposit.mdx rename to pages/staking/how_to_stake/how_to_stake_tangle/deposit.mdx index 1ce4b565..8206f2b9 100644 --- a/pages/restake/how_to_restake/how_to_restake_tangle/deposit.mdx +++ b/pages/staking/how_to_stake/how_to_stake_tangle/deposit.mdx @@ -1,29 +1,29 @@ ## Deposit Using Tangle DApp -Depositing is the process of allocating assets to the multiasset delegation vault. Deposits are required to participate in restaking (delegate). +Depositing is the process of allocating assets to the multiasset delegation vault. Deposits are required to participate in staking (delegate). -Users can deposit LST assets to the multiasset delegation vault. +Users can deposit supported assets to the multi-asset delegation vault. ### Step 1: Access Tangle DApp & Connect Wallet -- Open [Tangle DApp's Restaking: Deposit page](https://app.tangle.tools/restake/deposit). +- Open [Tangle DApp's Staking: Deposit page](https://app.tangle.tools/staking/deposit). - Connect your wallet to the DApp by clicking on the **Connect Wallet** button on the top right and selecting your preferred wallet provider. ### Step 2: Deposit Assets -1. Switch to the **Deposit** tab on the Restaking page. +1. Switch to the **Deposit** tab on the Staking page. 2. Select the appropriate network from the dropdown list. 3. Select an asset from the modal. 4. Enter the desired amount to deposit. 5. (Optional) If you'd like to also delegate your deposit right away: Click on the **Select Operator** dropdown, and choose an operator from the list. If you'd like to delegate later, skip this step. [Learn how to delegate later](./delegate.mdx). -6. Review any fees, APY, and other key information before proceeding. +6. Review any fees, incentive estimates, and other key information before proceeding. 7. If all the information is correct, the **Deposit + Delegate** (or **Deposit** if just depositing) button should be enabled. Click on it to initiate the transaction. -![Deposit Steps](/images/restake/deposit/dapp-steps.png) +![Deposit Steps](/images/staking/deposit/dapp-steps.png) ### Step 3: Sign and Submit the Transaction - Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee. 
- If successful, you should see the following confirmation toast notification: -![Deposit Transaction Confirmation Toast](/images/restake/deposit/deposit-tx-confirmation.png) +![Deposit Transaction Confirmation Toast](/images/staking/deposit/deposit-tx-confirmation.png) diff --git a/pages/restake/how_to_restake/how_to_restake_tangle/unstake.mdx b/pages/staking/how_to_stake/how_to_stake_tangle/unstake.mdx similarity index 88% rename from pages/restake/how_to_restake/how_to_restake_tangle/unstake.mdx rename to pages/staking/how_to_stake/how_to_stake_tangle/unstake.mdx index b4f50936..38fedfc5 100644 --- a/pages/restake/how_to_restake/how_to_restake_tangle/unstake.mdx +++ b/pages/staking/how_to_stake/how_to_stake_tangle/unstake.mdx @@ -1,6 +1,6 @@ ## Unstake Using Tangle DApp -The first step to exit restake is to unstake your tokens. This is done by calling the `unstake` function internally, which releases the locked assets and returns them to the deposit vault. +The first step to exit staking is to unstake your tokens. This is done by calling the `unstake` function internally, which releases the locked assets and returns them to the deposit vault. Then, you can withdraw your assets from the deposit vault. See the [Withdraw page](./withdraw.mdx) for more information. @@ -11,7 +11,7 @@ The unstake is a two step process: ### Step 1: Access Tangle DApp & Connect Wallet -- Open [Tangle DApp's Restaking: Unstake page](https://app.tangle.tools/restake/unstake). +- Open [Tangle DApp's Staking: Unstake page](https://app.tangle.tools/staking/unstake). - Connect your wallet to the DApp by clicking on the **Connect Wallet** button on the top right and selecting your preferred wallet provider. ### Step 2: Schedule Unstake @@ -23,14 +23,14 @@ Once you're at the **Unstake** tab, you'll see a table on the right listing the 3. Review fees and unstake period before proceeding. 4. Click on the **Schedule Unstake** button to initiate the transaction. -![Unstake Steps](/images/restake/unstake/steps.png) +![Unstake Steps](/images/staking/unstake/steps.png) ### Step 3: Sign and Submit the Transaction - Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee. - If successful, you should see the following confirmation toast notification: -![Unstake Transaction Confirmation Toast](/images/restake/unstake/unstake-tx-confirmation.png) +![Unstake Transaction Confirmation Toast](/images/staking/unstake/unstake-tx-confirmation.png) ### Cancel an Unstaking Request @@ -38,7 +38,7 @@ If you've scheduled an unstake request, you can choose to **cancel** it if you c Simply select the unstake request(s) that you'd like to cancel, then click on the **Cancel Unstake** button to initiate the transaction. -![Unstake Requests Table: Cancel Unstake](/images/restake/unstake/cancel-unstake.png) +![Unstake Requests Table: Cancel Unstake](/images/staking/unstake/cancel-unstake.png) ### Execute an Unstaking Request diff --git a/pages/restake/how_to_restake/how_to_restake_tangle/withdraw.mdx b/pages/staking/how_to_stake/how_to_stake_tangle/withdraw.mdx similarity index 89% rename from pages/restake/how_to_restake/how_to_restake_tangle/withdraw.mdx rename to pages/staking/how_to_stake/how_to_stake_tangle/withdraw.mdx index e69df300..7a7f79b0 100644 --- a/pages/restake/how_to_restake/how_to_restake_tangle/withdraw.mdx +++ b/pages/staking/how_to_stake/how_to_stake_tangle/withdraw.mdx @@ -19,25 +19,25 @@ The first step to complete a withdraw is to schedule a withdrawal. 
This is done ### Step 1: Access Tangle DApp & Connect Wallet -- Open [Tangle DApp's Restaking: Withdraw page](https://app.tangle.tools/restake/withdraw). +- Open [Tangle DApp's Staking: Withdraw page](https://app.tangle.tools/staking/withdraw). - Connect your wallet to the DApp by clicking on the **Connect Wallet** button on the top right and selecting your preferred wallet provider. ### Step 2: Schedule Withdraw -1. Select the **Withdraw** tab on the Restaking page. +1. Select the **Withdraw** tab on the Staking page. 2. Click on the **Asset** dropdown, and select an asset from the modal. 3. Enter the amount of assets that you'd like to withdraw. 4. Review any fees and the withdraw delay before proceeding. 5. Click on the **Schedule Withdraw** button to initiate the transaction. -![Withdraw Steps](/images/restake/withdraw/steps.png) +![Withdraw Steps](/images/staking/withdraw/steps.png) ### Step 3: Sign and Submit the Transaction - Sign and submit the transaction. Make sure the account you are using has enough balance to cover the transaction fee. - If successful, you should see the withdraw request appear in the table at the right side of the page, and also the following confirmation toast notification: -![Withdraw Transaction Confirmation Toast](/images/restake/withdraw/withdraw-tx-confirmation.png) +![Withdraw Transaction Confirmation Toast](/images/staking/withdraw/withdraw-tx-confirmation.png) ### Cancel a Withdraw Request @@ -45,7 +45,7 @@ If you've scheduled a withdraw request, you can choose to **cancel** it if you c Simply select the withdraw request(s) that you'd like to cancel, then click on the **Cancel Withdraw** button to initiate the transaction. -![Unstake Requests Table: Cancel Withdraw](/images/restake/withdraw/cancel-withdraw.png) +![Unstake Requests Table: Cancel Withdraw](/images/staking/withdraw/cancel-withdraw.png) ### Execute a Withdraw Request diff --git a/pages/restake/incentives/_meta.ts b/pages/staking/incentives/_meta.ts similarity index 66% rename from pages/restake/incentives/_meta.ts rename to pages/staking/incentives/_meta.ts index 64998d13..a5fbcae2 100644 --- a/pages/restake/incentives/_meta.ts +++ b/pages/staking/incentives/_meta.ts @@ -1,7 +1,8 @@ import { Meta } from "nextra"; const meta: Meta = { - how_rewards_work: "Overview", + how_rewards_work: "How Rewards Work", + claiming: "Claiming", vaults: "Vaults", configs: "Vault Configs", }; diff --git a/pages/staking/incentives/claiming.mdx b/pages/staking/incentives/claiming.mdx new file mode 100644 index 00000000..6051dc7e --- /dev/null +++ b/pages/staking/incentives/claiming.mdx @@ -0,0 +1,121 @@ +--- +title: Claiming Cheatsheet +description: One place to see where rewards accrue, how to visualize them, and how to claim efficiently. +--- + +# Claiming Cheatsheet + +This page answers three common questions: + +1. **Where do my rewards accrue?** +2. **How do I visualize them by source?** +3. **How do I claim with the fewest transactions?** + +## Quick Map (By Contract) + +- **Core protocol service fees (operators):** `Tangle` (`claimRewards`, `claimRewardsBatch`, `claimRewardsAll`). +- **Staker service fees + staker inflation:** `ServiceFeeDistributor` (`claimAll`, `claimAllBatch`, `claimFor`). +- **Staking incentives (TNT):** `RewardVaults` (`claimDelegatorRewards`, `claimDelegatorRewardsBatch`). +- **InflationPool merit rewards:** `InflationPool` (`claimOperatorRewards`, `claimCustomerRewards`, `claimDeveloperRewards`). 
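+
+As a sketch of how a delegator-side contract might batch the calls in this map, the example below declares minimal interfaces using only the function names listed above. Parameter types and the claim-attribution model are assumptions (claims are assumed to accrue to the caller, for example a vault or smart account acting as the delegator); check the tnt-core sources referenced at the bottom of this page for the canonical ABI.
+
+```solidity
+pragma solidity ^0.8.20;
+
+// Minimal interface sketches based only on the function names above.
+// Parameter types are assumptions, not the canonical tnt-core ABI.
+interface IServiceFeeDistributorLike {
+    function claimAllBatch(address[] calldata tokens) external;
+}
+
+interface IRewardVaultsLike {
+    function claimDelegatorRewardsBatch(address asset, address[] calldata operators) external;
+}
+
+contract DelegatorClaims {
+    IServiceFeeDistributorLike public immutable feeDistributor;
+    IRewardVaultsLike public immutable rewardVaults;
+
+    constructor(address feeDistributor_, address rewardVaults_) {
+        feeDistributor = IServiceFeeDistributorLike(feeDistributor_);
+        rewardVaults = IRewardVaultsLike(rewardVaults_);
+    }
+
+    // Claim service fees for several payment tokens plus TNT incentives for one asset.
+    // Assumes this contract is itself the delegator, so claimed rewards land here.
+    function claimEverything(address[] calldata paymentTokens, address stakingAsset, address[] calldata operators) external {
+        feeDistributor.claimAllBatch(paymentTokens);
+        rewardVaults.claimDelegatorRewardsBatch(stakingAsset, operators);
+    }
+}
+```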
+ +## Delegator (Staker) + +**Where rewards accrue** + +- **Service fees + staker inflation:** `ServiceFeeDistributor` (per payment token). +- **TNT incentives:** `RewardVaults` (per staking asset). + +**How to visualize** + +- `ServiceFeeDistributor.pendingRewards(delegator, token)` +- `ServiceFeeDistributor.delegatorOperators(delegator)` and `delegatorAssets(delegator, operator)` +- `RewardVaults.pendingDelegatorRewards(asset, delegator, operator)` +- `RewardVaults.pendingDelegatorRewardsAll(asset, delegator)` +- `RewardVaults.getDelegatorPositions(asset, delegator)` + +**How to claim** + +- **Per token:** `ServiceFeeDistributor.claimAll(token)` +- **Multiple tokens in one tx:** `ServiceFeeDistributor.claimAllBatch(tokens)` +- **TNT incentives:** `RewardVaults.claimDelegatorRewardsBatch(asset, operators)` + +**Tx count expectation** + +- 1 tx per token in `ServiceFeeDistributor`, plus 1 tx per asset in `RewardVaults` (or use multicall). + +## Operator + +**Where rewards accrue** + +- **Service fee operator share:** `Tangle` pending rewards per payment token. +- **TNT operator incentives:** `InflationPool` pending operator rewards. +- **Optional TNT commission (from delegators):** `RewardVaults` pending commission per asset. + +**How to visualize** + +- `Tangle.rewardTokens(operator)` + `Tangle.pendingRewards(operator, token)` +- `InflationPool.pendingOperatorRewards(operator)` +- `RewardVaults.pendingOperatorCommission(asset, operator)` + +**How to claim** + +- **All tokens:** `Tangle.claimRewardsAll()` +- **Specific tokens:** `Tangle.claimRewardsBatch(tokens)` +- **Inflation incentives:** `InflationPool.claimOperatorRewards()` +- **Vault commission:** `RewardVaults.claimOperatorCommission(asset)` + +**Tx count expectation** + +- 1 tx for all operator service-fee tokens + 1 tx for InflationPool + 1 tx per reward-vault asset. + +## Customer + +**Where rewards accrue** + +- **Optional TNT incentives:** `InflationPool` pending customer rewards (only if weights allocate customer incentives). + +**How to visualize** + +- `InflationPool.pendingCustomerRewards(customer)` + +**How to claim** + +- `InflationPool.claimCustomerRewards()` (single tx) + +## Developer + +**Where rewards accrue** + +- **Service fees:** paid directly to the developer address at payment time (no claim). +- **Optional TNT incentives:** `InflationPool` pending developer rewards. + +**How to visualize** + +- `InflationPool.pendingDeveloperRewards(developer)` + +**How to claim** + +- `InflationPool.claimDeveloperRewards()` (single tx) + +## When To Use Multicall + +If you want a **single transaction** across multiple contracts (e.g., claim service fees + vault incentives + inflation rewards together), use a multicall. 
+ +A typical call bundle for a staker might include: + +- `ServiceFeeDistributor.claimAllBatch(tokens)` +- `RewardVaults.claimDelegatorRewardsBatch(asset, operators)` (per asset) + +## Code References + +- `src/v2/core/Payments.sol` (operator service-fee claims) +- `src/v2/rewards/ServiceFeeDistributor.sol` (staker fee + staker inflation claims) +- `src/v2/rewards/RewardVaults.sol` (TNT vault incentives + operator commissions) +- `src/v2/rewards/InflationPool.sol` (operator/customer/developer inflation rewards) + +## Tests Worth Reading + +- `test/v2/tangle/Payments.t.sol` +- `test/v2/rewards/ServiceFeeDistributor.t.sol` +- `test/v2/Rewards.t.sol` +- `test/v2/InflationPool.t.sol` diff --git a/pages/staking/incentives/configs.mdx b/pages/staking/incentives/configs.mdx new file mode 100644 index 00000000..392cdeec --- /dev/null +++ b/pages/staking/incentives/configs.mdx @@ -0,0 +1,46 @@ +--- +title: Reward Configs +description: Key on-chain knobs for incentive programs and fee splits. +--- + +# Reward Configs + +This page summarizes the main on-chain configuration points for staking incentives. Exact addresses are deployment-dependent. + +## `RewardVaults` (Asset Incentives) + +Governance configures per-asset incentive vaults: + +- `createVault(asset, depositCap)` +- `updateVaultConfig(asset, depositCap)` +- `deactivateVault(asset)` +- `setOperatorCommission(newBps)` +- `setLockDurations(oneMonth, twoMonths, threeMonths, sixMonths)` + +## `InflationPool` (Funding and Distribution) + +`InflationPool` is pre-funded with TNT and distributes it in epochs: + +- `fund(amount)` (requires funder role) +- `distributeEpoch()` (anyone can call when ready) +- `distributeEpochWithServices(serviceIds)` (DISTRIBUTOR_ROLE; required for staker inflation) +- `setWeights(stakingBps, operatorsBps, customersBps, developersBps, stakersBps)` +- `setEpochLength(seconds)` +- `setStakerInflationConfig(tangle, serviceFeeDistributor)` + +The staking portion of each epoch is transferred to `RewardVaults`. + +## Service Fee Splits + +Service payments are split between developer / protocol / operators / stakers: + +- `Tangle.setPaymentSplit({ developerBps, protocolBps, operatorBps, stakerBps })` + +If staker fee distribution is enabled, the protocol routes staker shares to `ServiceFeeDistributor`. + +## Wiring Checklist (One-Time Deployment Setup) + +- `Tangle.setServiceFeeDistributor(distributor)` +- `MultiAssetDelegation.setServiceFeeDistributor(distributor)` +- `ServiceFeeDistributor.setInflationPool(inflationPool)` +- `InflationPool.setStakerInflationConfig(tangle, distributor)` diff --git a/pages/staking/incentives/how_rewards_work.mdx b/pages/staking/incentives/how_rewards_work.mdx new file mode 100644 index 00000000..f2a20189 --- /dev/null +++ b/pages/staking/incentives/how_rewards_work.mdx @@ -0,0 +1,93 @@ +--- +title: How Rewards Work +description: Learn how staking incentives work on Tangle and how operators and stakers earn. +--- + +# How Rewards Work + +Staking incentives on Tangle come from two sources: + +1. **Service fee revenue** paid by customers (in the service's payment token). +2. **TNT incentives** funded by governance (pre-funded, no minting). + +## ELI5 + +If you delegate to an operator that runs useful services, you earn: + +- **A share of the fees** those services pay (in the same token the customer used). 
+- **Optional TNT incentives** when the protocol has funded an incentives budget: _"I also earn TNT from RewardVaults plus optional staker inflation."_ + +You choose your exposure scope (All vs Fixed), and your share is calculated from your delegated amount and lock multiplier. + +## How The Money Moves On-Chain + +1. **A service is paid** (one-time or subscription). +2. `Payments.sol` splits the payment into developer, protocol, operator, and staker shares. +3. Operator shares accrue as pending rewards in the core protocol. +4. Staker shares are forwarded **per operator** to `ServiceFeeDistributor` (or to treasury if no distributor is configured). +5. `ServiceFeeDistributor` allocates those fees to delegators by score: + - Score = principal × lock multiplier (per asset, per operator). + - All vs Fixed blueprint selection gates which pools you participate in. + - Optional USD weighting and security commitments can scale the effective exposure. +6. **Optional TNT incentives** are distributed from `InflationPool` in epochs: + - The staking portion funds `RewardVaults` (TNT incentives by asset deposit and lock). + - If `stakersBps > 0`, a staker TNT budget is distributed by **service exposure** and routed through `ServiceFeeDistributor`. + +## Why This Design + +- **Single fee distributor**: All staker service-fee payouts use `ServiceFeeDistributor`, so the math is consistent across tokens. +- **Explicit budgets**: TNT incentives only exist if governance funds `InflationPool`. There is no hidden minting. +- **Exposure fairness without per-user loops**: Exposure is computed at distribution time (USD-weighted where possible), avoiding per-delegator loops in inflation logic. + +## How To Get Rewarded (Practical Steps) + +**As a staker** + +- Deposit and delegate in `MultiAssetDelegation`. +- Choose `All` or `Fixed` blueprint selection. +- Optional: add a lock multiplier for a higher reward score. +- Claim: + - TNT incentives from `RewardVaults`. + - Service-fee + staker-inflation rewards from `ServiceFeeDistributor`. + +**As an operator** + +- Self-stake `minOperatorStake` to register. +- Join services and set exposure and commitments responsibly. +- Claim operator service fees from the core protocol. +- Claim TNT commission from `RewardVaults` if you opt into commission. + +**As a blueprint developer** + +- Publish a blueprint and manage services. +- Earn the developer split from each service payment. + +## Examples + +**Example 1: Service fees (USDC)** +Alice delegates 100 wstETH to Operator O, All mode, no lock. +Bob delegates 200 wstETH to Operator O, All mode, 3-month lock (1.3x). +A service pays 10 USDC and the staker share is 2 USDC. +Alice score = 100, Bob score = 260, total = 360. +Alice earns ~0.56 USDC, Bob earns ~1.44 USDC. + +**Example 2: Inflation staker budget (TNT)** +An epoch has 1,000 TNT with stakersBps = 20%. +The keeper calls `distributeEpochWithServices` with active services. +InflationPool allocates 200 TNT by USD exposure per service/operator and +routes those TNT to `ServiceFeeDistributor`, where delegators claim as usual. 
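+
+The pro-rata arithmetic in Example 1 can be written out as a small helper. This is a sketch of the math only (score = principal x lock multiplier, payout = staker share x score / total score), not code from the reward contracts, and the basis-point encoding of the lock multiplier is an assumption.
+
+```solidity
+pragma solidity ^0.8.20;
+
+// Sketch of the pro-rata reward math; not taken from the reward contracts.
+library ProRataSketch {
+    // Lock multipliers in basis points (10_000 = 1.0x, 13_000 = 1.3x): an assumed encoding.
+    function score(uint256 principal, uint256 lockMultiplierBps) internal pure returns (uint256) {
+        return principal * lockMultiplierBps / 10_000;
+    }
+
+    // Example 1: stakerShare = 2 USDC, Alice score = 100, total score = 360 -> ~0.56 USDC.
+    function payout(uint256 stakerShare, uint256 delegatorScore, uint256 totalScore) internal pure returns (uint256) {
+        return stakerShare * delegatorScore / totalScore;
+    }
+}
+```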
+ +## Where To Look In Code And Tests + +- Payment split and staker routing: `src/v2/core/Payments.sol` +- Staker fee distribution and claims: `src/v2/rewards/ServiceFeeDistributor.sol` +- Inflation budgets and staker exposure allocation: `src/v2/rewards/InflationPool.sol` +- TNT staking incentives: `src/v2/rewards/RewardVaults.sol` +- Tests: + - `test/v2/tangle/Payments.t.sol` + - `test/v2/rewards/ServiceFeeDistributor.t.sol` + - `test/v2/rewards/ServiceFeeDistributorStreaming.t.sol` + - `test/v2/Rewards.t.sol` + - `test/v2/InflationPool.t.sol` + +See [Claiming Cheatsheet](/staking/incentives/claiming) for one-tx claim options and multicall tips. diff --git a/pages/staking/incentives/vaults.mdx b/pages/staking/incentives/vaults.mdx new file mode 100644 index 00000000..63adec10 --- /dev/null +++ b/pages/staking/incentives/vaults.mdx @@ -0,0 +1,31 @@ +--- +title: Vaults (Terminology) +description: Clarify the different “vault” concepts used in staking and incentives. +--- + +# Vaults (Terminology) + +In Tangle documentation, “vault” can refer to different on-chain components. This page clarifies the terminology. + +## `RewardVaults` (TNT Incentives Per Asset) + +- `RewardVaults` is an on-chain system that pays **TNT incentives** for delegated assets. +- It maintains **one vault per asset** (native, TNT, etc.). +- Vaults are configured by governance (deposit cap + active status). +- Rewards are funded from `InflationPool` (pre-funded, no minting). +- Rewards are split across delegators by score (principal × lock multiplier), and operators can take a commission. +- Deposit caps are hard limits: deposits above the cap are rejected until capacity is freed. + +Why this exists: + +- Keeps incentive budgets explicit (only distributed if funded). +- Keeps per-asset incentives simple and measurable (deposit cap is the only throttle). +- Lets integrators estimate reward capacity without guessing “hidden” emission math. + +## `ServiceFeeDistributor` (Service Fee Revenue) + +Service-fee revenue share to stakers is handled by `ServiceFeeDistributor`: + +- It receives the staker portion of service payments, per operator. +- It accounts for `All` vs `Fixed` blueprint selection and optional per-asset security commitments. +- Delegators claim rewards from the distributor (or through the protocol UI). diff --git a/pages/staking/introduction.mdx b/pages/staking/introduction.mdx new file mode 100644 index 00000000..837b92a3 --- /dev/null +++ b/pages/staking/introduction.mdx @@ -0,0 +1,28 @@ +import CardGrid from "../../components/CardGrid"; + +# Economic Security + +Tangle's economic security stack centers on staking for Blueprint services. + + + +## Economic Security + +Tangle uses economic security to back operators who run Blueprint services. Operators register for services and provide compute. Assets can be staked to operators as collateral, aligning incentives and enabling slashing where policies require it. + +## Staking + +Tangle provides permissionless and asset-configurable staking for Blueprints. Assets can be staked to operators as collateral for service instances, and participants earn rewards based on service usage and incentives. + +Staking aligns operators, developers, and stakers around service reliability and performance. If an operator violates service requirements, delegated assets can be slashed under the protocol’s rules. 
diff --git a/pages/staking/liquid-staking/_meta.ts b/pages/staking/liquid-staking/_meta.ts new file mode 100644 index 00000000..a4203c33 --- /dev/null +++ b/pages/staking/liquid-staking/_meta.ts @@ -0,0 +1,11 @@ +import { Meta } from "nextra"; + +const meta: Meta = { + introduction: "Introduction", + "vaults-and-shares": "Vaults and Shares", + "staking-flow": "Staking Flow", + "factory-and-discovery": "Factory and Discovery", + "risks-and-limitations": "Risks and Limitations", +}; + +export default meta; diff --git a/pages/staking/liquid-staking/factory-and-discovery.mdx b/pages/staking/liquid-staking/factory-and-discovery.mdx new file mode 100644 index 00000000..57b55785 --- /dev/null +++ b/pages/staking/liquid-staking/factory-and-discovery.mdx @@ -0,0 +1,27 @@ +# Factory and Discovery + +`LiquidDelegationFactory` deploys vaults and provides discovery helpers. It creates **one vault per (operator, asset, blueprint selection)** and prevents duplicates. + +## Create a Vault + +- `createVault(operator, asset, blueprintIds)` +- `createAllBlueprintsVault(operator, asset)` for the All selection mode + +The factory will revert if the operator is not active or the asset is not enabled in `MultiAssetDelegation`. + +## Find Existing Vaults + +- `computeVaultKey(operator, asset, blueprintIds)` returns a deterministic key. +- `getVault(operator, asset, blueprintIds)` returns the vault address for that configuration. +- `getOperatorVaults(operator)` lists all vaults for an operator. +- `getAssetVaults(asset)` lists all vaults for an asset. +- `getAllVaults()` and `vaultCount()` expose a global view. + +## Token Names and Symbols + +Vaults generate metadata automatically: + +- **Name**: `Liquid Delegation {ASSET} Op-0x1234 {BP...}` +- **Symbol**: `ld{ASSET}-0x1234-{BP...}` + +These strings are derived from the operator address, asset symbol, and blueprint selection mode. diff --git a/pages/staking/liquid-staking/introduction.mdx b/pages/staking/liquid-staking/introduction.mdx new file mode 100644 index 00000000..c47f212e --- /dev/null +++ b/pages/staking/liquid-staking/introduction.mdx @@ -0,0 +1,58 @@ +# Liquid Staking (Liquid Delegation Vaults) + +Liquid staking on Tangle lets stakers keep a transferable position while their assets secure operator-run services. It is implemented as ERC-7540 liquid delegation vaults that wrap the standard staking flow in `MultiAssetDelegation`. + +Each vault is tied to one operator, one asset, and one blueprint selection mode. Deposits are immediate, while redemptions are asynchronous and follow protocol exit delays. + +## Who This Is For + +- **Stakers** who want liquidity without exiting their staking position. +- **Integrators** who want a tokenized staking position per operator and asset. +- **Operators** who want a clean vault surface for delegators. + +## What Changes vs Direct Staking + +- **Liquid shares**: you hold ERC20 shares instead of a static delegation record. +- **Fixed configuration**: operator + asset + blueprint selection is fixed per vault. +- **Async exits**: redemptions use the protocol's unstake delays. + +
+Figure: Liquid delegation vault flow. Liquid delegation vaults wrap protocol staking with transferable shares and asynchronous exits.
    + +## Rewards and Accounting + +Liquid delegation vaults manage delegation and redemption. Incentives and service-fee accounting are still tracked by the protocol. Because the vault is the delegator on-chain, accruals are attributed to the vault address. + +Reward claim surfaces live in the protocol: + +- **Service fees** are claimed through [`ServiceFeeDistributor.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/ServiceFeeDistributor.sol). +- **TNT incentives** are claimed through [`RewardVaults.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/rewards/RewardVaults.sol). + +The vault does not automatically distribute those rewards to share holders; integrators need to wire a distribution path if they want reward pass-through. + +See [Claiming Cheatsheet](/staking/incentives/claiming) for function-level claim flows. + +## Core Contracts (GitHub) + +- [`LiquidDelegationVault.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/staking/LiquidDelegationVault.sol) +- [`LiquidDelegationFactory.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/staking/LiquidDelegationFactory.sol) +- [`MultiAssetDelegation.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/staking/MultiAssetDelegation.sol) + +## Related Interfaces (GitHub) + +- [`IERC7540.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IERC7540.sol) +- [`IMultiAssetDelegation.sol`](https://github.com/tangle-network/tnt-core/blob/main/src/interfaces/IMultiAssetDelegation.sol) + +## Next Steps + +- [How vaults and shares work](./vaults-and-shares.mdx) +- [Staking flow and redemption](./staking-flow.mdx) +- [Factory and discovery](./factory-and-discovery.mdx) diff --git a/pages/staking/liquid-staking/risks-and-limitations.mdx b/pages/staking/liquid-staking/risks-and-limitations.mdx new file mode 100644 index 00000000..43cf6f92 --- /dev/null +++ b/pages/staking/liquid-staking/risks-and-limitations.mdx @@ -0,0 +1,33 @@ +# Risks and Limitations + +Liquid staking introduces convenience, but it does not remove protocol-level risks. + +## Slashing Risk + +If an operator is slashed, the vault's underlying delegation is reduced and the share value drops. Share transfers do not avoid this risk. + +## Delayed Exits + +Redemptions are asynchronous. Shares are burned at `requestRedeem`, and assets are only available after the protocol delay window elapses. + +## Fixed Vault Configuration + +Vaults are fixed to one operator, one asset, and one blueprint selection mode. Changing exposure requires moving to a different vault. + +## Reward Pass-Through + +Rewards accrue to the vault address as the delegator. The vault does not distribute rewards to share holders by itself, so integrators must add a pass-through mechanism if required. + +See [Claiming Cheatsheet](/staking/incentives/claiming) for the protocol claim surface. + +## Asset Availability + +Vaults can only be created for assets that are enabled in `MultiAssetDelegation`. + +## Native Asset Support + +Native-asset unwrapping is not enabled in the current vault implementation. Use wrapped assets when available. + +## Redeem Request Matching + +`redeem` matches the first claimable request with the provided share amount. Avoid multiple outstanding requests for the same share quantity to prevent ambiguity. 
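+
+As a sketch of the single-request pattern described above, the snippet below issues one redeem request and claims it only once it is claimable, using the ERC-7540-style calls documented in [Staking Flow](./staking-flow.mdx). The interface is trimmed to the calls shown, and the assumption is that the calling contract itself holds the vault shares and acts as its own controller.
+
+```solidity
+pragma solidity ^0.8.20;
+
+// Trimmed ERC-7540-style interface; only the calls used below are included.
+interface ILiquidDelegationVaultLike {
+    function requestRedeem(uint256 shares, address controller, address owner) external returns (uint256 requestId);
+    function claimableRedeemRequest(uint256 requestId, address controller) external view returns (uint256 claimableShares);
+    function redeem(uint256 shares, address receiver, address controller) external returns (uint256 assets);
+}
+
+// Assumes this contract holds the vault shares and acts as its own controller.
+contract RedeemOnce {
+    // Keep one outstanding request per share amount to avoid ambiguous matching.
+    function requestExit(ILiquidDelegationVaultLike vault, uint256 shares) external returns (uint256 requestId) {
+        requestId = vault.requestRedeem(shares, address(this), address(this));
+    }
+
+    // Claim after the protocol delay window has elapsed.
+    function claimExit(ILiquidDelegationVaultLike vault, uint256 requestId, uint256 shares) external returns (uint256 assets) {
+        require(vault.claimableRedeemRequest(requestId, address(this)) >= shares, "delay not elapsed");
+        assets = vault.redeem(shares, address(this), address(this));
+    }
+}
+```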
diff --git a/pages/staking/liquid-staking/staking-flow.mdx b/pages/staking/liquid-staking/staking-flow.mdx new file mode 100644 index 00000000..fdd31893 --- /dev/null +++ b/pages/staking/liquid-staking/staking-flow.mdx @@ -0,0 +1,73 @@ +# Staking Flow + +Liquid staking uses immediate deposits and asynchronous redemptions. + +## Deposit and Mint + +1. Approve the vault to spend the asset. +2. Call `deposit(assets, receiver)` or `mint(shares, receiver)`. +3. The vault deposits into `MultiAssetDelegation` and delegates to the configured operator and blueprint selection. +4. ERC20 vault shares are minted to the receiver. + +Notes: + +- Vaults currently use the ERC20 deposit path. Native-asset unwrapping is not enabled yet. +- The vault does not expose lock multipliers; deposits are unlocked by default. + +## Redeem (Asynchronous) + +1. Call `requestRedeem(shares, controller, owner)`. Shares are burned, an unstake is scheduled, and a request ID is returned. +2. Track status with `pendingRedeemRequest` and `claimableRedeemRequest`. +3. After the delay window, call `redeem(shares, receiver, controller)` to receive the underlying assets. + +The delay is the maximum of `delegationBondLessDelay` and `leaveDelegatorsDelay` from `MultiAssetDelegation`. + +## Delay Reference + +Delays are expressed in **rounds**. To convert to time, multiply by `roundDuration`. + +| Delay | Source | What It Controls | +| ------------------------- | ---------------------- | -------------------------------------------------------------------- | +| `delegationBondLessDelay` | `MultiAssetDelegation` | Minimum rounds before a redeem request is claimable. | +| `leaveDelegatorsDelay` | `MultiAssetDelegation` | Additional delay before exits finalize (vault uses the max of both). | +| `roundDuration` | `MultiAssetDelegation` | Length of a round; used to translate delays into wall-clock time. | + +See [Protocol Parameters](/network/network-parameters) for current defaults. + +## Controller Permissions + +Only the owner or an approved operator can request or redeem on a controller. Use `setOperator` to allow another address to act on your behalf. + +## Synchronous Withdrawals Are Disabled + +`withdraw` and `requestDeposit` revert with `AsyncRequired`. Always use the async flow. + +## Lifecycle Example + +1. A staker deposits 1,000 USDC into a vault for `Operator A` (All mode). The vault mints 1,000 shares. +2. The vault delegates to `Operator A` through `MultiAssetDelegation` with All blueprint exposure. +3. `Operator A` runs services. If fee and incentive accounting is enabled, it is attributed to the vault address. +4. A slash against `Operator A` reduces the vault’s backing assets. Each share now redeems for less than 1 USDC. +5. The staker requests redemption for 1,000 shares and waits out the delay window. +6. After the delay, the staker redeems and receives the reduced underlying amount. + +## Reward Claim Paths (Protocol-Level) + +Reward claims are not part of the vault interface. They live in protocol contracts and are tied to the **vault address** as the delegator: + +- **Service fees**: `ServiceFeeDistributor.claimAll(token)` or `claimFor(token, operator, asset)` +- **TNT incentives**: `RewardVaults.claimDelegatorRewards(asset, operator)` + +If you want rewards to flow to share holders, you need an explicit distribution mechanism on top of the vault. 
+ +Example (vault address claims rewards): + +```solidity +// Service fees (per payment token) +ServiceFeeDistributor.claimAll(paymentToken); + +// TNT incentives (per staking asset + operator) +RewardVaults.claimDelegatorRewards(stakingAsset, operator); +``` + +After the vault claims, an external distributor can split proceeds across share holders using a snapshot or claim-time pro-rata approach. diff --git a/pages/staking/liquid-staking/vaults-and-shares.mdx b/pages/staking/liquid-staking/vaults-and-shares.mdx new file mode 100644 index 00000000..989988ad --- /dev/null +++ b/pages/staking/liquid-staking/vaults-and-shares.mdx @@ -0,0 +1,33 @@ +# Vaults and Shares + +Liquid delegation vaults are ERC20 share tokens with an ERC-7540 redemption interface. Each vault represents a single delegation position with a fixed configuration. + +## Vault Configuration + +Each vault is bound to: + +- **Operator**: the operator the vault delegates to. +- **Asset**: the ERC20 asset deposited into staking. +- **Blueprint selection**: + - **All**: the vault tracks all blueprints the operator participates in, including future additions. + - **Fixed**: the vault delegates only to a specific list of blueprint IDs. + +Blueprint selection is set at vault creation and cannot be changed later. + +## Share Accounting + +Vaults use the same share-based accounting model as `MultiAssetDelegation` with a small virtual offset to prevent first-depositor inflation. As a result: + +- Shares are minted on deposit and represent a claim on the underlying delegated assets. +- Share value can **move down** if the operator is slashed. +- Share value can change over time as the underlying delegation changes. + +## Liquidity Properties + +Vault shares are standard ERC20 tokens: + +- **Transferable** between wallets. +- **Approveable** for integrators and apps. +- **Composable** with other token-based systems. + +The vault itself remains the on-chain delegator; individual share transfers do not change the underlying delegation record. diff --git a/pages/staking/staking-concepts.mdx b/pages/staking/staking-concepts.mdx new file mode 100644 index 00000000..e5316db3 --- /dev/null +++ b/pages/staking/staking-concepts.mdx @@ -0,0 +1,37 @@ +# Staking Concepts + +This document introduces the core concepts for economic security on Tangle. + +## Key Roles + +- **Blueprint**: A reusable service template that defines interfaces, execution requirements, and expected outputs. +- **Service Instance**: A live deployment of a Blueprint that runs on operators. +- **Operator**: A node that runs service instances and earns fees for execution. +- **Staker (Delegator)**: A participant who delegates assets to operators to back service execution. + +## Staking Flow + +1. **Deposit assets** into the staking contract (`MultiAssetDelegation`). +2. **Delegate to an operator** and choose blueprint exposure: + - **`All`**: exposure to all blueprints the operator supports. + - **`Fixed`**: exposure to a specific set of blueprints. +3. **Earn rewards** from service fees and optional TNT budgets. + +## Incentives and Fees + +- **Service fees** are paid by customers and split across developers, operators, stakers, and the protocol treasury. +- **Optional TNT incentives** come from `InflationPool` when governance funds them. + +See [Incentives](/staking/incentives) for details. + +## Liquid Staking + +Liquid staking uses liquid delegation vaults to tokenize a delegation position into transferable shares. 
Each vault is tied to one operator, one asset, and one blueprint selection mode. + +See [Liquid Staking](/staking/liquid-staking/introduction) for the vault architecture and redemption flow. + +## Slashing and Security + +Operators can be slashed if they violate service requirements or fail to meet commitments. Stakers share in that risk based on their delegated exposure. + +Review risks before delegating or choosing exposure settings. diff --git a/pages/vibe/_meta.ts b/pages/vibe/_meta.ts new file mode 100644 index 00000000..d86694a4 --- /dev/null +++ b/pages/vibe/_meta.ts @@ -0,0 +1,16 @@ +import type { Meta } from "nextra"; + +const meta: Meta = { + introduction: "Introduction", + workflows: "Agent Workflows", + simulations: "Simulations", + profiles: "Profiles and Policies", + "-- profile schema": { + type: "separator", + title: "Profile Schema", + }, + "profile-schema": "OpenCode Profile Schema", + integrations: "Integrations", +}; + +export default meta; diff --git a/pages/vibe/integrations.mdx b/pages/vibe/integrations.mdx new file mode 100644 index 00000000..06fe9f2f --- /dev/null +++ b/pages/vibe/integrations.mdx @@ -0,0 +1,20 @@ +# Integrations + +The workbench is built to work with existing company systems. You can connect internal software, data sources, and operational tools so agents can act in context. + +## Typical Integrations + +- **Source control** for repositories and code review workflows +- **Ticketing and task systems** for structured work intake +- **Data sources** for retrieval and analysis +- **Internal APIs** for operational automation + +Integrations are governed by profiles and policies, so the agent only has access to what it needs. + +## Tool Servers And Gateways + +Advanced integrations can be exposed as tool servers. Profiles can allowlist local or remote servers, attach headers or environment variables, and set timeouts. This keeps sensitive systems behind explicit gates while still enabling powerful workflows. + +## Design Principle + +Integrations should be explicit and scoped. If a tool is not required for a workflow, it should not be enabled. diff --git a/pages/vibe/introduction.mdx b/pages/vibe/introduction.mdx new file mode 100644 index 00000000..9d7f5d1f --- /dev/null +++ b/pages/vibe/introduction.mdx @@ -0,0 +1,35 @@ +# Agentic Workbench + +The agentic workbench is Tangle's shared workspace for teams building with AI agents. It is multiplayer by design, so engineering, product, and business teams can work in the same space with shared context. Runs capture inputs, outputs, and logs so results are reviewable. + +## What You Can Do + +- **Build and iterate** on software with agent workflows in a shared project space. +- **Run background tasks** that keep working without babysitting. +- **Compare variants** across multiple approaches or branches. +- **Evaluate at scale** with simulations over large task sets. +- **Control access** with profiles, policies, and limits. +- **Connect tools** through integrations and shared workspace context. + +## Who This Is For + +- **Product teams** shipping AI-assisted software. [Start with workflows](/vibe/workflows). +- **Engineering orgs** standardizing agent workflows. [Define profiles](/vibe/profiles). +- **Builders** who need repeatable, reviewable output. [Run simulations](/vibe/simulations). + +## How It Connects + +The workbench sends execution to the sandbox runtime and can route payments through the protocol when workloads move to operator-hosted infrastructure. 
+ +## Profiles Power The Runtime + +Workbench profiles are the control surface for the OpenCode sidecar. They define model routing, per-agent prompts, and tool permissions so every run behaves predictably without hard-coded rules. + +## Start Here (By Role) + +- **Builders**: Start with [agent workflows](/vibe/workflows). +- **Team leads**: Define guardrails in [profiles and policies](/vibe/profiles). +- **Evaluators**: Run [simulations](/vibe/simulations) before shipping. +- **Platform teams**: Wire tools in [integrations](/vibe/integrations). + +The workbench is available via partnership or early access. diff --git a/pages/vibe/profile-schema.mdx b/pages/vibe/profile-schema.mdx new file mode 100644 index 00000000..f453e279 --- /dev/null +++ b/pages/vibe/profile-schema.mdx @@ -0,0 +1,107 @@ +# Profile Schema + +This page documents the profile schema used to configure sidecar agents. Today, the schema maps to OpenCode. Over time, additional backends will share the same profile surface where possible. + +## Current Support + +- **OpenCode**: Full profile support (models, permissions, tools, tool servers, plugins). +- **Future backends**: We will document compatibility notes as new runtimes are added. + +## Top-Level Fields + +- **name**: Human-friendly identifier. +- **description**: Optional detail about what the profile is for. +- **extends**: Name of a base profile to inherit from. +- **model**: Primary model to use, in `provider/model-id` format. +- **small_model**: Optional secondary model for lighter tasks. +- **agent**: Map of per-agent overrides (plan, build, explore, or custom). +- **permission**: Global permission policy (edit, bash, webfetch, tool servers). +- **tools**: Map of tool names to `true` or `false`. +- **mcp**: Tool server configurations (allowlisted local or remote). +- **plugin**: Allowlisted plugin names. + +## Agent Overrides + +The `agent` field is a map keyed by role name. Common keys include `plan`, `build`, and `explore`, but custom names are allowed. + +Each agent config can include: + +- **model**: Override model for this agent. +- **temperature**: Sampling temperature (0.0 to 2.0). +- **top_p**: Nucleus sampling (0.0 to 1.0). +- **prompt**: Agent-specific system prompt. +- **tools**: Per-agent tool enablement map. +- **disable**: Disable this agent entirely. +- **description**: Human-readable note about this agent. +- **mode**: `primary`, `subagent`, or `all`. +- **permission**: Optional per-agent permission overrides. +- **maxSteps**: Hard cap on reasoning steps. + +## Permission Policy + +Permissions gate sensitive actions. Each field is one of `ask`, `allow`, or `deny`. + +- **edit**: File edit access. +- **bash**: Shell access (global or per-command map). +- **webfetch**: Web access. +- **mcp**: Tool server access. + +Policies can cap permissions (for example, forcing `bash` to `ask`), even if the profile requests more. + +## Tools + +The `tools` map enables or disables individual tools by name. This is the primary way to scope what the sidecar can execute. + +## Tool Servers (MCP) + +The `mcp` field defines allowlisted tool servers. 
Each entry is either: + +- **Local** + + - `type`: `local` + - `command`: array of command arguments + - `environment`: optional env map + - `enabled`: optional boolean + - `timeout`: optional milliseconds + +- **Remote** + - `type`: `remote` + - `url`: HTTPS endpoint + - `headers`: optional headers map + - `enabled`: optional boolean + - `timeout`: optional milliseconds + +Local tool servers are typically restricted by policy in production environments. + +## Plugins + +The `plugin` field is an array of allowlisted plugin names. Policies can block plugins entirely. + +## Inheritance And Merging + +If `extends` is set, the runtime loads the base profile and merges the override fields. Nested objects are merged so you can override only what changes. + +## Minimal Example + +```json +{ + "name": "team-default", + "extends": "tangle/base", + "model": "provider/model-id", + "agent": { + "plan": { "temperature": 0.2, "maxSteps": 8 }, + "build": { "temperature": 0.1 } + }, + "permission": { + "edit": "ask", + "bash": "ask", + "webfetch": "allow", + "mcp": "ask" + }, + "tools": { + "git": true, + "search": true, + "bash": false + } +} +``` diff --git a/pages/vibe/profiles.mdx b/pages/vibe/profiles.mdx new file mode 100644 index 00000000..2bbfc13e --- /dev/null +++ b/pages/vibe/profiles.mdx @@ -0,0 +1,68 @@ +# Profiles and Policies + +Profiles define how an agent behaves end to end. They package model choice, tool access, permissions, and guardrails into a reusable sidecar agent profile that the runtime enforces. This is how the workbench configures the OpenCode execution layer without hard-coding behavior into every task. + +## What A Profile Controls + +- **Model selection**: primary and small model routing per profile. +- **Per-agent tuning**: distinct configs for plan/build/explore (prompt, temperature, max steps). +- **Tool access**: enable or disable individual tools. +- **Permissions**: edit, bash, web access, and tool-server access (ask/allow/deny). +- **Command scoping**: bash can be limited by command allowlists. +- **Tool servers**: allowlisted local or remote servers with headers, env, and timeouts. +- **Plugins**: allowlisted extensions that can be attached to the runtime. +- **Inheritance**: extend a base profile and override only what changes. + +Profiles make it possible to standardize agent behavior across teams while still allowing targeted customization when needed. + +## Guardrails And Validation + +Profiles are validated before execution. Security policies can cap permissions (for example, "bash" at ask), block local tool servers, and require allowlisted plugins. Profiles that exceed policy are rejected so the runtime stays safe and predictable. + +## How It Shows Up In The Workbench + +The workbench will progressively expose profile controls: + +- **Base profiles** for org-wide defaults. +- **Profile variants** for team or project overrides. +- **Per-run overrides** for experimentation and simulations. + +If you want early access to advanced profile configuration, contact the team. + +Under the hood, profiles can extend a base profile. The runtime merges overrides with the base profile, validates the final config, and applies the result to the sidecar session. 
+ +## Example Profile (Conceptual) + +```json +{ + "name": "team-default", + "extends": "tangle/base", + "model": "provider/model-id", + "small_model": "provider/model-small", + "agent": { + "plan": { "temperature": 0.2, "maxSteps": 8 }, + "build": { "temperature": 0.1, "tools": { "git": true, "bash": false } }, + "explore": { "temperature": 0.7 } + }, + "permission": { + "edit": "ask", + "bash": "ask", + "webfetch": "allow", + "mcp": "ask" + }, + "tools": { + "git": true, + "search": true, + "bash": false + }, + "mcp": { + "internal-search": { + "type": "remote", + "url": "https://tools.example.com/mcp", + "timeout": 10000 + } + } +} +``` + +For a field-by-field reference, see [Profile Schema](/vibe/profile-schema). The schema currently targets OpenCode, with additional backends planned. diff --git a/pages/vibe/simulations.mdx b/pages/vibe/simulations.mdx new file mode 100644 index 00000000..4c9bda70 --- /dev/null +++ b/pages/vibe/simulations.mdx @@ -0,0 +1,29 @@ +# Simulations + +Simulations are how you evaluate agent performance before shipping. Instead of a single run, the workbench can run many variations of the same task and help you compare outcomes. + +## What You Can Simulate + +- **Large batches**: Run 100+ task executions to see how an agent behaves across scenarios. +- **Side-by-side comparisons**: Compare different prompts, models, or tools. +- **Regression checks**: Catch regressions before they hit production workflows. + +## What You Get Back + +Each simulation produces a structured record of inputs, outputs, and logs. This makes it easier to score results, audit failures, and tune workflows based on evidence instead of anecdotes. + +## The Evaluation Loop + +Every execution generates traces: what agents did, inputs received, outputs produced, timing. These traces feed evaluation systems: + +- Which prompt structures produce better results? +- Which model configurations work for which tasks? +- Which tool combinations reduce errors? + +This creates a flywheel: more usage generates more traces, better traces improve the system, and a better system drives more usage. Early users benefit from infrastructure; later users benefit from accumulated learning. + +## When To Run Simulations + +- Before shipping a new workflow. +- After changing tools, models, or policies. +- When outcomes start drifting or costs spike. diff --git a/pages/vibe/workflows.mdx b/pages/vibe/workflows.mdx new file mode 100644 index 00000000..efdb7d12 --- /dev/null +++ b/pages/vibe/workflows.mdx @@ -0,0 +1,29 @@ +# Agent Workflows + +The workbench treats agent development as a workflow problem. You define the goal, the tools the agent can use, and the policies it must follow. The workbench handles orchestration, execution, and repeatability. + +## How Workflows Are Structured + +- **Projects** are the top-level container for code, prompts, and agent configurations. +- **Runs** capture a single execution, including inputs, outputs, and logs. +- **Background agents** keep long tasks moving while you focus elsewhere. +- **Versioned experiments** let you test multiple approaches in parallel. + +## What A Good Workflow Includes + +- **Clear outcome** (merge a PR, generate a report, update a spec). +- **Scoped tools** so the agent can act but not overreach. +- **Profile and limits** to control cost and risk in the runtime sidecar. +- **Review point** before changes reach production systems. + +## Parallel Agents + +Traditional AI interfaces present single-threaded conversation. 
Real projects involve exploration, comparison, and parallel investigation. + +- **Spawning sub-agents**: A primary agent working on a task can spawn sub-agents to research alternatives, investigate dependencies, or draft tests. Each runs independently and reports results back. +- **Forking for exploration**: Facing a decision, fork the context and direct each fork down a different path. Both develop in parallel. Compare results and pick the winner. +- **Spatial overview**: The workbench presents parallel activity visually, showing all active agents, their status, recent outputs, and relationships. + +## Why This Matters + +Teams need predictability when agents touch real systems. The workbench makes behavior reproducible, comparable, and reviewable while keeping execution controlled. diff --git a/pages/vision/_meta.ts b/pages/vision/_meta.ts new file mode 100644 index 00000000..67043e56 --- /dev/null +++ b/pages/vision/_meta.ts @@ -0,0 +1,10 @@ +import type { Meta } from "nextra"; + +const meta: Meta = { + introduction: "Mission", + "use-cases": "Use Cases", + architecture: "Architecture and Design Pillars", + "core-concepts": "Core Concepts and Terminology", +}; + +export default meta; diff --git a/pages/vision/architecture.mdx b/pages/vision/architecture.mdx new file mode 100644 index 00000000..8083c8ae --- /dev/null +++ b/pages/vision/architecture.mdx @@ -0,0 +1,47 @@ +# Architecture and Design Pillars + +Tangle ties together three layers most platforms separate: the workbench where work is created, the sandbox runtime where it executes, and the protocol that pays the operators who run it. + +
![Autonomous work loop](/images/architecture/autonomous-work-loop.svg)

_Autonomous work loop: workbench -> sandbox runtime -> protocol (payments + evaluation)._
    + +**Flow:** design -> execute -> evaluate -> pay. Each run generates evaluation data that feeds the next run. + +## What Runs Where + +| Layer | Runs here | Examples | +| --------------- | ----------------------------- | -------------------------------------------------------- | +| Workbench | Human and agent collaboration | Workflows, profiles, simulations, reviews | +| Sandbox runtime | Executed tasks and tools | Agent sessions, tool calls, file edits | +| Protocol | Coordination and settlement | Service registry, operator payments, staking, incentives | + +## System Architecture + +**1) Execution Layer** +Sandboxed runtimes with isolation, resource limits, and audit logs. This is where tasks actually run. + +**2) Protocol Layer** +The coordination plane. It handles operator discovery, payment routing, and incentive enforcement. + +**3) Experience Layer** +The agentic workbench and [Blueprint SDK](https://github.com/tangle-network/blueprint/tree/v2). This is where teams design workflows, run simulations, and ship services. + +## Trust and Delegation + +Autonomous work only scales when delegation is safe. Tangle enforces policy gates before execution, emits logs and execution metadata for every run, and keeps results reviewable so teams can decide what reaches production. + +## Design Pillars + +- **Isolation first**: Every workload runs inside a sandbox with explicit permissions. +- **Verifiable outcomes**: Logs, metadata, and evaluations make results auditable. +- **Composable services**: Blueprints define reusable services that can be instantiated on demand. +- **Economic alignment**: Operators and developers earn based on usage and reliability. +- **Developer leverage**: Workflows are testable, repeatable, and easy to ship. diff --git a/pages/vision/core-concepts.mdx b/pages/vision/core-concepts.mdx new file mode 100644 index 00000000..4f8aa4ff --- /dev/null +++ b/pages/vision/core-concepts.mdx @@ -0,0 +1,17 @@ +# Core Concepts and Terminology + +These terms show up across the docs and define how the system works. + +- **Agentic workbench**: A shared workspace where humans and agents build, test, and operate autonomous work. +- **Sandbox runtime**: The isolated execution environment where agents and services run safely. +- **Agent**: An autonomous workflow or AI worker running inside a sandbox. +- **Agent profile**: A configuration that defines models, tools, budgets, and policies for an agent. +- **Evaluation**: A structured assessment of task results, cost, and policy compliance. +- **Simulation run**: A multi-variant evaluation of an agent or workflow across many tasks. +- **Blueprint**: A reusable service template that defines interfaces, execution requirements, and expected outputs. +- **Service instance**: A live deployment of a Blueprint, launched on demand and run by operators. +- **Job**: A unit of work submitted to a service instance. +- **Operator**: A node that provides compute and runs service instances. +- **Economic security provider**: A participant who backs operators with assets and shares in protocol economics. +- **Tangle protocol**: The coordination layer that schedules work, routes payments, and enforces incentives. +- **Payments**: The flow of value tied to usage, distributed to operators and developers. 
diff --git a/pages/vision/introduction.mdx b/pages/vision/introduction.mdx new file mode 100644 index 00000000..fea744c6 --- /dev/null +++ b/pages/vision/introduction.mdx @@ -0,0 +1,50 @@ +# Introduction and Mission + +Tangle is the shared operating layer for autonomous work: a workbench for teams and agents, a sandbox runtime for execution, and a protocol that pays the operators who host it. Every run emits evaluations so workflows improve with evidence, not guesswork. + +We started by building a protocol for complex services (MPC, zero-knowledge, distributed systems). That foundation maps directly to what AI needs: isolation, coordination, and economic accountability. + +## Mission + +**Make autonomous work safe, verifiable, and economically aligned.** + +AI should ship with the rigor of critical infrastructure. That means isolation by default, reviewable outcomes, and incentives that reward reliability. + +## Problem + +Autonomous work today is brittle: tasks run outside policy, results lack evidence, and responsibility is unclear. Teams either build their own infrastructure or avoid delegation entirely. + +## What Exists Today + +- **Managed workbench + runtime**: build, run, and review agent workflows with isolation, policy gates, and evaluation tools. +- **[Blueprint SDK](https://github.com/tangle-network/blueprint/tree/v2)**: package services into reusable components with predictable interfaces. + +## Where It Goes + +- **Operator marketplace**: runtime hosting becomes decentralized and paid through the protocol. +- **Protocol-native coordination**: payments, reliability, and service discovery move on-chain. + +## What Tangle Is + +- **Agentic workbench**: A shared workspace where humans and agents build and run autonomous work. +- **Sandbox runtime**: The execution layer that isolates tasks with policies, resource limits, and audit logs. +- **Protocol**: The coordination and payment layer that lets operators host workloads and get paid. +- **Evaluation loop**: Task and agent evaluations that improve workflows over time. + +## Who This Is For + +- **AI builders** shipping agentic products. [Start in the workbench](/vibe/introduction). +- **Protocol and infrastructure engineers** building new services. [Build with Blueprints](/developers/blueprints/introduction). +- **Operators** providing compute. [Operator onboarding](/operators/introduction). +- **Partners and investors** evaluating the stack. [Protocol overview](/network/overview). + +## Start Here + +- **Build workflows** in the [workbench](/vibe/introduction). +- **Package services** with the [Blueprint SDK](https://github.com/tangle-network/blueprint/tree/v2). +- **Host workloads** via the [runtime](/infrastructure/introduction). +- **Integrate payments and coordination** in the [protocol](/network/overview). + +## The Long Term + +As AI becomes a workforce, the world needs an operating layer that turns intent into execution and settlement. Tangle is building that layer: where autonomous work is authored, run safely, and paid at scale. diff --git a/pages/vision/use-cases.mdx b/pages/vision/use-cases.mdx new file mode 100644 index 00000000..d8aa85d5 --- /dev/null +++ b/pages/vision/use-cases.mdx @@ -0,0 +1,39 @@ +# Use Cases + +Tangle supports AI-native products and infrastructure where work needs isolation, auditability, and reliable payments. These examples show how the workbench, sandbox runtime, and protocol fit together. 
+ +## Agentic Software Engineering + +Run background agents that build features, refactor code, write tests, and open pull requests. Work stays isolated and reviewable. +Good for: product teams using the workbench with strict review gates. +Layers: workbench + runtime. + +## Evaluation and Governance Pipelines + +Execute large evaluation suites across prompts, models, and tools. Each run produces structured metrics so teams can track regressions and enforce policies. +Good for: AI builders who need repeatable evaluations and policy enforcement. +Layers: workbench + runtime. + +## Regulated and Sensitive Workflows + +Deploy AI workflows on protected data with strict isolation, resource limits, and audit logs. Outputs are verifiable and reviewable. +Good for: regulated teams that require strong isolation and auditability. +Layers: runtime + protocol (for paid operators). + +## Data and Knowledge Operations + +Use Blueprints to run extraction, transformation, labeling, and retrieval jobs. Operators provide compute while the protocol coordinates payments and accountability. +Good for: teams turning knowledge work into reliable, reusable services. +Layers: [Blueprint SDK](https://github.com/tangle-network/blueprint/tree/v2) + protocol. + +## AI Ops and Reliability + +Operate autonomous monitoring, cost optimization, and incident response workflows. Operators are paid for uptime and performance, while services can enforce reliability targets. +Good for: operators and infrastructure engineers running long-lived services. +Layers: runtime + protocol. + +## Marketplace-Ready AI Services + +Package an agent workflow or infrastructure service as a Blueprint, publish it once, and let others instantiate it on demand. Payments flow to operators and developers based on usage. +Good for: builders who want distribution without running their own infrastructure. +Layers: [Blueprint SDK](https://github.com/tangle-network/blueprint/tree/v2) + protocol. 
diff --git a/public/images/architecture/autonomous-work-loop.svg b/public/images/architecture/autonomous-work-loop.svg new file mode 100644 index 00000000..8c8ddc2b --- /dev/null +++ b/public/images/architecture/autonomous-work-loop.svg @@ -0,0 +1,27 @@ + [SVG markup not shown; text labels: Autonomous Work Loop; Shared Workbench (Teams, Agents); Secure Sandboxes (Runtime); Protocol Payments (Operators); Evaluations, Data] diff --git a/public/images/architecture/incentives-flow.svg b/public/images/architecture/incentives-flow.svg new file mode 100644 index 00000000..cf0943e0 --- /dev/null +++ b/public/images/architecture/incentives-flow.svg @@ -0,0 +1,79 @@ + [SVG markup not shown; text labels: Incentives Flow: Service Fees + TNT Budgets; Service Fees (Customer payments); Payments.sol (Fee split); Developer; Protocol Treasury; Operators; Stakers; ServiceFeeDistributor (Staker share); Delegators; TNT Budgets (Governance-funded); InflationPool (Epoch distribution); RewardVaults (TNT staker incentives); Operator / Customer / Developer (Pending TNT balances); Optional staker exposure; TNT claims] diff --git a/public/images/architecture/liquid-delegation-flow.svg b/public/images/architecture/liquid-delegation-flow.svg new file mode 100644 index 00000000..63bd6cba --- /dev/null +++ b/public/images/architecture/liquid-delegation-flow.svg @@ -0,0 +1,46 @@ + [SVG markup not shown; text labels: Liquid Delegation Vault Flow; Staker (Holds shares); Liquid Delegation Vault (ERC-7540 shares, Fixed operator + asset); MultiAssetDelegation (Delegation + delays); Operator Services (Runs Blueprints); Deposit; Delegate; Secure; Redeem after delay window; Slashing reduces backing] diff --git a/public/images/claim-assets.png b/public/images/claim-assets.png deleted file mode 100644 index 47c8082d..00000000 Binary files a/public/images/claim-assets.png and /dev/null differ diff --git a/public/images/claim.png b/public/images/claim.png deleted file mode 100644 index 535aff1e..00000000 Binary files a/public/images/claim.png and /dev/null differ diff --git a/public/images/extrinsic.png b/public/images/extrinsic.png deleted file mode 100644 index 16089a2a..00000000 Binary files a/public/images/extrinsic.png and /dev/null differ diff --git a/public/images/liquid-staking/create-pool-polkadotjs/check-min-create-bond.png b/public/images/liquid-staking/create-pool-polkadotjs/check-min-create-bond.png deleted file mode 100644 index 672c4b14..00000000 Binary files a/public/images/liquid-staking/create-pool-polkadotjs/check-min-create-bond.png and /dev/null differ diff --git a/public/images/liquid-staking/create-pool-polkadotjs/commission.png b/public/images/liquid-staking/create-pool-polkadotjs/commission.png deleted file mode 100644 index e58193e0..00000000 Binary files a/public/images/liquid-staking/create-pool-polkadotjs/commission.png and /dev/null differ diff --git a/public/images/liquid-staking/create-pool-polkadotjs/create.png b/public/images/liquid-staking/create-pool-polkadotjs/create.png deleted file mode 100644 index 7bc8d397..00000000 Binary files a/public/images/liquid-staking/create-pool-polkadotjs/create.png and /dev/null differ diff --git a/public/images/liquid-staking/create-pool-polkadotjs/events.png b/public/images/liquid-staking/create-pool-polkadotjs/events.png deleted file mode 100644 index 84cb5256..00000000 Binary files
a/public/images/liquid-staking/create-pool-polkadotjs/events.png and /dev/null differ diff --git a/public/images/liquid-staking/create-pool-polkadotjs/extrinsic.png b/public/images/liquid-staking/create-pool-polkadotjs/extrinsic.png deleted file mode 100644 index 8f826ebb..00000000 Binary files a/public/images/liquid-staking/create-pool-polkadotjs/extrinsic.png and /dev/null differ diff --git a/public/images/liquid-staking/create-pool-polkadotjs/inblock.png b/public/images/liquid-staking/create-pool-polkadotjs/inblock.png deleted file mode 100644 index 6c2b7b70..00000000 Binary files a/public/images/liquid-staking/create-pool-polkadotjs/inblock.png and /dev/null differ diff --git a/public/images/liquid-staking/create-pool-polkadotjs/sign.png b/public/images/liquid-staking/create-pool-polkadotjs/sign.png deleted file mode 100644 index 1a8f2a35..00000000 Binary files a/public/images/liquid-staking/create-pool-polkadotjs/sign.png and /dev/null differ diff --git a/public/images/liquid-staking/create-pool-tangle/create-pool-btn.png b/public/images/liquid-staking/create-pool-tangle/create-pool-btn.png deleted file mode 100644 index 45b7cd28..00000000 Binary files a/public/images/liquid-staking/create-pool-tangle/create-pool-btn.png and /dev/null differ diff --git a/public/images/liquid-staking/create-pool-tangle/create-pool-form.png b/public/images/liquid-staking/create-pool-tangle/create-pool-form.png deleted file mode 100644 index cc9d5372..00000000 Binary files a/public/images/liquid-staking/create-pool-tangle/create-pool-form.png and /dev/null differ diff --git a/public/images/liquid-staking/input-amount-select-tab.png b/public/images/liquid-staking/input-amount-select-tab.png deleted file mode 100644 index ba55ade6..00000000 Binary files a/public/images/liquid-staking/input-amount-select-tab.png and /dev/null differ diff --git a/public/images/liquid-staking/join-pool-polkadotjs/bondedpools.png b/public/images/liquid-staking/join-pool-polkadotjs/bondedpools.png deleted file mode 100644 index 7411f984..00000000 Binary files a/public/images/liquid-staking/join-pool-polkadotjs/bondedpools.png and /dev/null differ diff --git a/public/images/liquid-staking/join-pool-polkadotjs/events.png b/public/images/liquid-staking/join-pool-polkadotjs/events.png deleted file mode 100644 index e4270576..00000000 Binary files a/public/images/liquid-staking/join-pool-polkadotjs/events.png and /dev/null differ diff --git a/public/images/liquid-staking/join-pool-polkadotjs/join.png b/public/images/liquid-staking/join-pool-polkadotjs/join.png deleted file mode 100644 index f7b79d82..00000000 Binary files a/public/images/liquid-staking/join-pool-polkadotjs/join.png and /dev/null differ diff --git a/public/images/liquid-staking/join-pool-polkadotjs/lst-join.png b/public/images/liquid-staking/join-pool-polkadotjs/lst-join.png deleted file mode 100644 index e9611955..00000000 Binary files a/public/images/liquid-staking/join-pool-polkadotjs/lst-join.png and /dev/null differ diff --git a/public/images/liquid-staking/join-pool-polkadotjs/sign.png b/public/images/liquid-staking/join-pool-polkadotjs/sign.png deleted file mode 100644 index 22b35a1e..00000000 Binary files a/public/images/liquid-staking/join-pool-polkadotjs/sign.png and /dev/null differ diff --git a/public/images/liquid-staking/join-pool-polkadotjs/success.png b/public/images/liquid-staking/join-pool-polkadotjs/success.png deleted file mode 100644 index ace42404..00000000 Binary files a/public/images/liquid-staking/join-pool-polkadotjs/success.png and 
/dev/null differ diff --git a/public/images/liquid-staking/lst-intro.png b/public/images/liquid-staking/lst-intro.png deleted file mode 100644 index 0959354b..00000000 Binary files a/public/images/liquid-staking/lst-intro.png and /dev/null differ diff --git a/public/images/liquid-staking/lst-workflow.png b/public/images/liquid-staking/lst-workflow.png deleted file mode 100644 index 5f91ad73..00000000 Binary files a/public/images/liquid-staking/lst-workflow.png and /dev/null differ diff --git a/public/images/liquid-staking/my-pools.png b/public/images/liquid-staking/my-pools.png deleted file mode 100644 index 24139e5d..00000000 Binary files a/public/images/liquid-staking/my-pools.png and /dev/null differ diff --git a/public/images/liquid-staking/select-ls-network.png b/public/images/liquid-staking/select-ls-network.png deleted file mode 100644 index 1c642f4a..00000000 Binary files a/public/images/liquid-staking/select-ls-network.png and /dev/null differ diff --git a/public/images/liquid-staking/select-pool-and-click-stake.png b/public/images/liquid-staking/select-pool-and-click-stake.png deleted file mode 100644 index 610d9fc1..00000000 Binary files a/public/images/liquid-staking/select-pool-and-click-stake.png and /dev/null differ diff --git a/public/images/restake-workflow.png b/public/images/restake-workflow.png deleted file mode 100644 index a208cf72..00000000 Binary files a/public/images/restake-workflow.png and /dev/null differ diff --git a/public/images/restake/delegate/canceldelegatorunstake.png b/public/images/restake/delegate/canceldelegatorunstake.png deleted file mode 100644 index cbcffc89..00000000 Binary files a/public/images/restake/delegate/canceldelegatorunstake.png and /dev/null differ diff --git a/public/images/restake/delegate/delegate.png b/public/images/restake/delegate/delegate.png deleted file mode 100644 index 71ac630c..00000000 Binary files a/public/images/restake/delegate/delegate.png and /dev/null differ diff --git a/public/images/restake/delegate/delegateinblock.png b/public/images/restake/delegate/delegateinblock.png deleted file mode 100644 index b77c244f..00000000 Binary files a/public/images/restake/delegate/delegateinblock.png and /dev/null differ diff --git a/public/images/restake/delegate/delegatesign.png b/public/images/restake/delegate/delegatesign.png deleted file mode 100644 index e5c4b271..00000000 Binary files a/public/images/restake/delegate/delegatesign.png and /dev/null differ diff --git a/public/images/restake/delegate/executedelegatorUnstake.png b/public/images/restake/delegate/executedelegatorUnstake.png deleted file mode 100644 index 0b972c9d..00000000 Binary files a/public/images/restake/delegate/executedelegatorUnstake.png and /dev/null differ diff --git a/public/images/restake/delegate/scheduleunstake.png b/public/images/restake/delegate/scheduleunstake.png deleted file mode 100644 index bc46b7f5..00000000 Binary files a/public/images/restake/delegate/scheduleunstake.png and /dev/null differ diff --git a/public/images/restake/deposit/deposit.png b/public/images/restake/deposit/deposit.png deleted file mode 100644 index 585a09a3..00000000 Binary files a/public/images/restake/deposit/deposit.png and /dev/null differ diff --git a/public/images/restake/deposit/depositinblock.png b/public/images/restake/deposit/depositinblock.png deleted file mode 100644 index 508761d0..00000000 Binary files a/public/images/restake/deposit/depositinblock.png and /dev/null differ diff --git a/public/images/restake/deposit/depositsign.png 
b/public/images/restake/deposit/depositsign.png deleted file mode 100644 index 13bdd370..00000000 Binary files a/public/images/restake/deposit/depositsign.png and /dev/null differ diff --git a/public/images/restake/how-to-restake-polkadotjs/executewithdraw.png b/public/images/restake/how-to-restake-polkadotjs/executewithdraw.png deleted file mode 100644 index c1698ff1..00000000 Binary files a/public/images/restake/how-to-restake-polkadotjs/executewithdraw.png and /dev/null differ diff --git a/public/images/restake/how-to-restake-polkadotjs/schedulewithdraw.png b/public/images/restake/how-to-restake-polkadotjs/schedulewithdraw.png deleted file mode 100644 index 160bd7dc..00000000 Binary files a/public/images/restake/how-to-restake-polkadotjs/schedulewithdraw.png and /dev/null differ diff --git a/public/images/restake/bridge/confirmation.png b/public/images/staking/bridge/confirmation.png similarity index 100% rename from public/images/restake/bridge/confirmation.png rename to public/images/staking/bridge/confirmation.png diff --git a/public/images/restake/bridge/form.png b/public/images/staking/bridge/form.png similarity index 100% rename from public/images/restake/bridge/form.png rename to public/images/staking/bridge/form.png diff --git a/public/images/restake/bridge/metamask-tx-1.png b/public/images/staking/bridge/metamask-tx-1.png similarity index 100% rename from public/images/restake/bridge/metamask-tx-1.png rename to public/images/staking/bridge/metamask-tx-1.png diff --git a/public/images/restake/bridge/metamask-tx-2.png b/public/images/staking/bridge/metamask-tx-2.png similarity index 100% rename from public/images/restake/bridge/metamask-tx-2.png rename to public/images/staking/bridge/metamask-tx-2.png diff --git a/public/images/restake/bridge/select-networks.png b/public/images/staking/bridge/select-networks.png similarity index 100% rename from public/images/restake/bridge/select-networks.png rename to public/images/staking/bridge/select-networks.png diff --git a/public/images/restake/bridge/tx-status-toast.png b/public/images/staking/bridge/tx-status-toast.png similarity index 100% rename from public/images/restake/bridge/tx-status-toast.png rename to public/images/staking/bridge/tx-status-toast.png diff --git a/public/images/restake/delegate/dapp-steps.png b/public/images/staking/delegate/dapp-steps.png similarity index 100% rename from public/images/restake/delegate/dapp-steps.png rename to public/images/staking/delegate/dapp-steps.png diff --git a/public/images/restake/delegate/delegate-tx-confirmation.png b/public/images/staking/delegate/delegate-tx-confirmation.png similarity index 100% rename from public/images/restake/delegate/delegate-tx-confirmation.png rename to public/images/staking/delegate/delegate-tx-confirmation.png diff --git a/public/images/restake/deposit/dapp-steps.png b/public/images/staking/deposit/dapp-steps.png similarity index 100% rename from public/images/restake/deposit/dapp-steps.png rename to public/images/staking/deposit/dapp-steps.png diff --git a/public/images/restake/deposit/deposit-tx-confirmation.png b/public/images/staking/deposit/deposit-tx-confirmation.png similarity index 100% rename from public/images/restake/deposit/deposit-tx-confirmation.png rename to public/images/staking/deposit/deposit-tx-confirmation.png diff --git a/public/images/restake/unstake/cancel-unstake.png b/public/images/staking/unstake/cancel-unstake.png similarity index 100% rename from public/images/restake/unstake/cancel-unstake.png rename to 
public/images/staking/unstake/cancel-unstake.png diff --git a/public/images/restake/unstake/steps.png b/public/images/staking/unstake/steps.png similarity index 100% rename from public/images/restake/unstake/steps.png rename to public/images/staking/unstake/steps.png diff --git a/public/images/restake/unstake/unstake-requests-table.png b/public/images/staking/unstake/unstake-requests-table.png similarity index 100% rename from public/images/restake/unstake/unstake-requests-table.png rename to public/images/staking/unstake/unstake-requests-table.png diff --git a/public/images/restake/unstake/unstake-tx-confirmation.png b/public/images/staking/unstake/unstake-tx-confirmation.png similarity index 100% rename from public/images/restake/unstake/unstake-tx-confirmation.png rename to public/images/staking/unstake/unstake-tx-confirmation.png diff --git a/public/images/restake/withdraw/cancel-withdraw.png b/public/images/staking/withdraw/cancel-withdraw.png similarity index 100% rename from public/images/restake/withdraw/cancel-withdraw.png rename to public/images/staking/withdraw/cancel-withdraw.png diff --git a/public/images/restake/withdraw/steps.png b/public/images/staking/withdraw/steps.png similarity index 100% rename from public/images/restake/withdraw/steps.png rename to public/images/staking/withdraw/steps.png diff --git a/public/images/restake/withdraw/withdraw-tx-confirmation.png b/public/images/staking/withdraw/withdraw-tx-confirmation.png similarity index 100% rename from public/images/restake/withdraw/withdraw-tx-confirmation.png rename to public/images/staking/withdraw/withdraw-tx-confirmation.png diff --git a/scripts/solidity-docgen/templates/helpers/index.cjs b/scripts/solidity-docgen/templates/helpers/index.cjs new file mode 100644 index 00000000..e2894389 --- /dev/null +++ b/scripts/solidity-docgen/templates/helpers/index.cjs @@ -0,0 +1,37 @@ +const path = require("path"); + +function stripSol(value) { + if (typeof value !== "string") return value; + return value.replace(/\\.sol$/i, ""); +} + +function basenameNoExt(value) { + if (typeof value !== "string") return value; + const base = path.basename(value); + return base.replace(/\\.sol$/i, ""); +} + +function githubSourceUrl(absolutePath) { + if (typeof absolutePath !== "string") return ""; + const repoRoot = + process.env.TNT_CORE_DIR && process.env.TNT_CORE_DIR.length > 0 + ? path.resolve(process.env.TNT_CORE_DIR) + : path.resolve(process.cwd(), "../tnt-core"); + const repo = + process.env.TNT_CORE_GITHUB_REPO && process.env.TNT_CORE_GITHUB_REPO.length > 0 + ? process.env.TNT_CORE_GITHUB_REPO + : "https://github.com/tangle-network/tnt-core"; + + const ref = + process.env.TNT_CORE_GITHUB_REF && process.env.TNT_CORE_GITHUB_REF.length > 0 + ? 
process.env.TNT_CORE_GITHUB_REF + : "feature/modular-protocol"; + const rel = path.relative(repoRoot, absolutePath).split(path.sep).join("/"); + return `${repo}/blob/${ref}/${rel}`; +} + +module.exports = { + stripSol, + basenameNoExt, + githubSourceUrl, +}; diff --git a/theme.config.tsx b/theme.config.tsx index 3017844a..de8aeca1 100644 --- a/theme.config.tsx +++ b/theme.config.tsx @@ -15,12 +15,13 @@ function Head() { const { frontMatter } = useConfig(); // Set default values for title and description - const defaultTitle = "Tangle Documentation"; + const defaultTitle = "Tangle Docs"; + const tabTitle = "Tangle Docs - Autonomous Work"; const defaultDescription = "Comprehensive documentation for Tangle Tools"; // Get the title and description from the front matter, or use the default values const title = frontMatter.title - ? `${frontMatter.title} - Tangle Network` + ? `${frontMatter.title} - ${defaultTitle}` : defaultTitle; const description = frontMatter.description || defaultDescription; @@ -56,7 +57,7 @@ - <title>{title}</title> + <title>{tabTitle}</title>