diff --git a/AGENTS.mk b/AGENTS.mk new file mode 100644 index 0000000..944cb70 --- /dev/null +++ b/AGENTS.mk @@ -0,0 +1,118 @@ +# +# Copyright (C) 2026 Intel Corporation +# +# SPDX-License-Identifier: Apache-2.0 +# + +MAKEFILE_PATH := $(abspath $(lastword $(MAKEFILE_LIST))) +MAKEFILE_DIR := $(realpath $(dir $(MAKEFILE_PATH))) +EXTRACT_INFO_CMAKE := ${MAKEFILE_DIR}/extract_info.cmake +IDEAS_MAKEFILE := $(MAKEFILE_DIR)/IDEAS.mk + +ANTHROPIC_AUTH_TOKEN ?= $(OPENROUTER_API_KEY) +ANTHROPIC_BASE_URL ?= https://openrouter.ai/api +ANTHROPIC_API_KEY ?= "" + +AGENT_PROVIDER ?= openrouter +AGENT_MODEL ?= anthropic/claude-sonnet-4.6 +BASE_URL ?= "https://openrouter.ai/api/v1" +TRANSLATION_DIR ?= translation.$(shell git --git-dir=${MAKEFILE_DIR}/.git rev-parse HEAD) + +export EXTRACT_INFO_CMAKE + +TARGETS ?= $(shell [ -d build-ninja ] && find build-ninja -maxdepth 1 -type f -executable -exec basename {} \; | cut -d. -f1 | sed -e "s/^lib//gi") +ifeq (${TARGETS},) +ifeq ($(filter cmake clean,$(MAKECMDGOALS)),) +$(error No TARGETS found! You need to run cmake!) 
+endif +endif + +# Docker configuration +DOCKER_DIR := ${MAKEFILE_DIR}/docker +DOCKER_WORKDIR := /home/user/IDEAS +# Relative path to the current working directory +DOCKER_REL_CWD := $(patsubst $(MAKEFILE_DIR)/%,%,$(CURDIR)) +DOCKER_RUN ?= docker run --rm \ + --init \ + -it \ + -v $(MAKEFILE_DIR):$(DOCKER_WORKDIR) \ + -v $(DOCKER_DIR)/.venv:$(DOCKER_WORKDIR)/.venv \ + -e OPENROUTER_API_KEY \ + -e TRANSLATION_DIR \ + -e AGENT_PROVIDER \ + -e AGENT_MODEL \ + -e BASE_URL \ + -e RUSTFLAGS \ + -e VERBOSE \ + ideas-$(shell id -u) + +ifdef DOCKER_RUN + # Touch directory for correct permissions when mounted + VENV_SETUP = mkdir -p $(DOCKER_DIR)/.venv + # Run inside Docker container with exit-on-error + RUN_CMD = $(DOCKER_RUN) /bin/sh -c 'set -e; cd $(DOCKER_WORKDIR)/$(DOCKER_REL_CWD); $(1)' +else + VENV_SETUP = @true + RUN_CMD = $(1) +endif + +# cmake +.PHONY: cmake +cmake: build-ninja/cmake.log + +build-ninja/cmake.log: test_case/CMakeLists.txt ${EXTRACT_INFO_CMAKE} + uv run python -m ideas.cmake source_dir=test_case build_dir=build-ninja + @touch $@ + +build-ninja/CMakeCache.txt: build-ninja/cmake.log +build-ninja/compile_commands.json: build-ninja/cmake.log +build-ninja/build.log: build-ninja/cmake.log + +# test generation from project +.PHONY: testgen +testgen: test_crate/tests/test_assert.rs ; + +.PRECIOUS: test_crate/tests/test_assert.rs +test_crate/tests/test_assert.rs: + $(VENV_SETUP) + $(call RUN_CMD,\ + uv run python -m ideas.agents.testgen model=$(if $(AGENT_PROVIDER),${AGENT_PROVIDER}/,)${AGENT_MODEL} \ + c_code=test_case \ + project_name=$(notdir $(CURDIR)) \ + test_vectors_out=test_vectors/agent \ + test_crate_out=test_crate \ + hydra.output_subdir=.testgen \ + hydra.job.name=testgen \ + hydra.run.dir=test_vectors; \ + ) + + +# library targets: generate tests from the consolidated lib.c +.PRECIOUS: test_crates/%/tests/test_assert.rs +test_crates/%/tests/test_assert.rs: ${TRANSLATION_DIR}/%/src/lib.c | build-ninja/lib%.so.type + # Copy lib.c into 
test_targets//src/ so + # build.rs can use ../../test_targets//src/lib.c + # both in Docker /tmp and on disk + mkdir -p test_targets/$*/src + cp ${TRANSLATION_DIR}/$*/src/lib.c test_targets/$*/src/lib.c + $(VENV_SETUP) + $(call RUN_CMD,\ + uv run python -m ideas.agents.testgen model=$(if $(AGENT_PROVIDER),${AGENT_PROVIDER}/,)${AGENT_MODEL} \ + c_code=test_targets/$*/src/lib.c \ + project_name=$* \ + test_vectors_out=test_vectors/$*/agent \ + test_crate_out=test_crates/$* \ + hydra.output_subdir=.testgen \ + hydra.job.name=testgen \ + hydra.run.dir=test_vectors/$*; \ + ) + +# executable targets: do nothing +test_crates/%/tests/test_assert.rs: ${TRANSLATION_DIR}/%/src/main.c | build-ninja/%.type + mkdir -p test_crates/$*/tests + touch test_crates/$*/tests/test_assert.rs + +# fallback +test_crates/%/tests/test_assert.rs: + mkdir -p test_crates/$*/tests + touch test_crates/$*/tests/test_assert.rs diff --git a/IDEAS.mk b/IDEAS.mk index 150a62e..18caaeb 100644 --- a/IDEAS.mk +++ b/IDEAS.mk @@ -6,26 +6,32 @@ MAKEFILE_PATH := $(abspath $(lastword $(MAKEFILE_LIST))) MAKEFILE_DIR := $(realpath $(dir $(MAKEFILE_PATH))) +PIPELINE_DIR := ${MAKEFILE_DIR}/lib/pipeline_automation +PIPELINE_TAG := ideas/$(shell git rev-list -1 HEAD -- ${PIPELINE_DIR}) EXTRACT_INFO_CMAKE := ${MAKEFILE_DIR}/extract_info.cmake +AGENTS_MAKEFILE := $(MAKEFILE_DIR)/AGENTS.mk PROVIDER ?= hosted_vllm -MODEL ?= Qwen/Qwen3-Coder-30B-A3B-Instruct +MODEL ?= Qwen/Qwen3.5-397B-A17B HOST ?= localhost PORT ?= 8000 BASE_URL ?= http://${HOST}:${PORT}/v1 TRANSLATION_DIR ?= translation.$(shell git --git-dir=${MAKEFILE_DIR}/.git rev-parse HEAD) ifeq (${PROVIDER},hosted_vllm) override TRANSLATE_ARGS += model.base_url=${BASE_URL} -override WRAPPER_ARGS += model.base_url=${BASE_URL} endif RUSTFLAGS ?= -Awarnings## Ignore Rust compiler warnings CARGO_NET_OFFLINE ?= true## Cargo offline mode CFLAGS ?= -w## Ignore C compiler warnings export EXTRACT_INFO_CMAKE CFLAGS -GIT = git -C ${TRANSLATION_DIR} +VCS ?= git 
+GIT_AUTHOR_NAME ?= ideas +GIT_AUTHOR_EMAIL ?= ideas@localhost +export GIT_AUTHOR_NAME GIT_AUTHOR_EMAIL -TEST_FILES := $(realpath $(wildcard test_vectors/*.json)) +## Per-target test vectors: test_vectors//*.json +TEST_FILES = $(wildcard test_vectors/$*/*.json) TARGETS ?= $(shell [ -d build-ninja ] && find build-ninja -maxdepth 1 -type f -executable -exec basename {} \; | cut -d. -f1 | sed -e "s/^lib//gi") ifeq (${TARGETS},) ifeq ($(filter cmake clean,$(MAKECMDGOALS)),) @@ -56,42 +62,36 @@ ${TRANSLATION_DIR}/%/init: ${TRANSLATION_DIR}/%/src/main.c | build-ninja/%.type .PRECIOUS: ${TRANSLATION_DIR}/Cargo.toml ${TRANSLATION_DIR}/Cargo.toml: @mkdir -p ${TRANSLATION_DIR} - ${GIT} init --quiet --initial-branch=main - echo "Cargo.lock\ntarget/\n*.log\n*.jsonl" > ${TRANSLATION_DIR}/.gitignore - ${GIT} add .gitignore - ${GIT} commit --quiet --all --message "Initial commit" - echo -n "[workspace]\nresolver = \"3\"" > $@ - ${GIT} add Cargo.toml - ${GIT} commit --quiet --all --message "Created cargo workspace" + uv run python -m ideas.init.workspace cargo_toml=$@ vcs=${VCS} # initialize translated crate for each C target .PRECIOUS: ${TRANSLATION_DIR}/%/Cargo.toml ${TRANSLATION_DIR}/%/Cargo.toml: | ${TRANSLATION_DIR}/Cargo.toml build-ninja/lib%.so.type - uv run python -m ideas.init.crate crate_type=lib vcs=git \ + uv run python -m ideas.init.crate crate_type=lib vcs=${VCS} \ hydra.output_subdir=.init \ hydra.run.dir=${TRANSLATION_DIR}/$* .PRECIOUS: ${TRANSLATION_DIR}/%/Cargo.toml ${TRANSLATION_DIR}/%/Cargo.toml: | ${TRANSLATION_DIR}/Cargo.toml build-ninja/%.type - uv run python -m ideas.init.crate crate_type=bin vcs=git \ + uv run python -m ideas.init.crate crate_type=bin vcs=${VCS} \ hydra.output_subdir=.init \ hydra.run.dir=${TRANSLATION_DIR}/$* # consolidate each C target .PRECIOUS: ${TRANSLATION_DIR}/%/src/lib.c -${TRANSLATION_DIR}/%/src/lib.c: | ${TRANSLATION_DIR}/%/Cargo.toml build-ninja/lib%.so.type - uv run python -m ideas.init.consolidate 
filename=build-ninja/compile_commands.json \ +${TRANSLATION_DIR}/%/src/lib.c: | ${TRANSLATION_DIR}/%/Cargo.toml build-ninja/compile_commands.json build-ninja/lib%.so.sources + -uv run python -m ideas.init.consolidate filename=build-ninja/compile_commands.json \ + vcs=${VCS} \ cargo_toml=${TRANSLATION_DIR}/$*/Cargo.toml \ - export_symbols=build-ninja/lib$*.so.symbols \ source_priority=build-ninja/lib$*.so.sources \ hydra.output_subdir=.init.consolidate \ hydra.run.dir=${TRANSLATION_DIR}/$* .PRECIOUS: ${TRANSLATION_DIR}/%/src/main.c -${TRANSLATION_DIR}/%/src/main.c: | ${TRANSLATION_DIR}/%/Cargo.toml build-ninja/%.type - uv run python -m ideas.init.consolidate filename=build-ninja/compile_commands.json \ +${TRANSLATION_DIR}/%/src/main.c: | ${TRANSLATION_DIR}/%/Cargo.toml build-ninja/compile_commands.json build-ninja/%.sources + -uv run python -m ideas.init.consolidate filename=build-ninja/compile_commands.json \ + vcs=${VCS} \ cargo_toml=${TRANSLATION_DIR}/$*/Cargo.toml \ - export_symbols=build-ninja/$*.symbols \ source_priority=build-ninja/$*.sources \ hydra.output_subdir=.init.consolidate \ hydra.run.dir=${TRANSLATION_DIR}/$* @@ -103,19 +103,21 @@ ${TRANSLATION_DIR}/%/translate: ${TRANSLATION_DIR}/%/src/lib.rs | build-ninja/li ${TRANSLATION_DIR}/%/translate: ${TRANSLATION_DIR}/%/src/main.rs | build-ninja/%.type ; .PRECIOUS: ${TRANSLATION_DIR}/%/src/lib.rs -${TRANSLATION_DIR}/%/src/lib.rs: ${TRANSLATION_DIR}/%/src/lib.c | build-ninja/compile_commands.json build-ninja/lib%.so.symbols build-ninja/lib%.so.sources +${TRANSLATION_DIR}/%/src/lib.rs: ${TRANSLATION_DIR}/%/src/lib.c ${TRANSLATION_DIR}/%/tests/test_cases.rs | ${TRANSLATION_DIR}/%/Cargo.toml -uv run python -m ideas.translate model.name=${PROVIDER}/${MODEL} \ filename=${TRANSLATION_DIR}/$*/src/lib.c \ - vcs=git \ + cargo_toml=${TRANSLATION_DIR}/$*/Cargo.toml \ + vcs=${VCS} \ hydra.output_subdir=.translate \ hydra.job.name=translate \ hydra.run.dir=${TRANSLATION_DIR}/$* ${TRANSLATE_ARGS} .PRECIOUS: 
${TRANSLATION_DIR}/%/src/main.rs -${TRANSLATION_DIR}/%/src/main.rs: ${TRANSLATION_DIR}/%/src/main.c | build-ninja/compile_commands.json build-ninja/%.symbols build-ninja/%.sources +${TRANSLATION_DIR}/%/src/main.rs: ${TRANSLATION_DIR}/%/src/main.c ${TRANSLATION_DIR}/%/tests/test_cases.rs | ${TRANSLATION_DIR}/%/Cargo.toml -uv run python -m ideas.translate model.name=${PROVIDER}/${MODEL} \ filename=${TRANSLATION_DIR}/$*/src/main.c \ - vcs=git \ + cargo_toml=${TRANSLATION_DIR}/$*/Cargo.toml \ + vcs=${VCS} \ hydra.output_subdir=.translate \ hydra.job.name=translate \ hydra.run.dir=${TRANSLATION_DIR}/$* ${TRANSLATE_ARGS} @@ -127,16 +129,9 @@ wrapper: $(patsubst %,${TRANSLATION_DIR}/%/wrapper,${TARGETS}) ; ${TRANSLATION_DIR}/%/wrapper: ${TRANSLATION_DIR}/%/src/wrapper.rs ; .PRECIOUS: ${TRANSLATION_DIR}/%/src/wrapper.rs -${TRANSLATION_DIR}/%/src/wrapper.rs: ${TRANSLATION_DIR}/%/src/lib.rs | build-ninja/lib%.so.symbols - -uv run python -m ideas.wrapper model.name=${PROVIDER}/${MODEL} \ - symbols=build-ninja/lib$*.so.symbols \ - cargo_toml=${TRANSLATION_DIR}/$*/Cargo.toml \ - vcs=git \ - hydra.output_subdir=.wrapper \ - hydra.job.name=wrapper \ - hydra.run.dir=${TRANSLATION_DIR}/$* ${WRAPPER_ARGS} - -${TRANSLATION_DIR}/%/src/wrapper.rs: ${TRANSLATION_DIR}/%/src/main.rs | build-ninja/%.symbols +${TRANSLATION_DIR}/%/src/wrapper.rs: ${TRANSLATION_DIR}/%/src/lib.rs | ${TRANSLATION_DIR}/%/Cargo.toml + touch $@ +${TRANSLATION_DIR}/%/src/wrapper.rs: ${TRANSLATION_DIR}/%/src/main.rs touch $@ # build @@ -152,6 +147,22 @@ ${TRANSLATION_DIR}/%/build.log: ${TRANSLATION_DIR}/%/src/wrapper.rs -export RUSTFLAGS=${RUSTFLAGS} && cargo build --quiet --manifest-path ${TRANSLATION_DIR}/$*/Cargo.toml 2> ${TRANSLATION_DIR}/$*/build.log @cat ${TRANSLATION_DIR}/$*/build.log +.PRECIOUS: ${TRANSLATION_DIR}/unsafety.json +${TRANSLATION_DIR}/unsafety.json: ${TRANSLATION_DIR}/build.log + uv run --with-requirements ${PIPELINE_DIR}/requirements.txt \ + python 
${PIPELINE_DIR}/evaluate_unsafe_usage/invoke_unsafety.py \ + --container-name ${PIPELINE_TAG}/unsafety \ + $( [!TIP] +> If the `OPENROUTER_API_KEY` environment variable is set on the host, it will be automatically passed to the interactive session. + # Basic usage with OpenRouter Our translation framework treats [OpenRouter](https://openrouter.ai/) as the meta-provider of choice, allowing easy switching between models. The `MODEL` environment variable controls which LLM will be used, and should be the model's name on OpenRouter. diff --git a/docker/ideas.Dockerfile b/docker/ideas.Dockerfile index 5e5a38e..82a29dd 100644 --- a/docker/ideas.Dockerfile +++ b/docker/ideas.Dockerfile @@ -41,7 +41,7 @@ RUN ln -s /usr/bin/clang-21 /usr/bin/clang # Install uv ENV UV_INSTALL_DIR="/usr/local/bin" -RUN curl -LsSf https://astral.sh/uv/0.9.22/install.sh | sh +RUN curl -LsSf https://astral.sh/uv/0.10.9/install.sh | sh # Install Rust toolchain non-interactively RUN rustup default 1.88.0 @@ -68,3 +68,6 @@ ENV GIT_COMMITTER_EMAIL="ideas@ideas.local" # Configure Python and uv ENV PYTHONDONTWRITEBYTECODE=1 ENV UV_LINK_MODE="copy" + +# Shell quality-of-life for interactive use +COPY --chown=user:ideas ideas.bashrc /home/user/.bashrc diff --git a/docker/ideas.bashrc b/docker/ideas.bashrc new file mode 100644 index 0000000..9a68aa2 --- /dev/null +++ b/docker/ideas.bashrc @@ -0,0 +1,21 @@ +# Color support +alias ls="ls --color=auto" +alias grep="grep --color=auto" +alias ll="ls -alF" +alias la="ls -A" +alias l="ls -CF" + +# Colored prompt +PS1='\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ ' + +# History +HISTCONTROL=ignoreboth +shopt -s histappend +HISTSIZE=1000 +HISTFILESIZE=2000 + +# Check window size after each command +shopt -s checkwinsize + +# Make autocomplete: complete with filenames +complete -f make diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/CMakeLists.txt b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/CMakeLists.txt new file mode 
100644 index 0000000..4d5706e --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/CMakeLists.txt @@ -0,0 +1,8 @@ +cmake_minimum_required(VERSION 3.19) +project(ci_wrapper) + +set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}") +set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}") +set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}") + +add_subdirectory(test_case) diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/CMakePresets.json b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/CMakePresets.json new file mode 100644 index 0000000..323c68f --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/CMakePresets.json @@ -0,0 +1,35 @@ +{ + "version": 6, + "cmakeMinimumRequired": { + "major": 3, + "minor": 19, + "patch": 0 + }, + "configurePresets": [ + { + "name": "base", + "hidden": true, + "generator": "Ninja", + "binaryDir": "build-ninja", + "cacheVariables": { + "CMAKE_C_STANDARD": "99", + "CMAKE_BUILD_TYPE": "Release" + } + }, + { + "name": "test", + "inherits": "base", + "cacheVariables": { + "HASH_BACKEND": "blake", + "SECPAR": "128f", + "THASH": "simple" + } + } + ], + "buildPresets": [ + { + "name": "test", + "configurePreset": "test" + } + ] +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/CMakeLists.txt b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/CMakeLists.txt new file mode 100644 index 0000000..d907ec6 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/CMakeLists.txt @@ -0,0 +1,18 @@ +cmake_minimum_required(VERSION 3.10) +project(SPHINCS_PLUS C) + +# Parameters for hash backend +set(HASH_BACKEND "haraka" CACHE STRING "Hash backend (haraka, sha2, shake256, blake)") +set(THASH "robust" CACHE STRING "thash variant (robust, simple)") +set(SECPAR "128s" CACHE STRING "hash parameters (secpar(s|f)") + +set(CMAKE_C_STANDARD 99) +find_program(CLANG_EXECUTABLE clang) +if(CLANG_EXECUTABLE) + set(CMAKE_C_COMPILER ${CLANG_EXECUTABLE}) 
+endif() +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -O3") + +# Add subdirectories +add_subdirectory(lib) +add_subdirectory(app) diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/LICENSE b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/LICENSE new file mode 100644 index 0000000..670154e --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/LICENSE @@ -0,0 +1,116 @@ +CC0 1.0 Universal + +Statement of Purpose + +The laws of most jurisdictions throughout the world automatically confer +exclusive Copyright and Related Rights (defined below) upon the creator and +subsequent owner(s) (each and all, an "owner") of an original work of +authorship and/or a database (each, a "Work"). + +Certain owners wish to permanently relinquish those rights to a Work for the +purpose of contributing to a commons of creative, cultural and scientific +works ("Commons") that the public can reliably and without fear of later +claims of infringement build upon, modify, incorporate in other works, reuse +and redistribute as freely as possible in any form whatsoever and for any +purposes, including without limitation commercial purposes. These owners may +contribute to the Commons to promote the ideal of a free culture and the +further production of creative, cultural and scientific works, or to gain +reputation or greater distribution for their Work in part through the use and +efforts of others. + +For these and/or other purposes and motivations, and without any expectation +of additional consideration or compensation, the person associating CC0 with a +Work (the "Affirmer"), to the extent that he or she is an owner of Copyright +and Related Rights in the Work, voluntarily elects to apply CC0 to the Work +and publicly distribute the Work under its terms, with knowledge of his or her +Copyright and Related Rights in the Work and the meaning and intended legal +effect of CC0 on those rights. + +1. Copyright and Related Rights. 
A Work made available under CC0 may be +protected by copyright and related or neighboring rights ("Copyright and +Related Rights"). Copyright and Related Rights include, but are not limited +to, the following: + + i. the right to reproduce, adapt, distribute, perform, display, communicate, + and translate a Work; + + ii. moral rights retained by the original author(s) and/or performer(s); + + iii. publicity and privacy rights pertaining to a person's image or likeness + depicted in a Work; + + iv. rights protecting against unfair competition in regards to a Work, + subject to the limitations in paragraph 4(a), below; + + v. rights protecting the extraction, dissemination, use and reuse of data in + a Work; + + vi. database rights (such as those arising under Directive 96/9/EC of the + European Parliament and of the Council of 11 March 1996 on the legal + protection of databases, and under any national implementation thereof, + including any amended or successor version of such directive); and + + vii. other similar, equivalent or corresponding rights throughout the world + based on applicable law or treaty, and any national implementations thereof. + +2. Waiver. To the greatest extent permitted by, but not in contravention of, +applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and +unconditionally waives, abandons, and surrenders all of Affirmer's Copyright +and Related Rights and associated claims and causes of action, whether now +known or unknown (including existing as well as future claims and causes of +action), in the Work (i) in all territories worldwide, (ii) for the maximum +duration provided by applicable law or treaty (including future time +extensions), (iii) in any current or future medium and for any number of +copies, and (iv) for any purpose whatsoever, including without limitation +commercial, advertising or promotional purposes (the "Waiver"). 
Affirmer makes +the Waiver for the benefit of each member of the public at large and to the +detriment of Affirmer's heirs and successors, fully intending that such Waiver +shall not be subject to revocation, rescission, cancellation, termination, or +any other legal or equitable action to disrupt the quiet enjoyment of the Work +by the public as contemplated by Affirmer's express Statement of Purpose. + +3. Public License Fallback. Should any part of the Waiver for any reason be +judged legally invalid or ineffective under applicable law, then the Waiver +shall be preserved to the maximum extent permitted taking into account +Affirmer's express Statement of Purpose. In addition, to the extent the Waiver +is so judged Affirmer hereby grants to each affected person a royalty-free, +non transferable, non sublicensable, non exclusive, irrevocable and +unconditional license to exercise Affirmer's Copyright and Related Rights in +the Work (i) in all territories worldwide, (ii) for the maximum duration +provided by applicable law or treaty (including future time extensions), (iii) +in any current or future medium and for any number of copies, and (iv) for any +purpose whatsoever, including without limitation commercial, advertising or +promotional purposes (the "License"). The License shall be deemed effective as +of the date CC0 was applied by Affirmer to the Work. Should any part of the +License for any reason be judged legally invalid or ineffective under +applicable law, such partial invalidity or ineffectiveness shall not +invalidate the remainder of the License, and in such case Affirmer hereby +affirms that he or she will not (i) exercise any of his or her remaining +Copyright and Related Rights in the Work or (ii) assert any associated claims +and causes of action with respect to the Work, in either case contrary to +Affirmer's express Statement of Purpose. + +4. Limitations and Disclaimers. + + a. 
No trademark or patent rights held by Affirmer are waived, abandoned, + surrendered, licensed or otherwise affected by this document. + + b. Affirmer offers the Work as-is and makes no representations or warranties + of any kind concerning the Work, express, implied, statutory or otherwise, + including without limitation warranties of title, merchantability, fitness + for a particular purpose, non infringement, or the absence of latent or + other defects, accuracy, or the present or absence of errors, whether or not + discoverable, all to the greatest extent permissible under applicable law. + + c. Affirmer disclaims responsibility for clearing rights of other persons + that may apply to the Work or any use thereof, including without limitation + any person's Copyright and Related Rights in the Work. Further, Affirmer + disclaims responsibility for obtaining any necessary consents, permissions + or other rights required for any use of the Work. + + d. Affirmer understands and acknowledges that Creative Commons is not a + party to this document and has no duty or obligation with respect to this + CC0 or use of the Work. + +For more information, please see + diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/README.md b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/README.md new file mode 100644 index 0000000..f265f00 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/README.md @@ -0,0 +1,36 @@ +SPHINCS+ +========== +This repository contains a modified version of the [SPHINCS+ scheme reference](https://github.com/eyalr0/sphincsplusc/) including Jean-Philippe Aumasson's reference implementation of BLAKE. + +SPHINCS+ is a plausibly quantum resilient hash-based signature scheme which was selected by NIST for the [FIPS 205](https://csrc.nist.gov/pubs/fips/205/final) SLH-DSA "Stateless Hash-Based Digital Signature Standard" for post-quantum signatures. 
+The SPHINCS+ construction combines multiple layers: Winternitz one-time signatures (WOTS+), a few-time signature scheme (FORS), Merkle trees, and a hypertree structure to enable scalable signing without state management. + +## Build Instructions +Prerequisites: +- libcrypto + +### Parameters +CMake requires three arguments to build the libraries. +- `HASH_BACKEND` Defining which underlying hash function to use `(blake, sha2, shake, haraka)` +- `THASH` Determining whether to use the robust or simple construction `(robust, simple)` +- `SECPAR` Determining the security parameter and whether to use short or fast signatures `(128f, 128s, 192f, 192s, 256f, 256s)` + +The possible values are all listed in `CMakeLists.txt` with the exception of +`SECPAR`, which are formatted as the integer parameter with an appended character `s` or `f` indicating whether the signatures should prioritize length (short) or speed (fast). The integer parameter can either be 128, 192 or 256. An example `SECPAR` value is `128s`. + +### Build Commands +To build for a particular set of parameters in a subdirectory `build`: +``` +mkdir build +cmake -B build -DHASH_BACKEND=sha2 -DTHASH=robust -DSECPAR=192f +cmake --build build +``` + +## Associated Executable +For testing purposes, there is a provided executable whose main function can be found in [PQCgenKAT_sign.c](./app/src/PQCgenKAT_sign.c). +This executable performs an in-memory test of signing and verification capabilities before producing a shake256 digest of the signature transcripts. + +PQCgenKAT_sign.c links against the underlying hash backend with parameters defined by the CMake arguments `HASH_BACKEND`, `THASH`, and `SECPAR` described earlier.
+ +## License +Following the original code from the [SPHINCS+ submission repository](https://github.com/sphincs/sphincsplus), the [SPHINCS+ reference implementation](https://github.com/eyalr0/sphincsplusc/), and Jean-Philippe's BLAKE implementation, all included code is available under the CC0 1.0 Universal Public Domain Dedication, with the exception of rng.c and rng.h which were provided by NIST, and PQCgenKAT_sign.c which was originally provided by NIST, but we have altered to no longer perform file IO. diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/CMakeLists.txt b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/CMakeLists.txt new file mode 100644 index 0000000..fe1de99 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/CMakeLists.txt @@ -0,0 +1,47 @@ +# Core sources that are common to both variants --- NO random source here +set(SPHINCS_CORE_COMMON + src/address.c + src/fors.c + src/merkle.c + src/sign.c + src/utils.c + src/utilsx1.c + src/wots.c + src/wotsx1.c +) + +set(PARAMS "sphincs-${HASH_BACKEND}-${SECPAR}") +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DPARAMS=${PARAMS}") +if(${HASH_BACKEND} STREQUAL "blake") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DBLAKE_TR=1") +elseif(${HASH_BACKEND} STREQUAL "haraka") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DHARAKA_TR=1") +elseif(${HASH_BACKEND} STREQUAL "sha2") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DSHA2_TR=1") +elseif(${HASH_BACKEND} STREQUAL "shake") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DSHAKE_TR=1") +endif() + +# Build once as an OBJECT library (so tests and apps reuse the same objects) +add_library(sphincs_obj SHARED OBJECT ${SPHINCS_CORE_COMMON}) +target_include_directories(sphincs_obj PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include) + +# Normal core (uses randombytes.c) +add_library(sphincs_core SHARED + $ + src/randombytes.c +) +target_include_directories(sphincs_core PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include) + +# Deterministic core (uses 
rng.c) +add_library(sphincs_core_det SHARED + $ + src/rng.c +) +target_include_directories(sphincs_core_det PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include) + +# Main executable uses the deterministic core (no randombytes.c) +add_executable(driver src/PQCgenKAT_sign.c) + +# Link correct hash backend based on parameter +target_link_libraries(driver sphincs_core_det ${HASH_BACKEND} crypto) diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/address.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/address.h new file mode 100644 index 0000000..24a84eb --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/address.h @@ -0,0 +1,52 @@ +#ifndef SPX_ADDRESS_H +#define SPX_ADDRESS_H + +#include + +#include "params.h" + +/* The hash types that are passed to set_type */ +#define SPX_ADDR_TYPE_WOTS 0 +#define SPX_ADDR_TYPE_WOTSPK 1 +#define SPX_ADDR_TYPE_HASHTREE 2 +#define SPX_ADDR_TYPE_FORSTREE 3 +#define SPX_ADDR_TYPE_FORSPK 4 +#define SPX_ADDR_TYPE_WOTSPRF 5 +#define SPX_ADDR_TYPE_FORSPRF 6 + +#define set_layer_addr SPX_NAMESPACE(set_layer_addr) +void set_layer_addr(uint32_t addr[8], uint32_t layer); + +#define set_tree_addr SPX_NAMESPACE(set_tree_addr) +void set_tree_addr(uint32_t addr[8], uint64_t tree); + +#define set_type SPX_NAMESPACE(set_type) +void set_type(uint32_t addr[8], uint32_t type); + +/* Copies the layer and tree part of one address into the other */ +#define copy_subtree_addr SPX_NAMESPACE(copy_subtree_addr) +void copy_subtree_addr(uint32_t out[8], const uint32_t in[8]); + +/* These functions are used for WOTS and FORS addresses. 
*/ + +#define set_keypair_addr SPX_NAMESPACE(set_keypair_addr) +void set_keypair_addr(uint32_t addr[8], uint32_t keypair); + +#define set_chain_addr SPX_NAMESPACE(set_chain_addr) +void set_chain_addr(uint32_t addr[8], uint32_t chain); + +#define set_hash_addr SPX_NAMESPACE(set_hash_addr) +void set_hash_addr(uint32_t addr[8], uint32_t hash); + +#define copy_keypair_addr SPX_NAMESPACE(copy_keypair_addr) +void copy_keypair_addr(uint32_t out[8], const uint32_t in[8]); + +/* These functions are used for all hash tree addresses (including FORS). */ + +#define set_tree_height SPX_NAMESPACE(set_tree_height) +void set_tree_height(uint32_t addr[8], uint32_t tree_height); + +#define set_tree_index SPX_NAMESPACE(set_tree_index) +void set_tree_index(uint32_t addr[8], uint32_t tree_index); + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/api.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/api.h new file mode 100644 index 0000000..9e01b41 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/api.h @@ -0,0 +1,79 @@ +#ifndef SPX_API_H +#define SPX_API_H + +#include +#include + +#include "params.h" + +#ifndef CRYPTO_ALGNAME +#define CRYPTO_ALGNAME "SPHINCS+" +#endif + +#define CRYPTO_SECRETKEYBYTES SPX_SK_BYTES +#define CRYPTO_PUBLICKEYBYTES SPX_PK_BYTES +#define CRYPTO_BYTES SPX_BYTES +#define CRYPTO_SEEDBYTES 3*SPX_N + +/* + * Returns the length of a secret key, in bytes + */ +unsigned long long crypto_sign_secretkeybytes(void); + +/* + * Returns the length of a public key, in bytes + */ +unsigned long long crypto_sign_publickeybytes(void); + +/* + * Returns the length of a signature, in bytes + */ +unsigned long long crypto_sign_bytes(void); + +/* + * Returns the length of the seed required to generate a key pair, in bytes + */ +unsigned long long crypto_sign_seedbytes(void); + +/* + * Generates a SPHINCS+ key pair given a seed. 
+ * Format sk: [SK_SEED || SK_PRF || PUB_SEED || root] + * Format pk: [root || PUB_SEED] + */ +int crypto_sign_seed_keypair(unsigned char *pk, unsigned char *sk, + const unsigned char *seed); + +/* + * Generates a SPHINCS+ key pair. + * Format sk: [SK_SEED || SK_PRF || PUB_SEED || root] + * Format pk: [root || PUB_SEED] + */ +int crypto_sign_keypair(unsigned char *pk, unsigned char *sk); + +/** + * Returns an array containing a detached signature. + */ +int crypto_sign_signature(uint8_t *sig, size_t *siglen, + const uint8_t *m, size_t mlen, const uint8_t *sk); + +/** + * Verifies a detached signature and message under a given public key. + */ +int crypto_sign_verify(const uint8_t *sig, size_t siglen, + const uint8_t *m, size_t mlen, const uint8_t *pk); + +/** + * Returns an array containing the signature followed by the message. + */ +int crypto_sign(unsigned char *sm, unsigned long long *smlen, + const unsigned char *m, unsigned long long mlen, + const unsigned char *sk); + +/** + * Verifies a given signature-message pair under a given public key. 
+ */ +int crypto_sign_open(unsigned char *m, unsigned long long *mlen, + const unsigned char *sm, unsigned long long smlen, + const unsigned char *pk); + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/context.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/context.h new file mode 100644 index 0000000..aded564 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/context.h @@ -0,0 +1,28 @@ +#ifndef SPX_CONTEXT_H +#define SPX_CONTEXT_H + +#include + +#include "params.h" + +typedef struct { + uint8_t pub_seed[SPX_N]; + uint8_t sk_seed[SPX_N]; + +#ifdef SPX_SHA2 + // sha256 state that absorbed pub_seed + uint8_t state_seeded[40]; + +# if SPX_SHA512 + // sha512 state that absorbed pub_seed + uint8_t state_seeded_512[72]; +# endif +#endif + +#ifdef SPX_HARAKA + uint64_t tweaked512_rc64[10][8]; + uint32_t tweaked256_rc32[10][8]; +#endif +} spx_ctx; + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/fors.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/fors.h new file mode 100644 index 0000000..ede956f --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/fors.h @@ -0,0 +1,36 @@ +#ifndef SPX_FORS_H +#define SPX_FORS_H + +#include + +#include "context.h" +#include "params.h" + +typedef struct fors_gen_leaf_info { + uint32_t leaf_addrx[8]; +} fors_gen_leaf_info; + +/** + * Signs a message m, deriving the secret key from sk_seed and the FTS address. + * Assumes m contains at least SPX_FORS_HEIGHT * SPX_FORS_TREES bits. + */ +#define fors_sign SPX_NAMESPACE(fors_sign) +void fors_sign(unsigned char *sig, unsigned char *pk, + const unsigned char *m, + const spx_ctx* ctx, + const uint32_t fors_addr[8]); + +/** + * Derives the FORS public key from a signature. 
+ * This can be used for verification by comparing to a known public key, or to + * subsequently verify a signature on the derived public key. The latter is the + * typical use-case when used as an FTS below an OTS in a hypertree. + * Assumes m contains at least SPX_FORS_HEIGHT * SPX_FORS_TREES bits. + */ +#define fors_pk_from_sig SPX_NAMESPACE(fors_pk_from_sig) +void fors_pk_from_sig(unsigned char *pk, + const unsigned char *sig, const unsigned char *m, + const spx_ctx* ctx, + const uint32_t fors_addr[8]); + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/forsx1.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/forsx1.h new file mode 100644 index 0000000..f40a13c --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/forsx1.h @@ -0,0 +1,12 @@ +#if !defined( FORSX1_H_ ) +#define FORSX1_H_ + +#include "context.h" +#include "fors.h" + +#define fors_gen_leafx1 SPX_NAMESPACE(fors_gen_leafx1) +void fors_gen_leafx1(unsigned char *leaf, + const spx_ctx *ctx, + uint32_t addr_idx, fors_gen_leaf_info *info); + +#endif /* FORSX1_H_ */ diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/hash.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/hash.h new file mode 100644 index 0000000..c8fb569 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/hash.h @@ -0,0 +1,28 @@ +#ifndef SPX_HASH_H +#define SPX_HASH_H + +#include + +#include "context.h" +#include "params.h" + +#define initialize_hash_function SPX_NAMESPACE(initialize_hash_function) +void initialize_hash_function(spx_ctx *ctx); + +#define prf_addr SPX_NAMESPACE(prf_addr) +void prf_addr(unsigned char *out, const spx_ctx *ctx, + const uint32_t addr[8]); + +#define gen_message_random SPX_NAMESPACE(gen_message_random) +void gen_message_random(unsigned char *R, const unsigned char *sk_prf, + const unsigned char *optrand, + 
const unsigned char *m, unsigned long long mlen, + const spx_ctx *ctx); + +#define hash_message SPX_NAMESPACE(hash_message) +void hash_message(unsigned char *digest, uint64_t *tree, uint32_t *leaf_idx, + const unsigned char *R, const unsigned char *pk, + const unsigned char *m, unsigned long long mlen, + const spx_ctx *ctx); + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/merkle.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/merkle.h new file mode 100644 index 0000000..3ca3160 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/merkle.h @@ -0,0 +1,19 @@ +#if !defined( MERKLE_H_ ) +#define MERKLE_H_ + +#include +#include "context.h" + +/* Generate a Merkle signature (WOTS signature followed by the Merkle */ +/* authentication path) */ +#define merkle_sign SPX_NAMESPACE(merkle_sign) +void merkle_sign(uint8_t *sig, unsigned char *root, + const spx_ctx* ctx, + uint32_t wots_addr[8], uint32_t tree_addr[8], + uint32_t idx_leaf); + +/* Compute the root node of the top-most subtree. 
*/ +#define merkle_gen_root SPX_NAMESPACE(merkle_gen_root) +void merkle_gen_root(unsigned char *root, const spx_ctx* ctx); + +#endif /* MERKLE_H_ */ diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/params.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/params.h new file mode 100644 index 0000000..e4653b7 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/params.h @@ -0,0 +1,5 @@ +#define str(s) #s +#define xstr(s) str(s) + +#include xstr(../params/params-PARAMS.h) + diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/randombytes.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/randombytes.h new file mode 100644 index 0000000..671c1b1 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/randombytes.h @@ -0,0 +1,6 @@ +#ifndef SPX_RANDOMBYTES_H +#define SPX_RANDOMBYTES_H + +extern void randombytes(unsigned char * x,unsigned long long xlen); + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/rng.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/rng.h new file mode 100644 index 0000000..c4f1c60 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/rng.h @@ -0,0 +1,54 @@ +// +// rng.h +// +// Created by Bassham, Lawrence E (Fed) on 8/29/17. +// Copyright © 2017 Bassham, Lawrence E (Fed). All rights reserved. 
+// + +#ifndef rng_h +#define rng_h + +#include + +#define RNG_SUCCESS 0 +#define RNG_BAD_MAXLEN -1 +#define RNG_BAD_OUTBUF -2 +#define RNG_BAD_REQ_LEN -3 + +typedef struct { + unsigned char buffer[16]; + unsigned long buffer_pos; + unsigned long length_remaining; + unsigned char key[32]; + unsigned char ctr[16]; +} AES_XOF_struct; + +typedef struct { + unsigned char Key[32]; + unsigned char V[16]; + int reseed_counter; +} AES256_CTR_DRBG_struct; + + +void +AES256_CTR_DRBG_Update(unsigned char *provided_data, + unsigned char *Key, + unsigned char *V); + +int +seedexpander_init(AES_XOF_struct *ctx, + unsigned char *seed, + unsigned char *diversifier, + unsigned long maxlen); + +int +seedexpander(AES_XOF_struct *ctx, unsigned char *x, unsigned long xlen); + +void +randombytes_init(unsigned char *entropy_input, + unsigned char *personalization_string); + +int +randombytes(unsigned char *x, unsigned long long xlen); + +#endif /* rng_h */ diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/thash.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/thash.h new file mode 100644 index 0000000..53b27d2 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/thash.h @@ -0,0 +1,13 @@ +#ifndef SPX_THASH_H +#define SPX_THASH_H + +#include + +#include "context.h" +#include "params.h" + +#define thash SPX_NAMESPACE(thash) +void thash(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]); + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/utils.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/utils.h new file mode 100644 index 0000000..1f9efe2 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/utils.h @@ -0,0 +1,55 @@ +#ifndef SPX_UTILS_H +#define SPX_UTILS_H + +#include + +#include "context.h" +#include "params.h" + +# define 
SPX_VLA(__t,__x,__s) __t __x[__s] + +/** + * Converts the value of 'in' to 'outlen' bytes in big-endian byte order. + */ +#define ull_to_bytes SPX_NAMESPACE(ull_to_bytes) +void ull_to_bytes(unsigned char *out, unsigned int outlen, + unsigned long long in); +#define u32_to_bytes SPX_NAMESPACE(u32_to_bytes) +void u32_to_bytes(unsigned char *out, uint32_t in); + +/** + * Converts the inlen bytes in 'in' from big-endian byte order to an integer. + */ +#define bytes_to_ull SPX_NAMESPACE(bytes_to_ull) +unsigned long long bytes_to_ull(const unsigned char *in, unsigned int inlen); + +/** + * Computes a root node given a leaf and an auth path. + * Expects address to be complete other than the tree_height and tree_index. + */ +#define compute_root SPX_NAMESPACE(compute_root) +void compute_root(unsigned char *root, const unsigned char *leaf, + uint32_t leaf_idx, uint32_t idx_offset, + const unsigned char *auth_path, uint32_t tree_height, + const spx_ctx *ctx, uint32_t addr[8]); + +/** + * For a given leaf index, computes the authentication path and the resulting + * root node using Merkle's TreeHash algorithm. + * Expects the layer and tree parts of the tree_addr to be set, as well as the + * tree type (i.e. SPX_ADDR_TYPE_HASHTREE or SPX_ADDR_TYPE_FORSTREE). + * Applies the offset idx_offset to indices before building addresses, so that + * it is possible to continue counting indices across trees. 
+ */ +#define treehash SPX_NAMESPACE(treehash) +void treehash(unsigned char *root, unsigned char *auth_path, + const spx_ctx* ctx, + uint32_t leaf_idx, uint32_t idx_offset, uint32_t tree_height, + void (*gen_leaf)( + unsigned char* /* leaf */, + const spx_ctx* ctx /* ctx */, + uint32_t /* addr_idx */, const uint32_t[8] /* tree_addr */), + uint32_t tree_addr[8]); + + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/utilsx1.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/utilsx1.h new file mode 100644 index 0000000..14c98d0 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/utilsx1.h @@ -0,0 +1,30 @@ +#ifndef SPX_UTILSX4_H +#define SPX_UTILSX4_H + +#include + +#include "context.h" +#include "fors.h" +#include "params.h" +#include "wotsx1.h" + +/** + * For a given leaf index, computes the authentication path and the resulting + * root node using Merkle's TreeHash algorithm. + * Expects the layer and tree parts of the tree_addr to be set, as well as the + * tree type (i.e. SPX_ADDR_TYPE_HASHTREE or SPX_ADDR_TYPE_FORSTREE). + * Applies the offset idx_offset to indices before building addresses, so that + * it is possible to continue counting indices across trees. 
+ */ +#define wots_treehashx1 SPX_NAMESPACE(wots_treehashx1) +void wots_treehashx1(unsigned char *root, unsigned char *auth_path, + const spx_ctx* ctx, + uint32_t leaf_idx, uint32_t idx_offset, uint32_t tree_height, + uint32_t tree_addrx4[8], leaf_info_x1 *info); + +#define fors_treehashx1 SPX_NAMESPACE(fors_treehashx1) +void fors_treehashx1(unsigned char *root, unsigned char *auth_path, + const spx_ctx* ctx, + uint32_t leaf_idx, uint32_t idx_offset, uint32_t tree_height, + uint32_t tree_addrx4[8], fors_gen_leaf_info *info); +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/wots.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/wots.h new file mode 100644 index 0000000..4e7692e --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/wots.h @@ -0,0 +1,25 @@ +#ifndef SPX_WOTS_H +#define SPX_WOTS_H + +#include + +#include "context.h" +#include "params.h" + +/** + * Takes a WOTS signature and an n-byte message, computes a WOTS public key. + * + * Writes the computed public key to 'pk'. + */ +#define wots_pk_from_sig SPX_NAMESPACE(wots_pk_from_sig) +void wots_pk_from_sig(unsigned char *pk, + const unsigned char *sig, const unsigned char *msg, + const spx_ctx *ctx, uint32_t addr[8]); + +/* + * Compute the chain lengths needed for a given message hash + */ +#define chain_lengths SPX_NAMESPACE(chain_lengths) +void chain_lengths(unsigned int *lengths, const unsigned char *msg); + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/wotsx1.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/wotsx1.h new file mode 100644 index 0000000..411fa8e --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/include/wotsx1.h @@ -0,0 +1,36 @@ +#if !defined( WOTSX1_H_ ) +#define WOTSX1_H_ + +#include + +/* + * This is here to provide an interface to the internal wots_gen_leafx1 + * routine. 
While this routine is not referenced in the package outside of + * wots.c, it is called from the stand-alone benchmark code to characterize + * the performance + */ +typedef struct leaf_info_x1 { + unsigned char *wots_sig; + uint32_t wots_sign_leaf; /* The index of the WOTS we're using to sign */ + uint32_t *wots_steps; + uint32_t leaf_addr[8]; + uint32_t pk_addr[8]; +} leaf_info_x1; + +/* Macro to set the leaf_info to something 'benign', that is, it would */ +/* run with the same time as it does during the real signing process */ +/* Used only by the benchmark code */ +#define INITIALIZE_LEAF_INFO_X1(info, addr, step_buffer) { \ + info.wots_sig = 0; \ + info.wots_sign_leaf = ~0u; \ + info.wots_steps = step_buffer; \ + memcpy( &info.leaf_addr[0], addr, 32 ); \ + memcpy( &info.pk_addr[0], addr, 32 ); \ +} + +#define wots_gen_leafx1 SPX_NAMESPACE(wots_gen_leafx1) +void wots_gen_leafx1(unsigned char *dest, + const spx_ctx *ctx, + uint32_t leaf_idx, leaf_info_x1 *v_info); + +#endif /* WOTSX1_H_ */ diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-128f.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-128f.h new file mode 100644 index 0000000..9c1f370 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-128f.h @@ -0,0 +1,85 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 16 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 66 +/* Number of subtree layer. */ +#define SPX_D 22 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 6 +#define SPX_FORS_TREES 33 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. 
*/ + +/* This is a BLAKE-based parameter set, hence whether we use BLAKE-256 + * exclusively or we use both BLAKE-256 and BLAKE-512 is controlled by + * the following #define */ +#define SPX_BLAKE512 0 /* Use BLAKE-256 for all hashes */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. */ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/blake/include/blake_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-128s.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-128s.h new file mode 100644 index 0000000..7f59db4 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-128s.h @@ -0,0 +1,85 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 16 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 63 +/* Number of subtree layer. */ +#define SPX_D 7 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 12 +#define SPX_FORS_TREES 14 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* This is a BLAKE-based parameter set, hence whether we use BLAKE-256 + * exclusively or we use both BLAKE-256 and BLAKE-512 is controlled by + * the following #define */ +#define SPX_BLAKE512 0 /* Use BLAKE-256 for all hashes */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/blake/include/blake_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-192f.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-192f.h new file mode 100644 index 0000000..430fdd2 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-192f.h @@ -0,0 +1,85 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 24 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 66 +/* Number of subtree layer. */ +#define SPX_D 22 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 8 +#define SPX_FORS_TREES 33 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* This is a BLAKE-based parameter set, hence whether we use BLAKE-256 + * exclusively or we use both BLAKE-256 and BLAKE-512 is controlled by + * the following #define */ +#define SPX_BLAKE512 1 /* Use BLAKE-512 */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/blake/include/blake_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-192s.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-192s.h new file mode 100644 index 0000000..3f5d9fe --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-192s.h @@ -0,0 +1,85 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 24 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 63 +/* Number of subtree layer. */ +#define SPX_D 7 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 14 +#define SPX_FORS_TREES 17 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* This is a BLAKE-based parameter set, hence whether we use BLAKE-256 + * exclusively or we use both BLAKE-256 and BLAKE-512 is controlled by + * the following #define */ +#define SPX_BLAKE512 1 /* Use BLAKE-512 */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/blake/include/blake_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-256f.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-256f.h new file mode 100644 index 0000000..5d257c0 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-256f.h @@ -0,0 +1,85 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 32 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 68 +/* Number of subtree layer. */ +#define SPX_D 17 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 9 +#define SPX_FORS_TREES 35 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* This is a BLAKE-based parameter set, hence whether we use BLAKE-256 + * exclusively or we use both BLAKE-256 and BLAKE-512 is controlled by + * the following #define */ +#define SPX_BLAKE512 1 /* Use BLAKE-512 */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/blake/include/blake_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-256s.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-256s.h new file mode 100644 index 0000000..9c242f8 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-blake-256s.h @@ -0,0 +1,85 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 32 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 64 +/* Number of subtree layer. */ +#define SPX_D 8 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 14 +#define SPX_FORS_TREES 22 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* This is a BLAKE-based parameter set, hence whether we use BLAKE-256 + * exclusively or we use both BLAKE-256 and BLAKE-512 is controlled by + * the following #define */ +#define SPX_BLAKE512 1 /* Use BLAKE-512 */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/blake/include/blake_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-128f.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-128f.h new file mode 100644 index 0000000..6ff36d0 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-128f.h @@ -0,0 +1,80 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 16 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 66 +/* Number of subtree layer. */ +#define SPX_D 22 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 6 +#define SPX_FORS_TREES 33 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/haraka/include/haraka_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-128s.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-128s.h new file mode 100644 index 0000000..a84eec9 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-128s.h @@ -0,0 +1,80 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 16 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 63 +/* Number of subtree layer. */ +#define SPX_D 7 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 12 +#define SPX_FORS_TREES 14 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/haraka/include/haraka_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-192f.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-192f.h new file mode 100644 index 0000000..50a6730 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-192f.h @@ -0,0 +1,80 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 24 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 66 +/* Number of subtree layer. */ +#define SPX_D 22 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 8 +#define SPX_FORS_TREES 33 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/haraka/include/haraka_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-192s.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-192s.h new file mode 100644 index 0000000..61dcd25 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-192s.h @@ -0,0 +1,80 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 24 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 63 +/* Number of subtree layer. */ +#define SPX_D 7 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 14 +#define SPX_FORS_TREES 17 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/haraka/include/haraka_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-256f.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-256f.h new file mode 100644 index 0000000..d2cab73 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-256f.h @@ -0,0 +1,80 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 32 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 68 +/* Number of subtree layer. */ +#define SPX_D 17 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 9 +#define SPX_FORS_TREES 35 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/haraka/include/haraka_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-256s.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-256s.h new file mode 100644 index 0000000..40a5145 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-haraka-256s.h @@ -0,0 +1,80 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 32 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 64 +/* Number of subtree layer. */ +#define SPX_D 8 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 14 +#define SPX_FORS_TREES 22 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/haraka/include/haraka_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-128f.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-128f.h new file mode 100644 index 0000000..1392097 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-128f.h @@ -0,0 +1,85 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 16 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 66 +/* Number of subtree layer. */ +#define SPX_D 22 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 6 +#define SPX_FORS_TREES 33 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* This is a SHA2-based parameter set, hence whether we use SHA-256 + * exclusively or we use both SHA-256 and SHA-512 is controlled by + * the following #define */ +#define SPX_SHA512 0 /* Use SHA-256 for all hashes */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/sha2/include/sha2_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-128s.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-128s.h new file mode 100644 index 0000000..4a672d9 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-128s.h @@ -0,0 +1,85 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 16 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 63 +/* Number of subtree layer. */ +#define SPX_D 7 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 12 +#define SPX_FORS_TREES 14 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* This is a SHA2-based parameter set, hence whether we use SHA-256 + * exclusively or we use both SHA-256 and SHA-512 is controlled by + * the following #define */ +#define SPX_SHA512 0 /* Use SHA-256 for all hashes */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/sha2/include/sha2_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-192f.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-192f.h new file mode 100644 index 0000000..710de5f --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-192f.h @@ -0,0 +1,85 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 24 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 66 +/* Number of subtree layer. */ +#define SPX_D 22 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 8 +#define SPX_FORS_TREES 33 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* This is a SHA2-based parameter set, hence whether we use SHA-256 + * exclusively or we use both SHA-256 and SHA-512 is controlled by + * the following #define */ +#define SPX_SHA512 1 /* Use SHA-512 for H and T_l, l >= 2 */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/sha2/include/sha2_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-192s.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-192s.h new file mode 100644 index 0000000..eb8000a --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-192s.h @@ -0,0 +1,85 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 24 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 63 +/* Number of subtree layer. */ +#define SPX_D 7 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 14 +#define SPX_FORS_TREES 17 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* This is a SHA2-based parameter set, hence whether we use SHA-256 + * exclusively or we use both SHA-256 and SHA-512 is controlled by + * the following #define */ +#define SPX_SHA512 1 /* Use SHA-512 for H and T_l, l >= 2 */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/sha2/include/sha2_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-256f.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-256f.h new file mode 100644 index 0000000..77ab162 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-256f.h @@ -0,0 +1,85 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 32 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 68 +/* Number of subtree layer. */ +#define SPX_D 17 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 9 +#define SPX_FORS_TREES 35 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* This is a SHA2-based parameter set, hence whether we use SHA-256 + * exclusively or we use both SHA-256 and SHA-512 is controlled by + * the following #define */ +#define SPX_SHA512 1 /* Use SHA-512 for H and T_l, l >= 2 */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/sha2/include/sha2_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-256s.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-256s.h new file mode 100644 index 0000000..15d140d --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-sha2-256s.h @@ -0,0 +1,85 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 32 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 64 +/* Number of subtree layer. */ +#define SPX_D 8 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 14 +#define SPX_FORS_TREES 22 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* This is a SHA2-based parameter set, hence whether we use SHA-256 + * exclusively or we use both SHA-256 and SHA-512 is controlled by + * the following #define */ +#define SPX_SHA512 1 /* Use SHA-512 for H and T_l, l >= 2 */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/sha2/include/sha2_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-128f.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-128f.h new file mode 100644 index 0000000..478aff8 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-128f.h @@ -0,0 +1,80 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 16 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 66 +/* Number of subtree layer. */ +#define SPX_D 22 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 6 +#define SPX_FORS_TREES 33 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/shake/include/shake_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-128s.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-128s.h new file mode 100644 index 0000000..bf974cf --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-128s.h @@ -0,0 +1,80 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 16 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 63 +/* Number of subtree layer. */ +#define SPX_D 7 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 12 +#define SPX_FORS_TREES 14 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/shake/include/shake_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-192f.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-192f.h new file mode 100644 index 0000000..887a886 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-192f.h @@ -0,0 +1,80 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 24 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 66 +/* Number of subtree layer. */ +#define SPX_D 22 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 8 +#define SPX_FORS_TREES 33 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/shake/include/shake_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-192s.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-192s.h new file mode 100644 index 0000000..f59ebde --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-192s.h @@ -0,0 +1,80 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 24 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 63 +/* Number of subtree layer. */ +#define SPX_D 7 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 14 +#define SPX_FORS_TREES 17 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/shake/include/shake_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-256f.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-256f.h new file mode 100644 index 0000000..4305166 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-256f.h @@ -0,0 +1,80 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 32 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 68 +/* Number of subtree layer. */ +#define SPX_D 17 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 9 +#define SPX_FORS_TREES 35 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. 
*/ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/shake/include/shake_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-256s.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-256s.h new file mode 100644 index 0000000..93ea1d3 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/params/params-sphincs-shake-256s.h @@ -0,0 +1,80 @@ +#ifndef SPX_PARAMS_H +#define SPX_PARAMS_H + +#define SPX_NAMESPACE(s) SPX_##s + +/* Hash output length in bytes. */ +#define SPX_N 32 +/* Height of the hypertree. */ +#define SPX_FULL_HEIGHT 64 +/* Number of subtree layer. */ +#define SPX_D 8 +/* FORS tree dimensions. */ +#define SPX_FORS_HEIGHT 14 +#define SPX_FORS_TREES 22 +/* Winternitz parameter, */ +#define SPX_WOTS_W 16 + +/* The hash function is defined by linking a different hash.c file, as opposed + to setting a #define constant. */ + +/* For clarity */ +#define SPX_ADDR_BYTES 32 + +/* WOTS parameters. 
*/ +#if SPX_WOTS_W == 256 + #define SPX_WOTS_LOGW 8 +#elif SPX_WOTS_W == 16 + #define SPX_WOTS_LOGW 4 +#else + #error SPX_WOTS_W assumed 16 or 256 +#endif + +#define SPX_WOTS_LEN1 (8 * SPX_N / SPX_WOTS_LOGW) + +/* SPX_WOTS_LEN2 is floor(log(len_1 * (w - 1)) / log(w)) + 1; we precompute */ +#if SPX_WOTS_W == 256 + #if SPX_N <= 1 + #define SPX_WOTS_LEN2 1 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 2 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#elif SPX_WOTS_W == 16 + #if SPX_N <= 8 + #define SPX_WOTS_LEN2 2 + #elif SPX_N <= 136 + #define SPX_WOTS_LEN2 3 + #elif SPX_N <= 256 + #define SPX_WOTS_LEN2 4 + #else + #error Did not precompute SPX_WOTS_LEN2 for n outside {2, .., 256} + #endif +#endif + +#define SPX_WOTS_LEN (SPX_WOTS_LEN1 + SPX_WOTS_LEN2) +#define SPX_WOTS_BYTES (SPX_WOTS_LEN * SPX_N) +#define SPX_WOTS_PK_BYTES SPX_WOTS_BYTES + +/* Subtree size. */ +#define SPX_TREE_HEIGHT (SPX_FULL_HEIGHT / SPX_D) + +#if SPX_TREE_HEIGHT * SPX_D != SPX_FULL_HEIGHT + #error SPX_D should always divide SPX_FULL_HEIGHT +#endif + +/* FORS parameters. */ +#define SPX_FORS_MSG_BYTES ((SPX_FORS_HEIGHT * SPX_FORS_TREES + 7) / 8) +#define SPX_FORS_BYTES ((SPX_FORS_HEIGHT + 1) * SPX_FORS_TREES * SPX_N) +#define SPX_FORS_PK_BYTES SPX_N + +/* Resulting SPX sizes. */ +#define SPX_BYTES (SPX_N + SPX_FORS_BYTES + SPX_D * SPX_WOTS_BYTES +\ + SPX_FULL_HEIGHT * SPX_N) +#define SPX_PK_BYTES (2 * SPX_N) +#define SPX_SK_BYTES (2 * SPX_N + SPX_PK_BYTES) + +#include "../../lib/shake/include/shake_offsets.h" + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/PQCgenKAT_sign.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/PQCgenKAT_sign.c new file mode 100644 index 0000000..2dbd1b5 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/PQCgenKAT_sign.c @@ -0,0 +1,426 @@ +// +// PQCgenKAT_sign.c +// +// Created by Bassham, Lawrence E (Fed) on 8/29/17. 
+// Copyright © 2017 Bassham, Lawrence E (Fed). All rights reserved. +// + +#include +#include +#include + +#include "../include/api.h" +#include "../include/rng.h" + +#define MAX_MARKER_LEN 50 +#define BASE_MLEN 33 +#define LOOP_COUNT 7 + +#define KAT_SUCCESS 0 +#define KAT_OVERFLOW -1 +#define KAT_CRYPTO_FAILURE -2 + +#ifdef BLAKE_TR +#include "../../lib/blake/include/blake.h" +#if SPX_N >= 24 +#define blakestateX blakestate512 +#define blakeX_init blake512_init +#define blakeX_update blake512_update +#define blakeX_final blake512_final +#define blakeX_output_bytes 64 +#else +#define blakestateX blakestate256 +#define blakeX_init blake256_init +#define blakeX_update blake256_update +#define blakeX_final blake256_final +#define blakeX_output_bytes 32 +#endif + +typedef blakestateX kat_tr_ctx; + +static inline void kat_tr_init(kat_tr_ctx *ctx) { + blakeX_init(ctx); + + static const uint8_t tag[] = "KAT-TRANSCRIPT-v1-BLAKE"; + blakeX_update(ctx, tag, sizeof tag - 1); + + const uint8_t sep = 0x00; + blakeX_update(ctx, &sep, 1); +} + +static inline void kat_tr_absorb_label(kat_tr_ctx *ctx, const char *label) { + const uint8_t *p = (const uint8_t *)label; + size_t n = 0; while(p[n]) n++; + blakeX_update(ctx, p, n); + + const uint8_t sep = 0x00; + blakeX_update(ctx, &sep, 1); +} + +static inline void kat_tr_absorb_u64(kat_tr_ctx *ctx, unsigned long long x) { + uint8_t le[8]; + size_t i; + for (i = 0; i < 8; i++) { + le[i] = (uint8_t)((x >> (8 * i)) & 0xFF); + } + + uint8_t lenle[8]; + unsigned long long L = 8; + for(i = 0; i < 8; i++) { + lenle[i] = (uint8_t)((L >> (8 * i)) & 0xFF); + } + + blakeX_update(ctx, lenle, 8); + blakeX_update(ctx, le, 8); +} + +static inline void kat_tr_absorb_bytes(kat_tr_ctx *ctx, const uint8_t *buf, size_t len) { + uint8_t lenle[8]; + unsigned long long L = (unsigned long long) len; + size_t i; + for(i = 0; i < 8; i++) { + lenle[i] = (uint8_t)((L >> (8 * i)) & 0xFF); + } + blakeX_update(ctx, lenle, 8); + if(len) { + blakeX_update(ctx, buf, 
len); + } +} + +static inline void kat_tr_final(kat_tr_ctx *ctx, uint8_t out32[32]) { + unsigned char outbuf[blakeX_output_bytes] = {0}; + blakeX_final(ctx, outbuf); + memcpy(out32, outbuf, 32); +} +#elif HARAKA_TR +#include "../../lib/haraka/include/haraka.h" + +typedef struct { + spx_ctx inner; + uint8_t s[65]; +} kat_tr_ctx; + +static inline void kat_tr_init(kat_tr_ctx *ctx) { + size_t i; + for(i = 0; i < SPX_N; ++i) { + ctx->inner.pub_seed[i] = 0; + ctx->inner.sk_seed[i] = 0; + } + + tweak_constants(&ctx->inner); + haraka_S_inc_init(ctx->s); + + static const uint8_t tag[] = "KAT-TRANSCRIPT-v1-HARAKA"; + haraka_S_inc_absorb(ctx->s, tag, sizeof tag - 1, &ctx->inner); + + const uint8_t sep = 0x00; + haraka_S_inc_absorb(ctx->s, &sep, 1, &ctx->inner); +} + +static inline void kat_tr_absorb_label(kat_tr_ctx *ctx, const char *label) { + const uint8_t *p = (const uint8_t *)label; + size_t n = 0; while(p[n]) n++; + haraka_S_inc_absorb(ctx->s, p, n, &ctx->inner); + + const uint8_t sep = 0x00; + haraka_S_inc_absorb(ctx->s, &sep, 1, &ctx->inner); +} + +static inline void kat_tr_absorb_u64(kat_tr_ctx *ctx, unsigned long long x) { + uint8_t le[8]; + size_t i; + for (i = 0; i < 8; i++) { + le[i] = (uint8_t)((x >> (8 * i)) & 0xFF); + } + + uint8_t lenle[8]; + unsigned long long L = 8; + for(i = 0; i < 8; i++) { + lenle[i] = (uint8_t)((L >> (8 * i)) & 0xFF); + } + + haraka_S_inc_absorb(ctx->s, lenle, 8, &ctx->inner); + haraka_S_inc_absorb(ctx->s, le, 8, &ctx->inner); +} + +static inline void kat_tr_absorb_bytes(kat_tr_ctx *ctx, const uint8_t *buf, size_t len) { + uint8_t lenle[8]; + unsigned long long L = (unsigned long long) len; + size_t i; + for(i = 0; i < 8; i++) { + lenle[i] = (uint8_t)((L >> (8 * i)) & 0xFF); + } + haraka_S_inc_absorb(ctx->s, lenle, 8, &ctx->inner); + if(len) { + haraka_S_inc_absorb(ctx->s, buf, len, &ctx->inner); + } +} + +static inline void kat_tr_final(kat_tr_ctx *ctx, uint8_t out32[32]) { + haraka_S_inc_finalize(ctx->s); + haraka_S_inc_squeeze(out32, 
32, ctx->s, &ctx->inner); +} +#elif SHA2_TR +#include "../../lib/sha2/include/sha2.h" +#if SPX_N >= 24 +#define shaX_inc_init sha512_inc_init +#define shaX_inc_blocks sha512_inc_blocks +#define shaX_inc_finalize sha512_inc_finalize +#define shaX_state_len 72 +#define shaX_block_bytes 128 +#define shaX_output_bytes 64 +#else +#define shaX_inc_init sha256_inc_init +#define shaX_inc_blocks sha256_inc_blocks +#define shaX_inc_finalize sha256_inc_finalize +#define shaX_state_len 40 +#define shaX_block_bytes 64 +#define shaX_output_bytes 32 +#endif + +typedef struct { + uint8_t s[shaX_state_len]; +} kat_tr_ctx; + +static inline void kat_tr_init(kat_tr_ctx *ctx) { + static const uint8_t tag[] = "KAT-TRANSCRIPT-v1-SHA2"; + uint8_t block[shaX_block_bytes]; + size_t i; + + for (i = 0; i < sizeof tag - 1; ++i) { + block[i] = tag[i]; + } + for (i = sizeof tag - 1; i < shaX_block_bytes; ++i) { + block[i] = 0; + } + + shaX_inc_init(ctx->s); + shaX_inc_blocks(ctx->s, block, 1); +} + +static inline void kat_tr_absorb_label(kat_tr_ctx *ctx, const char *label) { + const uint8_t *p = (const uint8_t *)label; + size_t n = 0; while(p[n]) n++; + size_t block_count = (n + 1 + (shaX_block_bytes - 1)) / shaX_block_bytes; + + size_t i; + for(i = 0; i < block_count; ++i) { + uint8_t block[shaX_block_bytes]; + size_t j; + + for(j = 0; i * shaX_block_bytes + j < n && j < shaX_block_bytes; ++j) { + block[j] = p[i * shaX_block_bytes + j]; + } + + if(i * shaX_block_bytes + j == n && j < shaX_block_bytes) { + block[j] = 0x00; + ++j; + } + + for(; j < shaX_block_bytes; ++j) { + block[j] = 0; + } + + shaX_inc_blocks(ctx->s, block, 1); + } +} + +static inline void kat_tr_absorb_u64(kat_tr_ctx *ctx, unsigned long long x) { + uint8_t block[shaX_block_bytes]; + uint8_t le[8]; + size_t i; + for (i = 0; i < 8; i++) { + le[i] = (uint8_t)((x >> (8 * i)) & 0xFF); + } + + uint8_t lenle[8]; + unsigned long long L = 8; + for(i = 0; i < 8; i++) { + lenle[i] = (uint8_t)((L >> (8 * i)) & 0xFF); + } + + for (i = 0; 
i < 8; ++i) { + block[i] = lenle[i]; + } + for (i = 0; i < 8; ++i) { + block[8+i] = le[i]; + } + for (i = 16; i < shaX_block_bytes; ++i) { + block[i] = 0; + } + + shaX_inc_blocks(ctx->s, block, 1); +} + +static inline void kat_tr_absorb_bytes(kat_tr_ctx *ctx, const uint8_t *buf, size_t len) { + uint8_t lenle[shaX_block_bytes] = {0}; + unsigned long long L = (unsigned long long) len; + size_t i; + for(i = 0; i < 8; i++) { + lenle[i] = (uint8_t)((L >> (8 * i)) & 0xFF); + } + size_t block_count = (len + (shaX_block_bytes - 1)) / shaX_block_bytes; + shaX_inc_blocks(ctx->s, lenle, 1); + + if(len != 0) { + for(i = 0; i < block_count; ++i) { + uint8_t block[shaX_block_bytes]; + size_t j; + + for(j = 0; i * shaX_block_bytes + j < len && j < shaX_block_bytes; ++j) { + block[j] = buf[i * shaX_block_bytes + j]; + } + for(; j < shaX_block_bytes; ++j) { + block[j] = 0; + } + + shaX_inc_blocks(ctx->s, block, 1); + } + } +} + +static inline void kat_tr_final(kat_tr_ctx *ctx, uint8_t out32[32]) { + unsigned char outbuf[shaX_output_bytes] = {0}; + uint8_t final_block[shaX_block_bytes] = {0}; + shaX_inc_finalize(outbuf, ctx->s, final_block, 1); + memcpy(out32, outbuf, 32); +} +#elif SHAKE_TR +#include "../../lib/shake/include/fips202.h" +typedef struct { + uint64_t s[26]; +} kat_tr_ctx; + +static inline void kat_tr_init(kat_tr_ctx *ctx) { + shake256_inc_init(ctx->s); + + static const uint8_t tag[] = "KAT-TRANSCRIPT-v1-SHAKE"; + shake256_inc_absorb(ctx->s, tag, sizeof tag - 1); + + const uint8_t sep = 0x00; + shake256_inc_absorb(ctx->s, &sep, 1); +} + +static inline void kat_tr_absorb_label(kat_tr_ctx *ctx, const char *label) { + const uint8_t *p = (const uint8_t *)label; + size_t n = 0; while(p[n]) n++; + shake256_inc_absorb(ctx->s, p, n); + + const uint8_t sep = 0x00; + shake256_inc_absorb(ctx->s, &sep, 1); +} + +static inline void kat_tr_absorb_u64(kat_tr_ctx *ctx, unsigned long long x) { + uint8_t le[8]; + size_t i; + for (i = 0; i < 8; i++) { + le[i] = (uint8_t)((x >> (8 * i)) & 
0xFF); + } + + uint8_t lenle[8]; + unsigned long long L = 8; + for(i = 0; i < 8; i++) { + lenle[i] = (uint8_t)((L >> (8 * i)) & 0xFF); + } + + shake256_inc_absorb(ctx->s, lenle, 8); + shake256_inc_absorb(ctx->s, le, 8); +} + +static inline void kat_tr_absorb_bytes(kat_tr_ctx *ctx, const uint8_t *buf, size_t len) { + uint8_t lenle[8]; + unsigned long long L = (unsigned long long) len; + size_t i; + for(i = 0; i < 8; i++) { + lenle[i] = (uint8_t)((L >> (8 * i)) & 0xFF); + } + shake256_inc_absorb(ctx->s, lenle, 8); + if(len) { + shake256_inc_absorb(ctx->s, buf, len); + } +} + +static inline void kat_tr_final(kat_tr_ctx *ctx, uint8_t out32[32]) { + shake256_inc_finalize(ctx->s); + shake256_inc_squeeze(out32, 32, ctx->s); +} +#endif + +int +main(void) +{ + static unsigned char m[BASE_MLEN * LOOP_COUNT]; + static unsigned char sm[BASE_MLEN * LOOP_COUNT + CRYPTO_BYTES]; + static unsigned char m1[BASE_MLEN * LOOP_COUNT + CRYPTO_BYTES]; + static unsigned char pk[CRYPTO_PUBLICKEYBYTES]; + static unsigned char sk[CRYPTO_SECRETKEYBYTES]; + static unsigned char seed[48]; + static unsigned char entropy_input[48]; + static unsigned char msg[BASE_MLEN * LOOP_COUNT]; + + unsigned long long mlen, smlen, mlen1; + int ret; + + // Deterministic entropy to seed DRBG to make .req + for (int i = 0; i < 48; i++) { + entropy_input[i] = (unsigned char)i; + } + randombytes_init(entropy_input, NULL); + + // Initialize Transcript + kat_tr_ctx tctx; + kat_tr_init(&tctx); + kat_tr_absorb_label(&tctx, "CRYPTO_ALGNAME"); + kat_tr_absorb_bytes(&tctx, (const uint8_t *)CRYPTO_ALGNAME, strlen(CRYPTO_ALGNAME)); + kat_tr_absorb_label(&tctx, "SKBYTES"); kat_tr_absorb_u64(&tctx, CRYPTO_SECRETKEYBYTES); + kat_tr_absorb_label(&tctx, "PKBYTES"); kat_tr_absorb_u64(&tctx, CRYPTO_PUBLICKEYBYTES); + kat_tr_absorb_label(&tctx, "SIGBYTES"); kat_tr_absorb_u64(&tctx, CRYPTO_BYTES); + + for (int i = 0; i < LOOP_COUNT; i++) { + randombytes(seed, sizeof seed); + + kat_tr_absorb_label(&tctx, "count"); 
kat_tr_absorb_u64(&tctx, (unsigned long long) i); + kat_tr_absorb_label(&tctx, "seed"); kat_tr_absorb_bytes(&tctx, seed, sizeof seed); + + mlen = (unsigned long long int)(BASE_MLEN * (i + 1)); + if (mlen > BASE_MLEN * LOOP_COUNT) { fprintf(stderr, "mlen overflow\n"); return KAT_OVERFLOW; } + + kat_tr_absorb_label(&tctx, "mlen"); kat_tr_absorb_u64(&tctx, mlen); + + randombytes(msg, mlen); + kat_tr_absorb_label(&tctx, "msg"); kat_tr_absorb_bytes(&tctx, msg, mlen); + + memset(m, 0, mlen); + memset(m1, 0, mlen + CRYPTO_BYTES); + memset(sm, 0, mlen + CRYPTO_BYTES); + memcpy(m, msg, mlen); + + // Keypair + ret = crypto_sign_keypair(pk, sk); + if (ret) { fprintf(stderr, "crypto_sign_keypair=%d\n", ret); return KAT_CRYPTO_FAILURE; } + kat_tr_absorb_label(&tctx, "pk"); kat_tr_absorb_bytes(&tctx, pk, CRYPTO_PUBLICKEYBYTES); + kat_tr_absorb_label(&tctx, "sk"); kat_tr_absorb_bytes(&tctx, sk, CRYPTO_SECRETKEYBYTES); + + // Sign + ret = crypto_sign(sm, &smlen, m, mlen, sk); + if (ret) { fprintf(stderr, "crypto_sign=%d\n", ret); return KAT_CRYPTO_FAILURE; } + kat_tr_absorb_label(&tctx, "smlen"); kat_tr_absorb_u64(&tctx, smlen); + kat_tr_absorb_label(&tctx, "sm"); kat_tr_absorb_bytes(&tctx, sm, smlen); + + // Verify + ret = crypto_sign_open(m1, &mlen1, sm, smlen, pk); + if (ret) { fprintf(stderr, "crypto_sign_open=%d\n", ret); return KAT_CRYPTO_FAILURE; } + if (mlen1 != mlen) { fprintf(stderr, "mlen mismatch\n"); return KAT_CRYPTO_FAILURE; } + if (memcmp(m, m1, mlen) != 0) { fprintf(stderr, "m mismatch\n"); return KAT_CRYPTO_FAILURE; } + } + + // Finalize transcript digest + uint8_t digest[32] = {0}; + kat_tr_final(&tctx, digest); + + printf("KAT transcript digest = "); + for (size_t i = 0; i < 32; i++) { printf("%02X", digest[i]); } + printf("\n"); + + return KAT_SUCCESS; +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/address.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/address.c new file mode 100644 index 
0000000..5be8fca --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/address.c @@ -0,0 +1,104 @@ +#include +#include + +#include "../include/address.h" +#include "../include/params.h" +#include "../include/utils.h" + +/* + * Specify which level of Merkle tree (the "layer") we're working on + */ +void set_layer_addr(uint32_t addr[8], uint32_t layer) +{ + ((unsigned char *)addr)[SPX_OFFSET_LAYER] = (unsigned char)layer; +} + +/* + * Specify which Merkle tree within the level (the "tree address") we're working on + */ +void set_tree_addr(uint32_t addr[8], uint64_t tree) +{ +#if (SPX_TREE_HEIGHT * (SPX_D - 1)) > 64 + #error Subtree addressing is currently limited to at most 2^64 trees +#endif + ull_to_bytes(&((unsigned char *)addr)[SPX_OFFSET_TREE], 8, tree ); +} + +/* + * Specify the reason we'll use this address structure for, that is, what + * hash will we compute with it. This is used so that unrelated types of + * hashes don't accidentally get the same address structure. The type will be + * one of the SPX_ADDR_TYPE constants + */ +void set_type(uint32_t addr[8], uint32_t type) +{ + ((unsigned char *)addr)[SPX_OFFSET_TYPE] = (unsigned char)type; +} + +/* + * Copy the layer and tree fields of the address structure. This is used + * when we're doing multiple types of hashes within the same Merkle tree + */ +void copy_subtree_addr(uint32_t out[8], const uint32_t in[8]) +{ + memcpy( out, in, SPX_OFFSET_TREE+8 ); +} + +/* These functions are used for OTS addresses. */ + +/* + * Specify which Merkle leaf we're working on; that is, which OTS keypair + * we're talking about. + */ +void set_keypair_addr(uint32_t addr[8], uint32_t keypair) +{ + u32_to_bytes(&((unsigned char *)addr)[SPX_OFFSET_KP_ADDR], keypair); +} + +/* + * Copy the layer, tree and keypair fields of the address structure. 
This is + * used when we're doing multiple things within the same OTS keypair + */ +void copy_keypair_addr(uint32_t out[8], const uint32_t in[8]) +{ + memcpy( out, in, SPX_OFFSET_TREE+8 ); + memcpy( (unsigned char *)out + SPX_OFFSET_KP_ADDR, (unsigned char *)in + SPX_OFFSET_KP_ADDR, 4); +} + +/* + * Specify which Merkle chain within the OTS we're working with + * (the chain address) + */ +void set_chain_addr(uint32_t addr[8], uint32_t chain) +{ + ((unsigned char *)addr)[SPX_OFFSET_CHAIN_ADDR] = (unsigned char)chain; +} + +/* + * Specify where in the Merkle chain we are +* (the hash address) + */ +void set_hash_addr(uint32_t addr[8], uint32_t hash) +{ + ((unsigned char *)addr)[SPX_OFFSET_HASH_ADDR] = (unsigned char)hash; +} + +/* These functions are used for all hash tree addresses (including FORS). */ + +/* + * Specify the height of the node in the Merkle/FORS tree we are in + * (the tree height) + */ +void set_tree_height(uint32_t addr[8], uint32_t tree_height) +{ + ((unsigned char *)addr)[SPX_OFFSET_TREE_HGT] = (unsigned char)tree_height; +} + +/* + * Specify the distance from the left edge of the node in the Merkle/FORS tree + * (the tree index) + */ +void set_tree_index(uint32_t addr[8], uint32_t tree_index) +{ + u32_to_bytes(&((unsigned char *)addr)[SPX_OFFSET_TREE_INDEX], tree_index ); +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/fors.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/fors.c new file mode 100644 index 0000000..da07f27 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/fors.c @@ -0,0 +1,158 @@ +#include +#include +#include + +#include "../include/address.h" +#include "../include/fors.h" +#include "../include/forsx1.h" +#include "../include/hash.h" +#include "../include/thash.h" +#include "../include/utils.h" +#include "../include/utilsx1.h" + +static void fors_gen_sk(unsigned char *sk, const spx_ctx *ctx, + uint32_t fors_leaf_addr[8]) +{ + 
prf_addr(sk, ctx, fors_leaf_addr); +} + +static void fors_sk_to_leaf(unsigned char *leaf, const unsigned char *sk, + const spx_ctx *ctx, + uint32_t fors_leaf_addr[8]) +{ + thash(leaf, sk, 1, ctx, fors_leaf_addr); +} + +void fors_gen_leafx1(unsigned char *leaf, + const spx_ctx *ctx, + uint32_t addr_idx, fors_gen_leaf_info *info) +{ + struct fors_gen_leaf_info *fors_info = info; + uint32_t *fors_leaf_addr = fors_info->leaf_addrx; + + /* Only set the parts that the caller doesn't set */ + set_tree_index(fors_leaf_addr, addr_idx); + set_type(fors_leaf_addr, SPX_ADDR_TYPE_FORSPRF); + fors_gen_sk(leaf, ctx, fors_leaf_addr); + + set_type(fors_leaf_addr, SPX_ADDR_TYPE_FORSTREE); + fors_sk_to_leaf(leaf, leaf, + ctx, fors_leaf_addr); +} + +/** + * Interprets m as SPX_FORS_HEIGHT-bit unsigned integers. + * Assumes m contains at least SPX_FORS_HEIGHT * SPX_FORS_TREES bits. + * Assumes indices has space for SPX_FORS_TREES integers. + */ +static void message_to_indices(uint32_t *indices, const unsigned char *m) +{ + unsigned int i, j; + unsigned int offset = 0; + + for (i = 0; i < SPX_FORS_TREES; i++) { + indices[i] = 0; + for (j = 0; j < SPX_FORS_HEIGHT; j++) { + indices[i] ^= ((m[offset >> 3] >> (offset & 0x7)) & 1u) << j; + offset++; + } + } +} + +/** + * Signs a message m, deriving the secret key from sk_seed and the FTS address. + * Assumes m contains at least SPX_FORS_HEIGHT * SPX_FORS_TREES bits. 
+ */ +void fors_sign(unsigned char *sig, unsigned char *pk, + const unsigned char *m, + const spx_ctx *ctx, + const uint32_t fors_addr[8]) +{ + uint32_t indices[SPX_FORS_TREES]; + unsigned char roots[SPX_FORS_TREES * SPX_N]; + uint32_t fors_tree_addr[8] = {0}; + struct fors_gen_leaf_info fors_info = {0}; + uint32_t *fors_leaf_addr = fors_info.leaf_addrx; + uint32_t fors_pk_addr[8] = {0}; + uint32_t idx_offset; + unsigned int i; + + copy_keypair_addr(fors_tree_addr, fors_addr); + copy_keypair_addr(fors_leaf_addr, fors_addr); + + copy_keypair_addr(fors_pk_addr, fors_addr); + set_type(fors_pk_addr, SPX_ADDR_TYPE_FORSPK); + + message_to_indices(indices, m); + + for (i = 0; i < SPX_FORS_TREES; i++) { + idx_offset = i * (1 << SPX_FORS_HEIGHT); + + set_tree_height(fors_tree_addr, 0); + set_tree_index(fors_tree_addr, indices[i] + idx_offset); + set_type(fors_tree_addr, SPX_ADDR_TYPE_FORSPRF); + + /* Include the secret key part that produces the selected leaf node. */ + fors_gen_sk(sig, ctx, fors_tree_addr); + set_type(fors_tree_addr, SPX_ADDR_TYPE_FORSTREE); + sig += SPX_N; + + /* Compute the authentication path for this leaf node. */ + fors_treehashx1(roots + i*SPX_N, sig, ctx, + indices[i], idx_offset, SPX_FORS_HEIGHT, + fors_tree_addr, &fors_info); + + sig += SPX_N * SPX_FORS_HEIGHT; + } + + /* Hash horizontally across all tree roots to derive the public key. */ + thash(pk, roots, SPX_FORS_TREES, ctx, fors_pk_addr); +} + +/** + * Derives the FORS public key from a signature. + * This can be used for verification by comparing to a known public key, or to + * subsequently verify a signature on the derived public key. The latter is the + * typical use-case when used as an FTS below an OTS in a hypertree. + * Assumes m contains at least SPX_FORS_HEIGHT * SPX_FORS_TREES bits. 
+ */ +void fors_pk_from_sig(unsigned char *pk, + const unsigned char *sig, const unsigned char *m, + const spx_ctx* ctx, + const uint32_t fors_addr[8]) +{ + uint32_t indices[SPX_FORS_TREES]; + unsigned char roots[SPX_FORS_TREES * SPX_N]; + unsigned char leaf[SPX_N]; + uint32_t fors_tree_addr[8] = {0}; + uint32_t fors_pk_addr[8] = {0}; + uint32_t idx_offset; + unsigned int i; + + copy_keypair_addr(fors_tree_addr, fors_addr); + copy_keypair_addr(fors_pk_addr, fors_addr); + + set_type(fors_tree_addr, SPX_ADDR_TYPE_FORSTREE); + set_type(fors_pk_addr, SPX_ADDR_TYPE_FORSPK); + + message_to_indices(indices, m); + + for (i = 0; i < SPX_FORS_TREES; i++) { + idx_offset = i * (1 << SPX_FORS_HEIGHT); + + set_tree_height(fors_tree_addr, 0); + set_tree_index(fors_tree_addr, indices[i] + idx_offset); + + /* Derive the leaf from the included secret key part. */ + fors_sk_to_leaf(leaf, sig, ctx, fors_tree_addr); + sig += SPX_N; + + /* Derive the corresponding root node of this tree. */ + compute_root(roots + i*SPX_N, leaf, indices[i], idx_offset, + sig, SPX_FORS_HEIGHT, ctx, fors_tree_addr); + sig += SPX_N * SPX_FORS_HEIGHT; + } + + /* Hash horizontally across all tree roots to derive the public key. */ + thash(pk, roots, SPX_FORS_TREES, ctx, fors_pk_addr); +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/merkle.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/merkle.c new file mode 100644 index 0000000..9630b74 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/merkle.c @@ -0,0 +1,60 @@ +#include +#include + +#include "../include/address.h" +#include "../include/merkle.h" +#include "../include/params.h" +#include "../include/utils.h" +#include "../include/utilsx1.h" +#include "../include/wots.h" +#include "../include/wotsx1.h" + +/* + * This generates a Merkle signature (WOTS signature followed by the Merkle + * authentication path). 
This is in this file because most of the complexity + * is involved with the WOTS signature; the Merkle authentication path logic + * is mostly hidden in treehashx4 + */ +void merkle_sign(uint8_t *sig, unsigned char *root, + const spx_ctx *ctx, + uint32_t wots_addr[8], uint32_t tree_addr[8], + uint32_t idx_leaf) +{ + unsigned char *auth_path = sig + SPX_WOTS_BYTES; + struct leaf_info_x1 info = { 0 }; + unsigned steps[ SPX_WOTS_LEN ]; + + info.wots_sig = sig; + chain_lengths(steps, root); + info.wots_steps = steps; + + set_type(&tree_addr[0], SPX_ADDR_TYPE_HASHTREE); + set_type(&info.pk_addr[0], SPX_ADDR_TYPE_WOTSPK); + copy_subtree_addr(&info.leaf_addr[0], wots_addr); + copy_subtree_addr(&info.pk_addr[0], wots_addr); + + info.wots_sign_leaf = idx_leaf; + + wots_treehashx1(root, auth_path, ctx, + idx_leaf, 0, + SPX_TREE_HEIGHT, + tree_addr, &info); +} + +/* Compute root node of the top-most subtree. */ +void merkle_gen_root(unsigned char *root, const spx_ctx *ctx) +{ + /* We do not need the auth path in key generation, but it simplifies the + code to have just one treehash routine that computes both root and path + in one function. */ + unsigned char auth_path[SPX_TREE_HEIGHT * SPX_N + SPX_WOTS_BYTES]; + uint32_t top_tree_addr[8] = {0}; + uint32_t wots_addr[8] = {0}; + + set_layer_addr(top_tree_addr, SPX_D - 1); + set_layer_addr(wots_addr, SPX_D - 1); + + merkle_sign(auth_path, root, ctx, + wots_addr, top_tree_addr, + (uint32_t)~0 /* ~0 means "don't bother generating an auth path */ ); +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/randombytes.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/randombytes.c new file mode 100644 index 0000000..ac68df0 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/randombytes.c @@ -0,0 +1,43 @@ +/* +This code was taken from the SPHINCS reference implementation and is public domain. 
+*/ + +#include +#include + +#include "../include/randombytes.h" + +static int fd = -1; + +void randombytes(unsigned char *x, unsigned long long xlen) +{ + unsigned long long i; + + if (fd == -1) { + for (;;) { + fd = open("/dev/urandom", O_RDONLY); + if (fd != -1) { + break; + } + sleep(1); + } + } + + while (xlen > 0) { + if (xlen < 1048576) { + i = xlen; + } + else { + i = 1048576; + } + + i = (unsigned long long)read(fd, x, i); + if (i < 1) { + sleep(1); + continue; + } + + x += i; + xlen -= i; + } +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/rng.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/rng.c new file mode 100644 index 0000000..6e65ea0 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/rng.c @@ -0,0 +1,219 @@ +// +// rng.c +// +// Created by Bassham, Lawrence E (Fed) on 8/29/17. +// Copyright © 2017 Bassham, Lawrence E (Fed). All rights reserved. +// + +#include +#include +#include +#include + +#include "../include/rng.h" + +AES256_CTR_DRBG_struct DRBG_ctx; + +void AES256_ECB(unsigned char *key, unsigned char *ctr, unsigned char *buffer); + +/* + seedexpander_init() + ctx - stores the current state of an instance of the seed expander + seed - a 32 byte random value + diversifier - an 8 byte diversifier + maxlen - maximum number of bytes (less than 2**32) generated under this seed and diversifier + */ +int +seedexpander_init(AES_XOF_struct *ctx, + unsigned char *seed, + unsigned char *diversifier, + unsigned long maxlen) +{ + if ( maxlen >= 0x100000000 ) + return RNG_BAD_MAXLEN; + + ctx->length_remaining = maxlen; + + memcpy(ctx->key, seed, 32); + + memcpy(ctx->ctr, diversifier, 8); + ctx->ctr[11] = (unsigned char)(maxlen % 256); + maxlen >>= 8; + ctx->ctr[10] = (unsigned char)(maxlen % 256); + maxlen >>= 8; + ctx->ctr[9] = (unsigned char)(maxlen % 256); + maxlen >>= 8; + ctx->ctr[8] = (unsigned char)(maxlen % 256); + memset(ctx->ctr+12, 0x00, 4); + + 
ctx->buffer_pos = 16; + memset(ctx->buffer, 0x00, 16); + + return RNG_SUCCESS; +} + +/* + seedexpander() + ctx - stores the current state of an instance of the seed expander + x - returns the XOF data + xlen - number of bytes to return + */ +int +seedexpander(AES_XOF_struct *ctx, unsigned char *x, unsigned long xlen) +{ + unsigned long offset; + + if ( x == NULL ) + return RNG_BAD_OUTBUF; + if ( xlen >= ctx->length_remaining ) + return RNG_BAD_REQ_LEN; + + ctx->length_remaining -= xlen; + + offset = 0; + while ( xlen > 0 ) { + if ( xlen <= (16-ctx->buffer_pos) ) { // buffer has what we need + memcpy(x+offset, ctx->buffer+ctx->buffer_pos, xlen); + ctx->buffer_pos += xlen; + + return RNG_SUCCESS; + } + + // take what's in the buffer + memcpy(x+offset, ctx->buffer+ctx->buffer_pos, 16-ctx->buffer_pos); + xlen -= 16-ctx->buffer_pos; + offset += 16-ctx->buffer_pos; + + AES256_ECB(ctx->key, ctx->ctr, ctx->buffer); + ctx->buffer_pos = 0; + + //increment the counter + for (int i=15; i>=12; i--) { + if ( ctx->ctr[i] == 0xff ) + ctx->ctr[i] = 0x00; + else { + ctx->ctr[i]++; + break; + } + } + + } + + return RNG_SUCCESS; +} + + +static void handleErrors(void) +{ + ERR_print_errors_fp(stderr); + abort(); +} + +// Use whatever AES implementation you have. 
This uses AES from openSSL library +// key - 256-bit AES key +// ctr - a 128-bit plaintext value +// buffer - a 128-bit ciphertext value +void +AES256_ECB(unsigned char *key, unsigned char *ctr, unsigned char *buffer) +{ + EVP_CIPHER_CTX *ctx; + + int len; + + /* Create and initialise the context */ + if(!(ctx = EVP_CIPHER_CTX_new())) handleErrors(); + + if(1 != EVP_EncryptInit_ex(ctx, EVP_aes_256_ecb(), NULL, key, NULL)) + handleErrors(); + + if(1 != EVP_EncryptUpdate(ctx, buffer, &len, ctr, 16)) + handleErrors(); + + /* Clean up */ + EVP_CIPHER_CTX_free(ctx); +} + +void +randombytes_init(unsigned char *entropy_input, + unsigned char *personalization_string) +{ + unsigned char seed_material[48]; + + memcpy(seed_material, entropy_input, 48); + if (personalization_string) + for (int i=0; i<48; i++) + seed_material[i] ^= personalization_string[i]; + memset(DRBG_ctx.Key, 0x00, 32); + memset(DRBG_ctx.V, 0x00, 16); + AES256_CTR_DRBG_Update(seed_material, DRBG_ctx.Key, DRBG_ctx.V); + DRBG_ctx.reseed_counter = 1; +} + +int +randombytes(unsigned char *x, unsigned long long xlen) +{ + unsigned char block[16]; + int i = 0; + + while ( xlen > 0 ) { + //increment V + for (int j=15; j>=0; j--) { + if ( DRBG_ctx.V[j] == 0xff ) + DRBG_ctx.V[j] = 0x00; + else { + DRBG_ctx.V[j]++; + break; + } + } + AES256_ECB(DRBG_ctx.Key, DRBG_ctx.V, block); + if ( xlen > 15 ) { + memcpy(x+i, block, 16); + i += 16; + xlen -= 16; + } + else { + memcpy(x+i, block, xlen); + xlen = 0; + } + } + AES256_CTR_DRBG_Update(NULL, DRBG_ctx.Key, DRBG_ctx.V); + DRBG_ctx.reseed_counter++; + + return RNG_SUCCESS; +} + +void +AES256_CTR_DRBG_Update(unsigned char *provided_data, + unsigned char *Key, + unsigned char *V) +{ + unsigned char temp[48]; + + for (int i=0; i<3; i++) { + //increment V + for (int j=15; j>=0; j--) { + if ( V[j] == 0xff ) + V[j] = 0x00; + else { + V[j]++; + break; + } + } + + AES256_ECB(Key, V, temp+16*i); + } + if ( provided_data != NULL ) + for (int i=0; i<48; i++) + temp[i] ^= 
provided_data[i]; + memcpy(Key, temp, 32); + memcpy(V, temp+32, 16); +} + + + + + + + + + diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/sign.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/sign.c new file mode 100644 index 0000000..4a2fdc6 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/sign.c @@ -0,0 +1,287 @@ +#include +#include +#include + +#include "../include/address.h" +#include "../include/api.h" +#include "../include/fors.h" +#include "../include/hash.h" +#include "../include/merkle.h" +#include "../include/params.h" +#include "../include/randombytes.h" +#include "../include/thash.h" +#include "../include/utils.h" +#include "../include/wots.h" + +/* + * Returns the length of a secret key, in bytes + */ +unsigned long long crypto_sign_secretkeybytes(void) +{ + return CRYPTO_SECRETKEYBYTES; +} + +/* + * Returns the length of a public key, in bytes + */ +unsigned long long crypto_sign_publickeybytes(void) +{ + return CRYPTO_PUBLICKEYBYTES; +} + +/* + * Returns the length of a signature, in bytes + */ +unsigned long long crypto_sign_bytes(void) +{ + return CRYPTO_BYTES; +} + +/* + * Returns the length of the seed required to generate a key pair, in bytes + */ +unsigned long long crypto_sign_seedbytes(void) +{ + return CRYPTO_SEEDBYTES; +} + +/* + * Generates an SPX key pair given a seed of length + * Format sk: [SK_SEED || SK_PRF || PUB_SEED || root] + * Format pk: [PUB_SEED || root] + */ +int crypto_sign_seed_keypair(unsigned char *pk, unsigned char *sk, + const unsigned char *seed) +{ + spx_ctx ctx; + + /* Initialize SK_SEED, SK_PRF and PUB_SEED from seed. */ + memcpy(sk, seed, CRYPTO_SEEDBYTES); + + memcpy(pk, sk + 2*SPX_N, SPX_N); + + memcpy(ctx.pub_seed, pk, SPX_N); + memcpy(ctx.sk_seed, sk, SPX_N); + + /* This hook allows the hash function instantiation to do whatever + preparation or computation it needs, based on the public seed. 
*/ + initialize_hash_function(&ctx); + + /* Compute root node of the top-most subtree. */ + merkle_gen_root(sk + 3*SPX_N, &ctx); + + memcpy(pk + SPX_N, sk + 3*SPX_N, SPX_N); + + return 0; +} + +/* + * Generates an SPX key pair. + * Format sk: [SK_SEED || SK_PRF || PUB_SEED || root] + * Format pk: [PUB_SEED || root] + */ +int crypto_sign_keypair(unsigned char *pk, unsigned char *sk) +{ + unsigned char seed[CRYPTO_SEEDBYTES]; + randombytes(seed, CRYPTO_SEEDBYTES); + crypto_sign_seed_keypair(pk, sk, seed); + + return 0; +} + +/** + * Returns an array containing a detached signature. + */ +int crypto_sign_signature(uint8_t *sig, size_t *siglen, + const uint8_t *m, size_t mlen, const uint8_t *sk) +{ + spx_ctx ctx; + + const unsigned char *sk_prf = sk + SPX_N; + const unsigned char *pk = sk + 2*SPX_N; + + unsigned char optrand[SPX_N]; + unsigned char mhash[SPX_FORS_MSG_BYTES]; + unsigned char root[SPX_N]; + uint32_t i; + uint64_t tree; + uint32_t idx_leaf; + uint32_t wots_addr[8] = {0}; + uint32_t tree_addr[8] = {0}; + + memcpy(ctx.sk_seed, sk, SPX_N); + memcpy(ctx.pub_seed, pk, SPX_N); + + /* This hook allows the hash function instantiation to do whatever + preparation or computation it needs, based on the public seed. */ + initialize_hash_function(&ctx); + + set_type(wots_addr, SPX_ADDR_TYPE_WOTS); + set_type(tree_addr, SPX_ADDR_TYPE_HASHTREE); + + /* Optionally, signing can be made non-deterministic using optrand. + This can help counter side-channel attacks that would benefit from + getting a large number of traces when the signer uses the same nodes. */ + randombytes(optrand, SPX_N); + /* Compute the digest randomization value. */ + gen_message_random(sig, sk_prf, optrand, m, mlen, &ctx); + + /* Derive the message digest and leaf index from R, PK and M. */ + hash_message(mhash, &tree, &idx_leaf, sig, pk, m, mlen, &ctx); + sig += SPX_N; + + set_tree_addr(wots_addr, tree); + set_keypair_addr(wots_addr, idx_leaf); + + /* Sign the message hash using FORS. 
*/ + fors_sign(sig, root, mhash, &ctx, wots_addr); + sig += SPX_FORS_BYTES; + + for (i = 0; i < SPX_D; i++) { + set_layer_addr(tree_addr, i); + set_tree_addr(tree_addr, tree); + + copy_subtree_addr(wots_addr, tree_addr); + set_keypair_addr(wots_addr, idx_leaf); + + merkle_sign(sig, root, &ctx, wots_addr, tree_addr, idx_leaf); + sig += SPX_WOTS_BYTES + SPX_TREE_HEIGHT * SPX_N; + + /* Update the indices for the next layer. */ + idx_leaf = (tree & ((1 << SPX_TREE_HEIGHT)-1)); + tree = tree >> SPX_TREE_HEIGHT; + } + + *siglen = SPX_BYTES; + + return 0; +} + +/** + * Verifies a detached signature and message under a given public key. + */ +int crypto_sign_verify(const uint8_t *sig, size_t siglen, + const uint8_t *m, size_t mlen, const uint8_t *pk) +{ + spx_ctx ctx; + const unsigned char *pub_root = pk + SPX_N; + unsigned char mhash[SPX_FORS_MSG_BYTES]; + unsigned char wots_pk[SPX_WOTS_BYTES]; + unsigned char root[SPX_N]; + unsigned char leaf[SPX_N]; + unsigned int i; + uint64_t tree; + uint32_t idx_leaf; + uint32_t wots_addr[8] = {0}; + uint32_t tree_addr[8] = {0}; + uint32_t wots_pk_addr[8] = {0}; + + if (siglen != SPX_BYTES) { + return -1; + } + + memcpy(ctx.pub_seed, pk, SPX_N); + + /* This hook allows the hash function instantiation to do whatever + preparation or computation it needs, based on the public seed. */ + initialize_hash_function(&ctx); + + set_type(wots_addr, SPX_ADDR_TYPE_WOTS); + set_type(tree_addr, SPX_ADDR_TYPE_HASHTREE); + set_type(wots_pk_addr, SPX_ADDR_TYPE_WOTSPK); + + /* Derive the message digest and leaf index from R || PK || M. */ + /* The additional SPX_N is a result of the hash domain separator. */ + hash_message(mhash, &tree, &idx_leaf, sig, pk, m, mlen, &ctx); + sig += SPX_N; + + /* Layer correctly defaults to 0, so no need to set_layer_addr */ + set_tree_addr(wots_addr, tree); + set_keypair_addr(wots_addr, idx_leaf); + + fors_pk_from_sig(root, sig, mhash, &ctx, wots_addr); + sig += SPX_FORS_BYTES; + + /* For each subtree.. 
*/ + for (i = 0; i < SPX_D; i++) { + set_layer_addr(tree_addr, i); + set_tree_addr(tree_addr, tree); + + copy_subtree_addr(wots_addr, tree_addr); + set_keypair_addr(wots_addr, idx_leaf); + + copy_keypair_addr(wots_pk_addr, wots_addr); + + /* The WOTS public key is only correct if the signature was correct. */ + /* Initially, root is the FORS pk, but on subsequent iterations it is + the root of the subtree below the currently processed subtree. */ + wots_pk_from_sig(wots_pk, sig, root, &ctx, wots_addr); + sig += SPX_WOTS_BYTES; + + /* Compute the leaf node using the WOTS public key. */ + thash(leaf, wots_pk, SPX_WOTS_LEN, &ctx, wots_pk_addr); + + /* Compute the root node of this subtree. */ + compute_root(root, leaf, idx_leaf, 0, sig, SPX_TREE_HEIGHT, + &ctx, tree_addr); + sig += SPX_TREE_HEIGHT * SPX_N; + + /* Update the indices for the next layer. */ + idx_leaf = (tree & ((1 << SPX_TREE_HEIGHT)-1)); + tree = tree >> SPX_TREE_HEIGHT; + } + + /* Check if the root node equals the root node in the public key. */ + if (memcmp(root, pub_root, SPX_N)) { + return -1; + } + + return 0; +} + + +/** + * Returns an array containing the signature followed by the message. + */ +int crypto_sign(unsigned char *sm, unsigned long long *smlen, + const unsigned char *m, unsigned long long mlen, + const unsigned char *sk) +{ + size_t siglen; + + crypto_sign_signature(sm, &siglen, m, (size_t)mlen, sk); + + memmove(sm + SPX_BYTES, m, mlen); + *smlen = siglen + mlen; + + return 0; +} + +/** + * Verifies a given signature-message pair under a given public key. + */ +int crypto_sign_open(unsigned char *m, unsigned long long *mlen, + const unsigned char *sm, unsigned long long smlen, + const unsigned char *pk) +{ + /* The API caller does not necessarily know what size a signature should be + but SPHINCS+ signatures are always exactly SPX_BYTES. 
*/ + if (smlen < SPX_BYTES) { + memset(m, 0, smlen); + *mlen = 0; + return -1; + } + + *mlen = smlen - SPX_BYTES; + + if (crypto_sign_verify(sm, SPX_BYTES, sm + SPX_BYTES, (size_t)*mlen, pk)) { + memset(m, 0, smlen); + *mlen = 0; + return -1; + } + + /* If verification was successful, move the message to the right place. */ + memmove(m, sm + SPX_BYTES, *mlen); + + return 0; +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/utils.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/utils.c new file mode 100644 index 0000000..7f55917 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/utils.c @@ -0,0 +1,154 @@ +#include + +#include "../include/address.h" +#include "../include/hash.h" +#include "../include/params.h" +#include "../include/thash.h" +#include "../include/utils.h" + +/** + * Converts the value of 'in' to 'outlen' bytes in big-endian byte order. + */ +void ull_to_bytes(unsigned char *out, unsigned int outlen, + unsigned long long in) +{ + int i; + + /* Iterate over out in decreasing order, for big-endianness. */ + for (i = (signed int)outlen - 1; i >= 0; i--) { + out[i] = in & 0xff; + in = in >> 8; + } +} + +void u32_to_bytes(unsigned char *out, uint32_t in) +{ + out[0] = (unsigned char)(in >> 24); + out[1] = (unsigned char)(in >> 16); + out[2] = (unsigned char)(in >> 8); + out[3] = (unsigned char)in; +} + +/** + * Converts the inlen bytes in 'in' from big-endian byte order to an integer. + */ +unsigned long long bytes_to_ull(const unsigned char *in, unsigned int inlen) +{ + unsigned long long retval = 0; + unsigned int i; + + for (i = 0; i < inlen; i++) { + retval |= ((unsigned long long)in[i]) << (8*(inlen - 1 - i)); + } + return retval; +} + +/** + * Computes a root node given a leaf and an auth path. + * Expects address to be complete other than the tree_height and tree_index. 
+ */ +void compute_root(unsigned char *root, const unsigned char *leaf, + uint32_t leaf_idx, uint32_t idx_offset, + const unsigned char *auth_path, uint32_t tree_height, + const spx_ctx *ctx, uint32_t addr[8]) +{ + uint32_t i; + unsigned char buffer[2 * SPX_N]; + + /* If leaf_idx is odd (last bit = 1), current path element is a right child + and auth_path has to go left. Otherwise it is the other way around. */ + if (leaf_idx & 1) { + memcpy(buffer + SPX_N, leaf, SPX_N); + memcpy(buffer, auth_path, SPX_N); + } + else { + memcpy(buffer, leaf, SPX_N); + memcpy(buffer + SPX_N, auth_path, SPX_N); + } + auth_path += SPX_N; + + for (i = 0; i < tree_height - 1; i++) { + leaf_idx >>= 1; + idx_offset >>= 1; + /* Set the address of the node we're creating. */ + set_tree_height(addr, i + 1); + set_tree_index(addr, leaf_idx + idx_offset); + + /* Pick the right or left neighbor, depending on parity of the node. */ + if (leaf_idx & 1) { + thash(buffer + SPX_N, buffer, 2, ctx, addr); + memcpy(buffer, auth_path, SPX_N); + } + else { + thash(buffer, buffer, 2, ctx, addr); + memcpy(buffer + SPX_N, auth_path, SPX_N); + } + auth_path += SPX_N; + } + + /* The last iteration is exceptional; we do not copy an auth_path node. */ + leaf_idx >>= 1; + idx_offset >>= 1; + set_tree_height(addr, tree_height); + set_tree_index(addr, leaf_idx + idx_offset); + thash(root, buffer, 2, ctx, addr); +} + +/** + * For a given leaf index, computes the authentication path and the resulting + * root node using Merkle's TreeHash algorithm. + * Expects the layer and tree parts of the tree_addr to be set, as well as the + * tree type (i.e. SPX_ADDR_TYPE_HASHTREE or SPX_ADDR_TYPE_FORSTREE). + * Applies the offset idx_offset to indices before building addresses, so that + * it is possible to continue counting indices across trees. 
+ */ +void treehash(unsigned char *root, unsigned char *auth_path, const spx_ctx* ctx, + uint32_t leaf_idx, uint32_t idx_offset, uint32_t tree_height, + void (*gen_leaf)( + unsigned char* /* leaf */, + const spx_ctx* /* ctx */, + uint32_t /* addr_idx */, const uint32_t[8] /* tree_addr */), + uint32_t tree_addr[8]) +{ + SPX_VLA(uint8_t, stack, (tree_height+1)*SPX_N); + SPX_VLA(unsigned int, heights, tree_height+1); + unsigned int offset = 0; + uint32_t idx; + uint32_t tree_idx; + + for (idx = 0; idx < (uint32_t)(1 << tree_height); idx++) { + /* Add the next leaf node to the stack. */ + gen_leaf(stack + offset*SPX_N, ctx, idx + idx_offset, tree_addr); + offset++; + heights[offset - 1] = 0; + + /* If this is a node we need for the auth path.. */ + if ((leaf_idx ^ 0x1) == idx) { + memcpy(auth_path, stack + (offset - 1)*SPX_N, SPX_N); + } + + /* While the top-most nodes are of equal height.. */ + while (offset >= 2 && heights[offset - 1] == heights[offset - 2]) { + /* Compute index of the new node, in the next layer. */ + tree_idx = (idx >> (heights[offset - 1] + 1)); + + /* Set the address of the node we're creating. */ + set_tree_height(tree_addr, heights[offset - 1] + 1); + set_tree_index(tree_addr, + tree_idx + (idx_offset >> (heights[offset-1] + 1))); + /* Hash the top-most nodes from the stack together. */ + thash(stack + (offset - 2)*SPX_N, + stack + (offset - 2)*SPX_N, 2, ctx, tree_addr); + offset--; + /* Note that the top-most node is now one layer higher. */ + heights[offset - 1]++; + + /* If this is a node we need for the auth path.. 
*/ + if (((leaf_idx >> heights[offset - 1]) ^ 0x1) == tree_idx) { + memcpy(auth_path + heights[offset - 1]*SPX_N, + stack + (offset - 1)*SPX_N, SPX_N); + } + } + } + memcpy(root, stack, SPX_N); +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/utilsx1.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/utilsx1.c new file mode 100644 index 0000000..e5449b4 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/utilsx1.c @@ -0,0 +1,174 @@ +#include + +#include "../include/address.h" +#include "../include/params.h" +#include "../include/forsx1.h" +#include "../include/thash.h" +#include "../include/utils.h" +#include "../include/fors.h" +#include "../include/utilsx1.h" + +/* + * Generate the entire Merkle tree, computing the authentication path for + * leaf_idx, and the resulting root node using Merkle's TreeHash algorithm. + * Expects the layer and tree parts of the tree_addr to be set, as well as the + * tree type (i.e. SPX_ADDR_TYPE_HASHTREE or SPX_ADDR_TYPE_FORSTREE) + * + * This expects tree_addr to be initialized to the addr structures for the + * Merkle tree nodes + * + * Applies the offset idx_offset to indices before building addresses, so that + * it is possible to continue counting indices across trees. + * + * This works by using the standard Merkle tree building algorithm, + */ +void wots_treehashx1(unsigned char *root, unsigned char *auth_path, + const spx_ctx* ctx, + uint32_t leaf_idx, uint32_t idx_offset, + uint32_t tree_height, + uint32_t tree_addr[8], + leaf_info_x1 *info) +{ + /* This is where we keep the intermediate nodes */ + SPX_VLA(uint8_t, stack, tree_height*SPX_N); + + uint32_t idx; + uint32_t max_idx = (uint32_t)((1 << tree_height) - 1); + for (idx = 0;; idx++) { + unsigned char current[2*SPX_N]; /* Current logical node is at */ + /* index[SPX_N]. 
We do this to minimize the number of copies */ + /* needed during a thash */ + wots_gen_leafx1( ¤t[SPX_N], ctx, idx + idx_offset, + info ); + + /* Now combine the freshly generated right node with previously */ + /* generated left ones */ + uint32_t internal_idx_offset = idx_offset; + uint32_t internal_idx = idx; + uint32_t internal_leaf = leaf_idx; + uint32_t h; /* The height we are in the Merkle tree */ + for (h=0;; h++, internal_idx >>= 1, internal_leaf >>= 1) { + + /* Check if we hit the top of the tree */ + if (h == tree_height) { + /* We hit the root; return it */ + memcpy( root, ¤t[SPX_N], SPX_N ); + return; + } + + /* + * Check if the node we have is a part of the + * authentication path; if it is, write it out + */ + if ((internal_idx ^ internal_leaf) == 0x01) { + memcpy( &auth_path[ h * SPX_N ], + ¤t[SPX_N], + SPX_N ); + } + + /* + * Check if we're at a left child; if so, stop going up the stack + * Exception: if we've reached the end of the tree, keep on going + * (so we combine the last 4 nodes into the one root node in two + * more iterations) + */ + if ((internal_idx & 1) == 0 && idx < max_idx) { + break; + } + + /* Ok, we're at a right node */ + /* Now combine the left and right logical nodes together */ + + /* Set the address of the node we're creating. 
*/ + internal_idx_offset >>= 1; + set_tree_height(tree_addr, h + 1); + set_tree_index(tree_addr, internal_idx/2 + internal_idx_offset ); + + unsigned char *left = &stack[h * SPX_N]; + memcpy( ¤t[0], left, SPX_N ); + thash( ¤t[1 * SPX_N], + ¤t[0 * SPX_N], + 2, ctx, tree_addr); + } + + /* We've hit a left child; save the current for when we get the */ + /* corresponding right right */ + memcpy( &stack[h * SPX_N], ¤t[SPX_N], SPX_N); + } +} + +void fors_treehashx1(unsigned char *root, unsigned char *auth_path, + const spx_ctx* ctx, + uint32_t leaf_idx, uint32_t idx_offset, + uint32_t tree_height, + uint32_t tree_addr[8], + fors_gen_leaf_info *info) +{ + /* This is where we keep the intermediate nodes */ + SPX_VLA(uint8_t, stack, tree_height*SPX_N); + + uint32_t idx; + uint32_t max_idx = (uint32_t)((1 << tree_height) - 1); + for (idx = 0;; idx++) { + unsigned char current[2*SPX_N]; /* Current logical node is at */ + /* index[SPX_N]. We do this to minimize the number of copies */ + /* needed during a thash */ + + fors_gen_leafx1( ¤t[SPX_N], ctx, idx + idx_offset, + info ); + + /* Now combine the freshly generated right node with previously */ + /* generated left ones */ + uint32_t internal_idx_offset = idx_offset; + uint32_t internal_idx = idx; + uint32_t internal_leaf = leaf_idx; + uint32_t h; /* The height we are in the Merkle tree */ + for (h=0;; h++, internal_idx >>= 1, internal_leaf >>= 1) { + + /* Check if we hit the top of the tree */ + if (h == tree_height) { + /* We hit the root; return it */ + memcpy( root, ¤t[SPX_N], SPX_N ); + return; + } + + /* + * Check if the node we have is a part of the + * authentication path; if it is, write it out + */ + if ((internal_idx ^ internal_leaf) == 0x01) { + memcpy( &auth_path[ h * SPX_N ], + ¤t[SPX_N], + SPX_N ); + } + + /* + * Check if we're at a left child; if so, stop going up the stack + * Exception: if we've reached the end of the tree, keep on going + * (so we combine the last 4 nodes into the one root node in two + * 
more iterations) + */ + if ((internal_idx & 1) == 0 && idx < max_idx) { + break; + } + + /* Ok, we're at a right node */ + /* Now combine the left and right logical nodes together */ + + /* Set the address of the node we're creating. */ + internal_idx_offset >>= 1; + set_tree_height(tree_addr, h + 1); + set_tree_index(tree_addr, internal_idx/2 + internal_idx_offset ); + + unsigned char *left = &stack[h * SPX_N]; + memcpy( ¤t[0], left, SPX_N ); + thash( ¤t[1 * SPX_N], + ¤t[0 * SPX_N], + 2, ctx, tree_addr); + } + + /* We've hit a left child; save the current for when we get the */ + /* corresponding right right */ + memcpy( &stack[h * SPX_N], ¤t[SPX_N], SPX_N); + } +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/wots.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/wots.c new file mode 100644 index 0000000..aa413a8 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/wots.c @@ -0,0 +1,112 @@ +#include +#include + +#include "../include/address.h" +#include "../include/hash.h" +#include "../include/params.h" +#include "../include/thash.h" +#include "../include/utils.h" +#include "../include/utilsx1.h" +#include "../include/wots.h" +#include "../include/wotsx1.h" + +// TODO clarify address expectations, and make them more uniform. +// TODO i.e. do we expect types to be set already? +// TODO and do we expect modifications or copies? + +/** + * Computes the chaining function. + * out and in have to be n-byte arrays. + * + * Interprets in as start-th value of the chain. + * addr has to contain the address of the chain. + */ +static void gen_chain(unsigned char *out, const unsigned char *in, + unsigned int start, unsigned int steps, + const spx_ctx *ctx, uint32_t addr[8]) +{ + uint32_t i; + + /* Initialize out with the value at position 'start'. */ + memcpy(out, in, SPX_N); + + /* Iterate 'steps' calls to the hash function. 
*/ + for (i = start; i < (start+steps) && i < SPX_WOTS_W; i++) { + set_hash_addr(addr, i); + thash(out, out, 1, ctx, addr); + } +} + +/** + * base_w algorithm as described in draft. + * Interprets an array of bytes as integers in base w. + * This only works when log_w is a divisor of 8. + */ +static void base_w(unsigned int *output, const int out_len, + const unsigned char *input) +{ + int in = 0; + int out = 0; + unsigned char total; + int bits = 0; + int consumed; + + for (consumed = 0; consumed < out_len; consumed++) { + if (bits == 0) { + total = input[in]; + in++; + bits += 8; + } + bits -= SPX_WOTS_LOGW; + output[out] = (total >> bits) & (SPX_WOTS_W - 1); + out++; + } +} + +/* Computes the WOTS+ checksum over a message (in base_w). */ +static void wots_checksum(unsigned int *csum_base_w, + const unsigned int *msg_base_w) +{ + unsigned int csum = 0; + unsigned char csum_bytes[(SPX_WOTS_LEN2 * SPX_WOTS_LOGW + 7) / 8]; + unsigned int i; + + /* Compute checksum. */ + for (i = 0; i < SPX_WOTS_LEN1; i++) { + csum += SPX_WOTS_W - 1 - msg_base_w[i]; + } + + /* Convert checksum to base_w. */ + /* Make sure expected empty zero bits are the least significant bits. */ + csum = csum << ((8 - ((SPX_WOTS_LEN2 * SPX_WOTS_LOGW) % 8)) % 8); + ull_to_bytes(csum_bytes, sizeof(csum_bytes), csum); + base_w(csum_base_w, SPX_WOTS_LEN2, csum_bytes); +} + +/* Takes a message and derives the matching chain lengths. */ +void chain_lengths(unsigned int *lengths, const unsigned char *msg) +{ + base_w(lengths, SPX_WOTS_LEN1, msg); + wots_checksum(lengths + SPX_WOTS_LEN1, lengths); +} + +/** + * Takes a WOTS signature and an n-byte message, computes a WOTS public key. + * + * Writes the computed public key to 'pk'. 
+ */ +void wots_pk_from_sig(unsigned char *pk, + const unsigned char *sig, const unsigned char *msg, + const spx_ctx *ctx, uint32_t addr[8]) +{ + unsigned int lengths[SPX_WOTS_LEN]; + uint32_t i; + + chain_lengths(lengths, msg); + + for (i = 0; i < SPX_WOTS_LEN; i++) { + set_chain_addr(addr, i); + gen_chain(pk + i*SPX_N, sig + i*SPX_N, + lengths[i], SPX_WOTS_W - 1 - lengths[i], ctx, addr); + } +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/wotsx1.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/wotsx1.c new file mode 100644 index 0000000..f6d44fe --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/app/src/wotsx1.c @@ -0,0 +1,73 @@ +#include +#include + +#include "../include/address.h" +#include "../include/hash.h" +#include "../include/params.h" +#include "../include/thash.h" +#include "../include/utils.h" +#include "../include/wots.h" +#include "../include/wotsx1.h" + +/* + * This generates a WOTS public key + * It also generates the WOTS signature if leaf_info indicates + * that we're signing with this WOTS key + */ +void wots_gen_leafx1(unsigned char *dest, + const spx_ctx *ctx, + uint32_t leaf_idx, leaf_info_x1 *v_info) { + struct leaf_info_x1 *info = v_info; + uint32_t *leaf_addr = info->leaf_addr; + uint32_t *pk_addr = info->pk_addr; + unsigned int i, k; + unsigned char pk_buffer[ SPX_WOTS_BYTES ]; + unsigned char *buffer; + uint32_t wots_k_mask; + + if (leaf_idx == info->wots_sign_leaf) { + /* We're traversing the leaf that's signing; generate the WOTS */ + /* signature */ + wots_k_mask = 0; + } else { + /* Nope, we're just generating pk's; turn off the signature logic */ + wots_k_mask = (uint32_t)~0; + } + + set_keypair_addr( leaf_addr, leaf_idx ); + set_keypair_addr( pk_addr, leaf_idx ); + + for (i = 0, buffer = pk_buffer; i < SPX_WOTS_LEN; i++, buffer += SPX_N) { + uint32_t wots_k = info->wots_steps[i] | wots_k_mask; /* Set wots_k to */ + /* the step if we're 
generating a signature, ~0 if we're not */ + + /* Start with the secret seed */ + set_chain_addr(leaf_addr, i); + set_hash_addr(leaf_addr, 0); + set_type(leaf_addr, SPX_ADDR_TYPE_WOTSPRF); + + prf_addr(buffer, ctx, leaf_addr); + + set_type(leaf_addr, SPX_ADDR_TYPE_WOTS); + + /* Iterate down the WOTS chain */ + for (k=0;; k++) { + /* Check if this is the value that needs to be saved as a */ + /* part of the WOTS signature */ + if (k == wots_k) { + memcpy( info->wots_sig + i * SPX_N, buffer, SPX_N ); + } + + /* Check if we hit the top of the chain */ + if (k == SPX_WOTS_W - 1) break; + + /* Iterate one step on the chain */ + set_hash_addr(leaf_addr, k); + + thash(buffer, buffer, 1, ctx, leaf_addr); + } + } + + /* Do the final thash to generate the public keys */ + thash(dest, pk_buffer, SPX_WOTS_LEN, ctx, pk_addr); +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/CMakeLists.txt b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/CMakeLists.txt new file mode 100644 index 0000000..6beebc8 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(${HASH_BACKEND}) diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/CMakeLists.txt b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/CMakeLists.txt new file mode 100644 index 0000000..2ffb071 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/CMakeLists.txt @@ -0,0 +1,13 @@ +set(BLAKE_SOURCES + src/blake256.c + src/blake512.c + src/hash_blake.c + ../../app/src/utils.c + src/thash_blake_${THASH}.c +) + +set(PARAMS "sphincs-blake-${SECPAR}") +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DPARAMS=${PARAMS}") + +add_library(blake SHARED ${BLAKE_SOURCES}) +target_include_directories(blake PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include) diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/include/blake.h 
b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/include/blake.h new file mode 100644 index 0000000..fe7508e --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/include/blake.h @@ -0,0 +1,51 @@ +#ifndef SPX_BLAKE_H +#define SPX_BLAKE_H + +#include + +#define SPX_BLAKE256_OUTPUT_BYTES 32 /* This does not necessarily equal SPX_N */ +#define SPX_BLAKE512_OUTPUT_BYTES 64 + +#if SPX_BLAKE256_OUTPUT_BYTES < SPX_N + #error Linking against BLAKE-256 with N larger than 32 bytes is not supported +#endif + +typedef struct +{ + unsigned int h[8], s[4], t[2]; + int buflen, nullt; + unsigned char buf[64]; +} blakestate256; + +typedef struct +{ + unsigned long long h[8], s[4], t[2]; + int buflen, nullt; + unsigned char buf[128]; +} blakestate512; + +/* Implementation of Blake-512 */ +int blake512(uint8_t *out, const unsigned char *in, unsigned long long inlen); + +void blake512_init(blakestate512 *S); +void blake512_compress(blakestate512 *S, const unsigned char *block); +void blake512_update(blakestate512 *S, const unsigned char *in, unsigned long long inlen); +void blake512_final(blakestate512 *S, unsigned char *out); + +/* Implementation of Blake-256 */ +int blake256(unsigned char *out, const unsigned char *in, unsigned long long inlen); + +void blake256_init(blakestate256 *S); +void blake256_compress(blakestate256 *S, const unsigned char *block); +void blake256_update(blakestate256 *S, const unsigned char *in, unsigned long long inlen); +void blake256_final(blakestate256 *S, unsigned char *out); + +#define blake256_mgf1 SPX_NAMESPACE(blake256_mgf1) +void blake256_mgf1(unsigned char *out, unsigned long outlen, + const unsigned char *in, unsigned long inlen); + +#define blake512_mgf1 SPX_NAMESPACE(blake512_mgf1) +void blake512_mgf1(unsigned char *out, unsigned long outlen, + const unsigned char *in, unsigned long inlen); + +#endif diff --git 
a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/include/blake_offsets.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/include/blake_offsets.h new file mode 100644 index 0000000..5f08e03 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/include/blake_offsets.h @@ -0,0 +1,20 @@ +#if !defined( BLAKE_OFFSETS_H_ ) +#define BLAKE_OFFSETS_H_ + +/* + * Offsets of various fields in the address structure when we use BLAKE as + * the Sphincs+ hash function + */ + +#define SPX_OFFSET_LAYER 3 /* The byte used to specify the Merkle tree layer */ +#define SPX_OFFSET_TREE 8 /* The start of the 8 byte field used to specify the tree */ +#define SPX_OFFSET_TYPE 19 /* The byte used to specify the hash type (reason) */ +#define SPX_OFFSET_KP_ADDR 20 /* The start of the 4 byte field used to specify the key pair address */ +#define SPX_OFFSET_CHAIN_ADDR 27 /* The byte used to specify the chain address (which Winternitz chain) */ +#define SPX_OFFSET_HASH_ADDR 31 /* The byte used to specify the hash address (where in the Winternitz chain) */ +#define SPX_OFFSET_TREE_HGT 27 /* The byte used to specify the height of this node in the FORS or Merkle tree */ +#define SPX_OFFSET_TREE_INDEX 28 /* The start of the 4 byte field used to specify the node in the FORS or Merkle tree */ + +#define SPX_BLAKE 1 + +#endif /* BLAKE_OFFSETS_H_ */ diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/blake256.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/blake256.c new file mode 100644 index 0000000..9e05a43 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/blake256.c @@ -0,0 +1,402 @@ +// supercop-20140525/crypto_hash/blake256/sandy + +/* + BLAKE reference C implementation + + Copyright (c) 2012 Jean-Philippe Aumasson + + To the extent possible under law, the author(s) have dedicated all copyright + and related 
and neighboring rights to this software to the public domain + worldwide. This software is distributed without any warranty. + + You should have received a copy of the CC0 Public Domain Dedication along + with this software. If not, see + <http://creativecommons.org/publicdomain/zero/1.0/>. + */ + +#include <stdint.h> +#include <string.h> + +#include "../../../app/include/utils.h" + +#include "../include/blake.h" + +typedef unsigned long long crypto_uint64; +typedef unsigned int crypto_uint32; +typedef unsigned char crypto_uint8; + +typedef crypto_uint64 u64; +typedef crypto_uint32 u32; +typedef crypto_uint8 u8; + +#define U8TO32(p) \ + (((uint32_t)((p)[0]) << 24) | ((uint32_t)((p)[1]) << 16) | \ + ((uint32_t)((p)[2]) << 8) | ((uint32_t)((p)[3]) )) +#define U32TO8(p, v) \ + (p)[0] = (uint8_t)((v) >> 24); \ + (p)[1] = (uint8_t)((v) >> 16); \ + (p)[2] = (uint8_t)((v) >> 8); \ + (p)[3] = (uint8_t)((v)); + +static const u32 cst[16] = { + 0x243F6A88,0x85A308D3,0x13198A2E,0x03707344, + 0xA4093822,0x299F31D0,0x082EFA98,0xEC4E6C89, + 0x452821E6,0x38D01377,0xBE5466CF,0x34E90C6C, + 0xC0AC29B7,0xC97C50DD,0x3F84D5B5,0xB5470917}; + +static const u8 padding[] = + {0x80,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; + +#define BLAKE256_ROT(x,n) (((x)<<(32-n))|( (x)>>(n))) + +void blake256_compress( blakestate256 *S, const unsigned char *block ) +{ + u32 m0; + u32 m1; + u32 m2; + u32 m3; + u32 m4; + u32 m5; + u32 m6; + u32 m7; + u32 m8; + u32 m9; + u32 m10; + u32 m11; + u32 m12; + u32 m13; + u32 m14; + u32 m15; + u32 v0; + u32 v1; + u32 v2; + u32 v3; + u32 v4; + u32 v5; + u32 v6; + u32 v7; + u32 v8; + u32 v9; + u32 v10; + u32 v11; + u32 v12; + u32 v13; + u32 v14; + u32 v15; + + m0 = U8TO32(block + 0); + m1 = U8TO32(block + 4); + m2 = U8TO32(block + 8); + m3 = U8TO32(block + 12); + m4 = U8TO32(block + 16); + m5 = U8TO32(block + 20); + m6 = U8TO32(block + 24); + m7 = U8TO32(block + 28); + m8 = U8TO32(block + 32); + m9 = U8TO32(block + 36); + m10 = U8TO32(block + 40); + 
m11 = U8TO32(block + 44); + m12 = U8TO32(block + 48); + m13 = U8TO32(block + 52); + m14 = U8TO32(block + 56); + m15 = U8TO32(block + 60); + v0 = S->h[0]; + v1 = S->h[1]; + v2 = S->h[2]; + v3 = S->h[3]; + v4 = S->h[4]; + v5 = S->h[5]; + v6 = S->h[6]; + v7 = S->h[7]; + v8 = S->s[0] ^ 0x243F6A88; + v9 = S->s[1] ^ 0x85A308D3; + v10 = S->s[2] ^ 0x13198A2E; + v11 = S->s[3] ^ 0x03707344; + v12 = 0xA4093822; + v13 = 0x299F31D0; + v14 = 0x082EFA98; + v15 = 0xEC4E6C89; + if (S->nullt == 0) { + v12 ^= S->t[0]; + v13 ^= S->t[0]; + v14 ^= S->t[1]; + v15 ^= S->t[1]; + } + +#define ROUND(m0,c0,m1,c1,m2,c2,m3,c3,m4,c4,m5,c5,m6,c6,m7,c7,m8,c8,m9,c9,m10,c10,m11,c11,m12,c12,m13,c13,m14,c14,m15,c15) \ + v0 += m0 ^ c0; \ + v0 += v4; \ + v12 ^= v0; \ + v12 = BLAKE256_ROT( v12,16); \ + v8 += v12; \ + v4 ^= v8; \ + v4 = BLAKE256_ROT( v4,12); \ + v1 += m2 ^ c2; \ + v1 += v5; \ + v13 ^= v1; \ + v13 = BLAKE256_ROT( v13,16); \ + v9 += v13; \ + v5 ^= v9; \ + v5 = BLAKE256_ROT( v5,12); \ + v2 += m4 ^ c4; \ + v2 += v6; \ + v14 ^= v2; \ + v14 = BLAKE256_ROT( v14,16); \ + v10 += v14; \ + v6 ^= v10; \ + v6 = BLAKE256_ROT( v6,12); \ + v3 += m6 ^ c6; \ + v3 += v7; \ + v15 ^= v3; \ + v15 = BLAKE256_ROT( v15,16); \ + v11 += v15; \ + v7 ^= v11; \ + v7 = BLAKE256_ROT( v7,12); \ + v2 += m5 ^ c5; \ + v2 += v6; \ + v14 ^= v2; \ + v14 = BLAKE256_ROT( v14, 8); \ + v10 += v14; \ + v6 ^= v10; \ + v6 = BLAKE256_ROT( v6, 7); \ + v3 += m7 ^ c7; \ + v3 += v7; \ + v15 ^= v3; \ + v15 = BLAKE256_ROT( v15, 8); \ + v11 += v15; \ + v7 ^= v11; \ + v7 = BLAKE256_ROT( v7, 7); \ + v1 += m3 ^ c3; \ + v1 += v5; \ + v13 ^= v1; \ + v13 = BLAKE256_ROT( v13, 8); \ + v9 += v13; \ + v5 ^= v9; \ + v5 = BLAKE256_ROT( v5, 7); \ + v0 += m1 ^ c1; \ + v0 += v4; \ + v12 ^= v0; \ + v12 = BLAKE256_ROT( v12, 8); \ + v8 += v12; \ + v4 ^= v8; \ + v4 = BLAKE256_ROT( v4, 7); \ + v0 += m8 ^ c8; \ + v0 += v5; \ + v15 ^= v0; \ + v15 = BLAKE256_ROT( v15,16); \ + v10 += v15; \ + v5 ^= v10; \ + v5 = BLAKE256_ROT( v5,12); \ + v1 += m10 ^ c10; \ + v1 += 
v6; \ + v12 ^= v1; \ + v12 = BLAKE256_ROT( v12,16); \ + v11 += v12; \ + v6 ^= v11; \ + v6 = BLAKE256_ROT( v6,12); \ + v2 += m12 ^ c12; \ + v2 += v7; \ + v13 ^= v2; \ + v13 = BLAKE256_ROT( v13,16); \ + v8 += v13; \ + v7 ^= v8; \ + v7 = BLAKE256_ROT( v7,12); \ + v3 += m14 ^ c14; \ + v3 += v4; \ + v14 ^= v3; \ + v14 = BLAKE256_ROT( v14,16); \ + v9 += v14; \ + v4 ^= v9; \ + v4 = BLAKE256_ROT( v4,12); \ + v2 += m13 ^ c13; \ + v2 += v7; \ + v13 ^= v2; \ + v13 = BLAKE256_ROT( v13, 8); \ + v8 += v13; \ + v7 ^= v8; \ + v7 = BLAKE256_ROT( v7, 7); \ + v3 += m15 ^ c15; \ + v3 += v4; \ + v14 ^= v3; \ + v14 = BLAKE256_ROT( v14, 8); \ + v9 += v14; \ + v4 ^= v9; \ + v4 = BLAKE256_ROT( v4, 7); \ + v1 += m11 ^ c11; \ + v1 += v6; \ + v12 ^= v1; \ + v12 = BLAKE256_ROT( v12, 8); \ + v11 += v12; \ + v6 ^= v11; \ + v6 = BLAKE256_ROT( v6, 7); \ + v0 += m9 ^ c9; \ + v0 += v5; \ + v15 ^= v0; \ + v15 = BLAKE256_ROT( v15, 8); \ + v10 += v15; \ + v5 ^= v10; \ + v5 = BLAKE256_ROT( v5, 7); \ + + ROUND(m0,cst[1],m1,cst[0],m2,cst[3],m3,cst[2],m4,cst[5],m5,cst[4],m6,cst[7],m7,cst[6],m8,cst[9],m9,cst[8],m10,cst[11],m11,cst[10],m12,cst[13],m13,cst[12],m14,cst[15],m15,cst[14]) + ROUND(m14,cst[10],m10,cst[14],m4,cst[8],m8,cst[4],m9,cst[15],m15,cst[9],m13,cst[6],m6,cst[13],m1,cst[12],m12,cst[1],m0,cst[2],m2,cst[0],m11,cst[7],m7,cst[11],m5,cst[3],m3,cst[5]) + ROUND(m11,cst[8],m8,cst[11],m12,cst[0],m0,cst[12],m5,cst[2],m2,cst[5],m15,cst[13],m13,cst[15],m10,cst[14],m14,cst[10],m3,cst[6],m6,cst[3],m7,cst[1],m1,cst[7],m9,cst[4],m4,cst[9]) + ROUND(m7,cst[9],m9,cst[7],m3,cst[1],m1,cst[3],m13,cst[12],m12,cst[13],m11,cst[14],m14,cst[11],m2,cst[6],m6,cst[2],m5,cst[10],m10,cst[5],m4,cst[0],m0,cst[4],m15,cst[8],m8,cst[15]) + ROUND(m9,cst[0],m0,cst[9],m5,cst[7],m7,cst[5],m2,cst[4],m4,cst[2],m10,cst[15],m15,cst[10],m14,cst[1],m1,cst[14],m11,cst[12],m12,cst[11],m6,cst[8],m8,cst[6],m3,cst[13],m13,cst[3]) + 
ROUND(m2,cst[12],m12,cst[2],m6,cst[10],m10,cst[6],m0,cst[11],m11,cst[0],m8,cst[3],m3,cst[8],m4,cst[13],m13,cst[4],m7,cst[5],m5,cst[7],m15,cst[14],m14,cst[15],m1,cst[9],m9,cst[1]) + ROUND(m12,cst[5],m5,cst[12],m1,cst[15],m15,cst[1],m14,cst[13],m13,cst[14],m4,cst[10],m10,cst[4],m0,cst[7],m7,cst[0],m6,cst[3],m3,cst[6],m9,cst[2],m2,cst[9],m8,cst[11],m11,cst[8]) + ROUND(m13,cst[11],m11,cst[13],m7,cst[14],m14,cst[7],m12,cst[1],m1,cst[12],m3,cst[9],m9,cst[3],m5,cst[0],m0,cst[5],m15,cst[4],m4,cst[15],m8,cst[6],m6,cst[8],m2,cst[10],m10,cst[2]) + ROUND(m6,cst[15],m15,cst[6],m14,cst[9],m9,cst[14],m11,cst[3],m3,cst[11],m0,cst[8],m8,cst[0],m12,cst[2],m2,cst[12],m13,cst[7],m7,cst[13],m1,cst[4],m4,cst[1],m10,cst[5],m5,cst[10]) + ROUND(m10,cst[2],m2,cst[10],m8,cst[4],m4,cst[8],m7,cst[6],m6,cst[7],m1,cst[5],m5,cst[1],m15,cst[11],m11,cst[15],m9,cst[14],m14,cst[9],m3,cst[12],m12,cst[3],m13,cst[0],m0,cst[13]) + ROUND(m0,cst[1],m1,cst[0],m2,cst[3],m3,cst[2],m4,cst[5],m5,cst[4],m6,cst[7],m7,cst[6],m8,cst[9],m9,cst[8],m10,cst[11],m11,cst[10],m12,cst[13],m13,cst[12],m14,cst[15],m15,cst[14]) + ROUND(m14,cst[10],m10,cst[14],m4,cst[8],m8,cst[4],m9,cst[15],m15,cst[9],m13,cst[6],m6,cst[13],m1,cst[12],m12,cst[1],m0,cst[2],m2,cst[0],m11,cst[7],m7,cst[11],m5,cst[3],m3,cst[5]) + ROUND(m11,cst[8],m8,cst[11],m12,cst[0],m0,cst[12],m5,cst[2],m2,cst[5],m15,cst[13],m13,cst[15],m10,cst[14],m14,cst[10],m3,cst[6],m6,cst[3],m7,cst[1],m1,cst[7],m9,cst[4],m4,cst[9]) + ROUND(m7,cst[9],m9,cst[7],m3,cst[1],m1,cst[3],m13,cst[12],m12,cst[13],m11,cst[14],m14,cst[11],m2,cst[6],m6,cst[2],m5,cst[10],m10,cst[5],m4,cst[0],m0,cst[4],m15,cst[8],m8,cst[15]) + + v0 ^= v8; + v1 ^= v9; + v2 ^= v10; + v3 ^= v11; + v4 ^= v12; + v5 ^= v13; + v6 ^= v14; + v7 ^= v15; + + v0 ^= S->s[0]; + v1 ^= S->s[1]; + v2 ^= S->s[2]; + v3 ^= S->s[3]; + v4 ^= S->s[0]; + v5 ^= S->s[1]; + v6 ^= S->s[2]; + v7 ^= S->s[3]; + + S->h[0] ^= v0; + S->h[1] ^= v1; + S->h[2] ^= v2; + S->h[3] ^= v3; + S->h[4] ^= v4; + S->h[5] ^= v5; + S->h[6] ^= v6; + S->h[7] 
^= v7; +} + + +void blake256_init( blakestate256 *S ) { + + S->h[0]=0x6A09E667; + S->h[1]=0xBB67AE85; + S->h[2]=0x3C6EF372; + S->h[3]=0xA54FF53A; + S->h[4]=0x510E527F; + S->h[5]=0x9B05688C; + S->h[6]=0x1F83D9AB; + S->h[7]=0x5BE0CD19; + S->t[0]=S->t[1]=S->buflen=S->nullt=0; + S->s[0]=S->s[1]=S->s[2]=S->s[3] =0; +} + + +void blake256_update( blakestate256 *S, const u8 *data, u64 datalen ) { + + int left=S->buflen >> 3; + int fill=64 - left; + + if( left && ( ((datalen >> 3) & 0x3F) >= fill ) ) { + memcpy( (void*) (S->buf + left), (void*) data, fill ); + S->t[0] += 512; + if (S->t[0] == 0) S->t[1]++; + blake256_compress( S, S->buf ); + data += fill; + datalen -= (fill << 3); + left = 0; + } + + while( datalen >= 512 ) { + S->t[0] += 512; + if (S->t[0] == 0) S->t[1]++; + blake256_compress( S, data ); + data += 64; + datalen -= 512; + } + + if( datalen > 0 ) { + memcpy( (void*) (S->buf + left), (void*) data, datalen>>3 ); + S->buflen = (left<<3) + datalen; + } + else S->buflen=0; +} + + +void blake256_final( blakestate256 *S, u8 *digest ) { + + u8 msglen[8], zo=0x01, oo=0x81; + u32 lo=S->t[0] + S->buflen, hi=S->t[1]; + if ( lo < S->buflen ) hi++; + U32TO8( msglen + 0, hi ); + U32TO8( msglen + 4, lo ); + + if ( S->buflen == 440 ) { /* one padding byte */ + S->t[0] -= 8; + blake256_update( S, &oo, 8 ); + } + else { + if ( S->buflen < 440 ) { /* enough space to fill the block */ + if ( !S->buflen ) S->nullt=1; + S->t[0] -= 440 - S->buflen; + blake256_update( S, padding, 440 - S->buflen ); + } + else { /* need 2 compressions */ + S->t[0] -= 512 - S->buflen; + blake256_update( S, padding, 512 - S->buflen ); + S->t[0] -= 440; + blake256_update( S, padding+1, 440 ); + S->nullt = 1; + } + blake256_update( S, &zo, 8 ); + S->t[0] -= 8; + } + S->t[0] -= 64; + blake256_update( S, msglen, 64 ); + + U32TO8( digest + 0, S->h[0]); + U32TO8( digest + 4, S->h[1]); + U32TO8( digest + 8, S->h[2]); + U32TO8( digest +12, S->h[3]); + U32TO8( digest +16, S->h[4]); + U32TO8( digest +20, 
S->h[5]); + U32TO8( digest +24, S->h[6]); + U32TO8( digest +28, S->h[7]); +} + +void blake256_mgf1(unsigned char *out, unsigned long outlen, + const unsigned char *in, unsigned long inlen) +{ + SPX_VLA(uint8_t, inbuf, inlen+4); + unsigned char outbuf[SPX_BLAKE256_OUTPUT_BYTES]; + unsigned long i; + + memcpy(inbuf, in, inlen); + + /* While we can fit in at least another full block of BLAKE256 output.. */ + for (i = 0; (i+1)*SPX_BLAKE256_OUTPUT_BYTES <= outlen; i++) { + u32_to_bytes(inbuf + inlen, i); + blake256(out, inbuf, inlen + 4); + out += SPX_BLAKE256_OUTPUT_BYTES; + } + /* Until we cannot anymore, and we fill the remainder. */ + if (outlen > i*SPX_BLAKE256_OUTPUT_BYTES) { + u32_to_bytes(inbuf + inlen, i); + blake256(outbuf, inbuf, inlen + 4); + memcpy(out, outbuf, outlen - i*SPX_BLAKE256_OUTPUT_BYTES); + } +} + +int blake256( unsigned char *out, const unsigned char *in, unsigned long long inlen ) +{ + blakestate256 S; + blake256_init( &S ); + blake256_update( &S, in, inlen*8 ); + blake256_final( &S, out ); + return 0; +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/blake512.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/blake512.c new file mode 100644 index 0000000..2983a72 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/blake512.c @@ -0,0 +1,409 @@ +// supercop-20140525/crypto_hash/blake512/sandy + +/* + BLAKE reference C implementation + + Copyright (c) 2012 Jean-Philippe Aumasson + + To the extent possible under law, the author(s) have dedicated all copyright + and related and neighboring rights to this software to the public domain + worldwide. This software is distributed without any warranty. + + You should have received a copy of the CC0 Public Domain Dedication along + with this software. If not, see + . 
+ */ + +#include +#include + +#include "../../../app/include/utils.h" + +#include "../include/blake.h" + +typedef unsigned long long crypto_uint64; +typedef unsigned int crypto_uint32; +typedef unsigned char crypto_uint8; + +typedef crypto_uint64 u64; +typedef crypto_uint32 u32; +typedef crypto_uint8 u8; + +#define U8TO32(p) \ + (((uint32_t)((p)[0]) << 24) | ((uint32_t)((p)[1]) << 16) | \ + ((uint32_t)((p)[2]) << 8) | ((uint32_t)((p)[3]) )) +#define U32TO8(p, v) \ + (p)[0] = (uint8_t)((v) >> 24); \ + (p)[1] = (uint8_t)((v) >> 16); \ + (p)[2] = (uint8_t)((v) >> 8); \ + (p)[3] = (uint8_t)((v)); +#define U8TO64(p) (((uint64_t)U8TO32(p) << 32) | ((uint64_t)U8TO32(p + 4))) +#define U64TO8(p, v) \ + U32TO8((p), (uint32_t)((v) >> 32)); \ + U32TO8((p) + 4, (uint32_t)((v) )); + +const u64 cst[16] = { + 0x243F6A8885A308D3ULL,0x13198A2E03707344ULL,0xA4093822299F31D0ULL,0x082EFA98EC4E6C89ULL, + 0x452821E638D01377ULL,0xBE5466CF34E90C6CULL,0xC0AC29B7C97C50DDULL,0x3F84D5B5B5470917ULL, + 0x9216D5D98979FB1BULL,0xD1310BA698DFB5ACULL,0x2FFD72DBD01ADFB7ULL,0xB8E1AFED6A267E96ULL, + 0xBA7C9045F12C7F99ULL,0x24A19947B3916CF7ULL,0x0801F2E2858EFC16ULL,0x636920D871574E69ULL +}; + +static const u8 padding[129] = +{0x80,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; + +#define BLAKE512_ROT(x,n) (((x)<<(64-n))|( (x)>>(n))) + +void blake512_compress( blakestate512 *S, const u8 *block ) +{ + u64 m0; + u64 m1; + u64 m2; + u64 m3; + u64 m4; + u64 m5; + u64 m6; + u64 m7; + u64 m8; + u64 m9; + u64 m10; + u64 m11; + u64 m12; + u64 m13; + u64 m14; + u64 m15; + u64 v0; + u64 v1; + u64 v2; + u64 v3; + u64 v4; + u64 v5; + u64 v6; + u64 v7; + u64 v8; + u64 v9; + u64 v10; + u64 v11; + u64 v12; + u64 v13; + u64 v14; + u64 v15; + + m0 = U8TO64(block + 0); + m1 = U8TO64(block + 8); + m2 = 
U8TO64(block + 16); + m3 = U8TO64(block + 24); + m4 = U8TO64(block + 32); + m5 = U8TO64(block + 40); + m6 = U8TO64(block + 48); + m7 = U8TO64(block + 56); + m8 = U8TO64(block + 64); + m9 = U8TO64(block + 72); + m10 = U8TO64(block + 80); + m11 = U8TO64(block + 88); + m12 = U8TO64(block + 96); + m13 = U8TO64(block + 104); + m14 = U8TO64(block + 112); + m15 = U8TO64(block + 120); + v0 = S->h[0]; + v1 = S->h[1]; + v2 = S->h[2]; + v3 = S->h[3]; + v4 = S->h[4]; + v5 = S->h[5]; + v6 = S->h[6]; + v7 = S->h[7]; + v8 = S->s[0] ^ 0x243F6A8885A308D3ULL; + v9 = S->s[1] ^ 0x13198A2E03707344ULL; + v10 = S->s[2] ^ 0xA4093822299F31D0ULL; + v11 = S->s[3] ^ 0x082EFA98EC4E6C89ULL; + v12 = 0x452821E638D01377ULL; + v13 = 0xBE5466CF34E90C6CULL; + v14 = 0xC0AC29B7C97C50DDULL; + v15 = 0x3F84D5B5B5470917ULL; + + if (S->nullt == 0) { + v12 ^= S->t[0]; + v13 ^= S->t[0]; + v14 ^= S->t[1]; + v15 ^= S->t[1]; + } + +#define ROUND(m0,c0,m1,c1,m2,c2,m3,c3,m4,c4,m5,c5,m6,c6,m7,c7,m8,c8,m9,c9,m10,c10,m11,c11,m12,c12,m13,c13,m14,c14,m15,c15) \ + v0 += m0 ^ c0; \ + v0 += v4; \ + v12 ^= v0; \ + v12 = BLAKE512_ROT( v12,32); \ + v8 += v12; \ + v4 ^= v8; \ + v4 = BLAKE512_ROT( v4,25); \ + v1 += m2 ^ c2; \ + v1 += v5; \ + v13 ^= v1; \ + v13 = BLAKE512_ROT( v13,32); \ + v9 += v13; \ + v5 ^= v9; \ + v5 = BLAKE512_ROT( v5,25); \ + v2 += m4 ^ c4; \ + v2 += v6; \ + v14 ^= v2; \ + v14 = BLAKE512_ROT( v14,32); \ + v10 += v14; \ + v6 ^= v10; \ + v6 = BLAKE512_ROT( v6,25); \ + v3 += m6 ^ c6; \ + v3 += v7; \ + v15 ^= v3; \ + v15 = BLAKE512_ROT( v15,32); \ + v11 += v15; \ + v7 ^= v11; \ + v7 = BLAKE512_ROT( v7,25); \ + v2 += m5 ^ c5; \ + v2 += v6; \ + v14 ^= v2; \ + v14 = BLAKE512_ROT( v14,16); \ + v10 += v14; \ + v6 ^= v10; \ + v6 = BLAKE512_ROT( v6,11); \ + v3 += m7 ^ c7; \ + v3 += v7; \ + v15 ^= v3; \ + v15 = BLAKE512_ROT( v15,16); \ + v11 += v15; \ + v7 ^= v11; \ + v7 = BLAKE512_ROT( v7,11); \ + v1 += m3 ^ c3; \ + v1 += v5; \ + v13 ^= v1; \ + v13 = BLAKE512_ROT( v13,16); \ + v9 += v13; \ + v5 ^= v9; \ + v5 = 
BLAKE512_ROT( v5,11); \ + v0 += m1 ^ c1; \ + v0 += v4; \ + v12 ^= v0; \ + v12 = BLAKE512_ROT( v12,16); \ + v8 += v12; \ + v4 ^= v8; \ + v4 = BLAKE512_ROT( v4,11); \ + v0 += m8 ^ c8; \ + v0 += v5; \ + v15 ^= v0; \ + v15 = BLAKE512_ROT( v15,32); \ + v10 += v15; \ + v5 ^= v10; \ + v5 = BLAKE512_ROT( v5,25); \ + v1 += m10 ^ c10; \ + v1 += v6; \ + v12 ^= v1; \ + v12 = BLAKE512_ROT( v12,32); \ + v11 += v12; \ + v6 ^= v11; \ + v6 = BLAKE512_ROT( v6,25); \ + v2 += m12 ^ c12; \ + v2 += v7; \ + v13 ^= v2; \ + v13 = BLAKE512_ROT( v13,32); \ + v8 += v13; \ + v7 ^= v8; \ + v7 = BLAKE512_ROT( v7,25); \ + v3 += m14 ^ c14; \ + v3 += v4; \ + v14 ^= v3; \ + v14 = BLAKE512_ROT( v14,32); \ + v9 += v14; \ + v4 ^= v9; \ + v4 = BLAKE512_ROT( v4,25); \ + v2 += m13 ^ c13; \ + v2 += v7; \ + v13 ^= v2; \ + v13 = BLAKE512_ROT( v13,16); \ + v8 += v13; \ + v7 ^= v8; \ + v7 = BLAKE512_ROT( v7,11); \ + v3 += m15 ^ c15; \ + v3 += v4; \ + v14 ^= v3; \ + v14 = BLAKE512_ROT( v14,16); \ + v9 += v14; \ + v4 ^= v9; \ + v4 = BLAKE512_ROT( v4,11); \ + v1 += m11 ^ c11; \ + v1 += v6; \ + v12 ^= v1; \ + v12 = BLAKE512_ROT( v12,16); \ + v11 += v12; \ + v6 ^= v11; \ + v6 = BLAKE512_ROT( v6,11); \ + v0 += m9 ^ c9; \ + v0 += v5; \ + v15 ^= v0; \ + v15 = BLAKE512_ROT( v15,16); \ + v10 += v15; \ + v5 ^= v10; \ + v5 = BLAKE512_ROT( v5,11); \ + + ROUND(m0,cst[1],m1,cst[0],m2,cst[3],m3,cst[2],m4,cst[5],m5,cst[4],m6,cst[7],m7,cst[6],m8,cst[9],m9,cst[8],m10,cst[11],m11,cst[10],m12,cst[13],m13,cst[12],m14,cst[15],m15,cst[14]) + ROUND(m14,cst[10],m10,cst[14],m4,cst[8],m8,cst[4],m9,cst[15],m15,cst[9],m13,cst[6],m6,cst[13],m1,cst[12],m12,cst[1],m0,cst[2],m2,cst[0],m11,cst[7],m7,cst[11],m5,cst[3],m3,cst[5]) + ROUND(m11,cst[8],m8,cst[11],m12,cst[0],m0,cst[12],m5,cst[2],m2,cst[5],m15,cst[13],m13,cst[15],m10,cst[14],m14,cst[10],m3,cst[6],m6,cst[3],m7,cst[1],m1,cst[7],m9,cst[4],m4,cst[9]) + 
ROUND(m7,cst[9],m9,cst[7],m3,cst[1],m1,cst[3],m13,cst[12],m12,cst[13],m11,cst[14],m14,cst[11],m2,cst[6],m6,cst[2],m5,cst[10],m10,cst[5],m4,cst[0],m0,cst[4],m15,cst[8],m8,cst[15]) + ROUND(m9,cst[0],m0,cst[9],m5,cst[7],m7,cst[5],m2,cst[4],m4,cst[2],m10,cst[15],m15,cst[10],m14,cst[1],m1,cst[14],m11,cst[12],m12,cst[11],m6,cst[8],m8,cst[6],m3,cst[13],m13,cst[3]) + ROUND(m2,cst[12],m12,cst[2],m6,cst[10],m10,cst[6],m0,cst[11],m11,cst[0],m8,cst[3],m3,cst[8],m4,cst[13],m13,cst[4],m7,cst[5],m5,cst[7],m15,cst[14],m14,cst[15],m1,cst[9],m9,cst[1]) + ROUND(m12,cst[5],m5,cst[12],m1,cst[15],m15,cst[1],m14,cst[13],m13,cst[14],m4,cst[10],m10,cst[4],m0,cst[7],m7,cst[0],m6,cst[3],m3,cst[6],m9,cst[2],m2,cst[9],m8,cst[11],m11,cst[8]) + ROUND(m13,cst[11],m11,cst[13],m7,cst[14],m14,cst[7],m12,cst[1],m1,cst[12],m3,cst[9],m9,cst[3],m5,cst[0],m0,cst[5],m15,cst[4],m4,cst[15],m8,cst[6],m6,cst[8],m2,cst[10],m10,cst[2]) + ROUND(m6,cst[15],m15,cst[6],m14,cst[9],m9,cst[14],m11,cst[3],m3,cst[11],m0,cst[8],m8,cst[0],m12,cst[2],m2,cst[12],m13,cst[7],m7,cst[13],m1,cst[4],m4,cst[1],m10,cst[5],m5,cst[10]) + ROUND(m10,cst[2],m2,cst[10],m8,cst[4],m4,cst[8],m7,cst[6],m6,cst[7],m1,cst[5],m5,cst[1],m15,cst[11],m11,cst[15],m9,cst[14],m14,cst[9],m3,cst[12],m12,cst[3],m13,cst[0],m0,cst[13]) + ROUND(m0,cst[1],m1,cst[0],m2,cst[3],m3,cst[2],m4,cst[5],m5,cst[4],m6,cst[7],m7,cst[6],m8,cst[9],m9,cst[8],m10,cst[11],m11,cst[10],m12,cst[13],m13,cst[12],m14,cst[15],m15,cst[14]) + ROUND(m14,cst[10],m10,cst[14],m4,cst[8],m8,cst[4],m9,cst[15],m15,cst[9],m13,cst[6],m6,cst[13],m1,cst[12],m12,cst[1],m0,cst[2],m2,cst[0],m11,cst[7],m7,cst[11],m5,cst[3],m3,cst[5]) + ROUND(m11,cst[8],m8,cst[11],m12,cst[0],m0,cst[12],m5,cst[2],m2,cst[5],m15,cst[13],m13,cst[15],m10,cst[14],m14,cst[10],m3,cst[6],m6,cst[3],m7,cst[1],m1,cst[7],m9,cst[4],m4,cst[9]) + ROUND(m7,cst[9],m9,cst[7],m3,cst[1],m1,cst[3],m13,cst[12],m12,cst[13],m11,cst[14],m14,cst[11],m2,cst[6],m6,cst[2],m5,cst[10],m10,cst[5],m4,cst[0],m0,cst[4],m15,cst[8],m8,cst[15]) + 
ROUND(m9,cst[0],m0,cst[9],m5,cst[7],m7,cst[5],m2,cst[4],m4,cst[2],m10,cst[15],m15,cst[10],m14,cst[1],m1,cst[14],m11,cst[12],m12,cst[11],m6,cst[8],m8,cst[6],m3,cst[13],m13,cst[3]) + ROUND(m2,cst[12],m12,cst[2],m6,cst[10],m10,cst[6],m0,cst[11],m11,cst[0],m8,cst[3],m3,cst[8],m4,cst[13],m13,cst[4],m7,cst[5],m5,cst[7],m15,cst[14],m14,cst[15],m1,cst[9],m9,cst[1]) + + v0 ^= v8; + v1 ^= v9; + v2 ^= v10; + v3 ^= v11; + v4 ^= v12; + v5 ^= v13; + v6 ^= v14; + v7 ^= v15; + + v0 ^= S->s[0]; + v1 ^= S->s[1]; + v2 ^= S->s[2]; + v3 ^= S->s[3]; + v4 ^= S->s[0]; + v5 ^= S->s[1]; + v6 ^= S->s[2]; + v7 ^= S->s[3]; + + S->h[0] ^= v0; + S->h[1] ^= v1; + S->h[2] ^= v2; + S->h[3] ^= v3; + S->h[4] ^= v4; + S->h[5] ^= v5; + S->h[6] ^= v6; + S->h[7] ^= v7; +} + + +void blake512_init( blakestate512 *S ) { + S->h[0]=0x6A09E667F3BCC908ULL; + S->h[1]=0xBB67AE8584CAA73BULL; + S->h[2]=0x3C6EF372FE94F82BULL; + S->h[3]=0xA54FF53A5F1D36F1ULL; + S->h[4]=0x510E527FADE682D1ULL; + S->h[5]=0x9B05688C2B3E6C1FULL; + S->h[6]=0x1F83D9ABFB41BD6BULL; + S->h[7]=0x5BE0CD19137E2179ULL; + S->t[0]=S->t[1]=S->buflen=S->nullt=0; + S->s[0]=S->s[1]=S->s[2]=S->s[3] =0; +} + + +void blake512_update( blakestate512 * S, const u8 * data, u64 datalen ) { + + int left = (S->buflen >> 3); + int fill = 128 - left; + + if( left && ( ((datalen >> 3) & 0x7F) >= fill ) ) { + memcpy( (void *) (S->buf + left), (void *) data, fill ); + S->t[0] += 1024; + blake512_compress( S, S->buf ); + data += fill; + datalen -= (fill << 3); + left = 0; + } + + while( datalen >= 1024 ) { + S->t[0] += 1024; + blake512_compress( S, data ); + data += 128; + datalen -= 1024; + } + + if( datalen > 0 ) { + memcpy( (void *) (S->buf + left), (void *) data, ( datalen>>3 ) & 0x7F ); + S->buflen = (left<<3) + datalen; + } + else S->buflen=0; +} + + +void blake512_final( blakestate512 * S, u8 * digest ) { + + u8 msglen[16], zo=0x01,oo=0x81; + u64 lo=S->t[0] + S->buflen, hi = S->t[1]; + if ( lo < S->buflen ) hi++; + U64TO8( msglen + 0, hi ); + U64TO8( msglen + 8, 
lo ); + + if ( S->buflen == 888 ) { /* one padding byte */ + S->t[0] -= 8; + blake512_update( S, &oo, 8 ); + } + else { + if ( S->buflen < 888 ) { /* enough space to fill the block */ + if ( S->buflen == 0 ) S->nullt=1; + S->t[0] -= 888 - S->buflen; + blake512_update( S, padding, 888 - S->buflen ); + } + else { /* NOT enough space, need 2 compressions */ + S->t[0] -= 1024 - S->buflen; + blake512_update( S, padding, 1024 - S->buflen ); + S->t[0] -= 888; + blake512_update( S, padding+1, 888 ); + S->nullt = 1; + } + blake512_update( S, &zo, 8 ); + S->t[0] -= 8; + } + S->t[0] -= 128; + blake512_update( S, msglen, 128 ); + + U64TO8( digest + 0, S->h[0]); + U64TO8( digest + 8, S->h[1]); + U64TO8( digest +16, S->h[2]); + U64TO8( digest +24, S->h[3]); + U64TO8( digest +32, S->h[4]); + U64TO8( digest +40, S->h[5]); + U64TO8( digest +48, S->h[6]); + U64TO8( digest +56, S->h[7]); +} + +void blake512_mgf1(unsigned char *out, unsigned long outlen, + const unsigned char *in, unsigned long inlen) +{ + SPX_VLA(uint8_t, inbuf, inlen+4); + unsigned char outbuf[SPX_BLAKE512_OUTPUT_BYTES]; + unsigned long i; + + memcpy(inbuf, in, inlen); + + /* While we can fit in at least another full block of BLAKE512 output.. */ + for (i = 0; (i+1)*SPX_BLAKE512_OUTPUT_BYTES <= outlen; i++) { + u32_to_bytes(inbuf + inlen, i); + blake512(out, inbuf, inlen + 4); + out += SPX_BLAKE512_OUTPUT_BYTES; + } + /* Until we cannot anymore, and we fill the remainder. 
*/ + if (outlen > i*SPX_BLAKE512_OUTPUT_BYTES) { + u32_to_bytes(inbuf + inlen, i); + blake512(outbuf, inbuf, inlen + 4); + memcpy(out, outbuf, outlen - i*SPX_BLAKE512_OUTPUT_BYTES); + } +} + +int blake512( unsigned char *out, const unsigned char *in, unsigned long long inlen ) { + + blakestate512 S; + blake512_init( &S ); + blake512_update( &S, in, inlen*8 ); + blake512_final( &S, out ); + return 0; +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/hash_blake.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/hash_blake.c new file mode 100644 index 0000000..5df5960 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/hash_blake.c @@ -0,0 +1,123 @@ +#include +#include + +#include "../../../app/include/address.h" +#include "../../../app/include/hash.h" +#include "../../../app/include/params.h" +#include "../../../app/include/utils.h" + +#include "../include/blake.h" + +#if SPX_N >= 24 +#define SPX_BLAKEX_OUTPUT_BYTES SPX_BLAKE512_OUTPUT_BYTES +#define blakeX blake512 +#define blakestateX blakestate512 +#define blakeX_init blake512_init +#define blakeX_update blake512_update +#define blakeX_final blake512_final +#define blakeX_mgf1 blake512_mgf1 +#else +#define SPX_BLAKEX_OUTPUT_BYTES SPX_BLAKE256_OUTPUT_BYTES +#define blakeX blake256 +#define blakestateX blakestate256 +#define blakeX_init blake256_init +#define blakeX_update blake256_update +#define blakeX_final blake256_final +#define blakeX_mgf1 blake256_mgf1 +#endif + +void initialize_hash_function(spx_ctx *ctx) +{ + (void)ctx; +} + +/** + * Computes PRF(key, addr), given a secret key of SPX_N bytes and an address + */ +void prf_addr(unsigned char *out, const spx_ctx *ctx, + const uint32_t addr[8]) +{ + unsigned char buf[2*SPX_N + SPX_ADDR_BYTES] = {0}; + unsigned char outbuf[SPX_BLAKE256_OUTPUT_BYTES] = {0}; + + memcpy(buf, ctx->pub_seed, SPX_N); + memcpy(buf + SPX_N, addr, SPX_ADDR_BYTES); + memcpy(buf + 
SPX_N + SPX_ADDR_BYTES, ctx->sk_seed, SPX_N); + + blake256(outbuf, buf, SPX_N + SPX_ADDR_BYTES); + + memcpy(out, outbuf, SPX_N); +} + +/** + * Computes the message-dependent randomness R, using a secret seed and an + * optional randomization value as well as the message. + */ +void gen_message_random(unsigned char *R, const unsigned char *sk_prf, + const unsigned char *optrand, + const unsigned char *m, unsigned long long mlen, + const spx_ctx *ctx) +{ + (void)ctx; + blakestateX S; + + blakeX_init(&S); + blakeX_update(&S, sk_prf, SPX_N); + blakeX_update(&S, optrand, SPX_N); + blakeX_update(&S, m, mlen); + blakeX_final(&S, R); +} + +/** + * Computes the message hash using R, the public key, and the message. + * Outputs the message digest and the index of the leaf. The index is split in + * the tree index and the leaf index, for convenient copying to an address. + */ +void hash_message(unsigned char *digest, uint64_t *tree, uint32_t *leaf_idx, + const unsigned char *R, const unsigned char *pk, + const unsigned char *m, unsigned long long mlen, + const spx_ctx *ctx) +{ + (void)ctx; +#define SPX_TREE_BITS (SPX_TREE_HEIGHT * (SPX_D - 1)) +#define SPX_TREE_BYTES ((SPX_TREE_BITS + 7) / 8) +#define SPX_LEAF_BITS SPX_TREE_HEIGHT +#define SPX_LEAF_BYTES ((SPX_LEAF_BITS + 7) / 8) +#define SPX_DGST_BYTES (SPX_FORS_MSG_BYTES + SPX_TREE_BYTES + SPX_LEAF_BYTES) + + unsigned char buf[SPX_DGST_BYTES]; + unsigned char *bufp = buf; + unsigned char seed[2*SPX_N + SPX_BLAKEX_OUTPUT_BYTES]; + + blakestateX S; + blakeX_init(&S); + + blakeX_update(&S, R, SPX_N); + blakeX_update(&S, pk, SPX_PK_BYTES); + blakeX_update(&S, m, mlen); + + blakeX_final(&S, seed+ 2 * SPX_N); + + memcpy(seed, R, SPX_N); + memcpy(seed + SPX_N, pk, SPX_N); + + blakeX_mgf1(bufp, SPX_DGST_BYTES, seed, 2*SPX_N + SPX_BLAKEX_OUTPUT_BYTES); + + memcpy(digest, bufp, SPX_FORS_MSG_BYTES); + bufp += SPX_FORS_MSG_BYTES; + +#if SPX_TREE_BITS > 64 +#error For given height and depth, 64 bits cannot represent all subtrees +#endif 
+ + if (SPX_D == 1) { + *tree = 0; + } else { + *tree = bytes_to_ull(bufp, SPX_TREE_BYTES); + *tree &= (~(uint64_t)0) >> (64 - SPX_TREE_BITS); + } + bufp += SPX_TREE_BYTES; + + *leaf_idx = (uint32_t)bytes_to_ull(bufp, SPX_LEAF_BYTES); + *leaf_idx &= (~(uint32_t)0) >> (32 - SPX_LEAF_BITS); +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/thash_blake_robust.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/thash_blake_robust.c new file mode 100644 index 0000000..477d206 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/thash_blake_robust.c @@ -0,0 +1,67 @@ +#include +#include + +#include "../../../app/include/address.h" +#include "../../../app/include/params.h" +#include "../../../app/include/thash.h" +#include "../../../app/include/utils.h" + +#include "../include/blake.h" + +#if SPX_BLAKE512 +static void thash_512(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]); +#endif + +/** + * Takes an array of inblocks concatenated arrays of SPX_N bytes. 
+ */ +void thash(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]) +{ +#if SPX_BLAKE512 + if (inblocks > 1) { + thash_512(out, in, inblocks, ctx, addr); + return; + } +#endif + unsigned char outbuf[SPX_BLAKE256_OUTPUT_BYTES]; + SPX_VLA(uint8_t, bitmask, inblocks * SPX_N); + SPX_VLA(uint8_t, buf, SPX_N + SPX_ADDR_BYTES + inblocks*SPX_N); + unsigned int i; + + memcpy(buf, ctx->pub_seed, SPX_N); + memcpy(buf + SPX_N, addr, SPX_ADDR_BYTES); + + blake256_mgf1(bitmask, inblocks * SPX_N, buf, SPX_N + SPX_ADDR_BYTES); + + for (i = 0; i < inblocks * SPX_N; i++) { + buf[SPX_N + SPX_ADDR_BYTES + i] = in[i] ^ bitmask[i]; + } + + blake256(outbuf, buf + SPX_N, SPX_ADDR_BYTES + inblocks*SPX_N); + memcpy(out, outbuf, SPX_N); +} + +#if SPX_BLAKE512 +static void thash_512(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]) +{ + unsigned char outbuf[SPX_BLAKE512_OUTPUT_BYTES]; + SPX_VLA(uint8_t, bitmask, inblocks * SPX_N); + SPX_VLA(uint8_t, buf, SPX_N + SPX_ADDR_BYTES + inblocks*SPX_N); + unsigned int i; + + memcpy(buf, ctx->pub_seed, SPX_N); + memcpy(buf + SPX_N, addr, SPX_ADDR_BYTES); + + blake512_mgf1(bitmask, inblocks * SPX_N, buf, SPX_N + SPX_ADDR_BYTES); + + for (i = 0; i < inblocks * SPX_N; i++) { + buf[SPX_N + SPX_ADDR_BYTES + i] = in[i] ^ bitmask[i]; + } + + blake512(outbuf, buf + SPX_N, SPX_ADDR_BYTES + inblocks*SPX_N); + memcpy(out, outbuf, SPX_N); +} +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/thash_blake_simple.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/thash_blake_simple.c new file mode 100644 index 0000000..18636bc --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/blake/src/thash_blake_simple.c @@ -0,0 +1,53 @@ +#include +#include + +#include "../../../app/include/address.h" +#include "../../../app/include/params.h" +#include 
"../../../app/include/thash.h" +#include "../../../app/include/utils.h" + +#include "../include/blake.h" + +#if SPX_BLAKE512 +static void thash_512(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]); +#endif + +/** + * Takes an array of inblocks concatenated arrays of SPX_N bytes. + */ +void thash(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]) +{ +#if SPX_BLAKE512 + if (inblocks > 1) { + thash_512(out, in, inblocks, ctx, addr); + return; + } +#endif + unsigned char outbuf[SPX_BLAKE256_OUTPUT_BYTES]; + SPX_VLA(uint8_t, buf, SPX_N + SPX_ADDR_BYTES + inblocks*SPX_N); + + memcpy(buf, ctx->pub_seed, SPX_N); + memcpy(buf + SPX_N, addr, SPX_ADDR_BYTES); + memcpy(buf + SPX_N + SPX_ADDR_BYTES, in, inblocks * SPX_N); + + blake256(outbuf, buf, SPX_N + SPX_ADDR_BYTES + inblocks*SPX_N); + memcpy(out, outbuf, SPX_N); +} + +#if SPX_BLAKE512 +static void thash_512(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]) +{ + unsigned char outbuf[SPX_BLAKE512_OUTPUT_BYTES]; + SPX_VLA(uint8_t, buf, SPX_N + SPX_ADDR_BYTES + inblocks*SPX_N); + + memcpy(buf, ctx->pub_seed, SPX_N); + memcpy(buf + SPX_N, addr, SPX_ADDR_BYTES); + memcpy(buf + SPX_N + SPX_ADDR_BYTES, in, inblocks * SPX_N); + + blake512(outbuf, buf, SPX_N + SPX_ADDR_BYTES + inblocks*SPX_N); + memcpy(out, outbuf, SPX_N); +} +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/CMakeLists.txt b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/CMakeLists.txt new file mode 100644 index 0000000..f569a03 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/CMakeLists.txt @@ -0,0 +1,11 @@ +set(HARAKA_SOURCES + src/haraka.c + src/hash_haraka.c + src/thash_haraka_${THASH}.c +) + +set(PARAMS "sphincs-haraka-${SECPAR}") +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} 
-DPARAMS=${PARAMS}") + +add_library(haraka SHARED ${HARAKA_SOURCES}) +target_include_directories(haraka PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include) diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/include/haraka.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/include/haraka.h new file mode 100644 index 0000000..7478222 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/include/haraka.h @@ -0,0 +1,41 @@ +#ifndef SPX_HARAKA_H +#define SPX_HARAKA_H + +#include "../../../app/include/context.h" + +/* Tweak constants with seed */ +#define tweak_constants SPX_NAMESPACE(tweak_constants) +void tweak_constants(spx_ctx *ctx); + +/* Haraka Sponge */ +#define haraka_S_inc_init SPX_NAMESPACE(haraka_S_inc_init) +void haraka_S_inc_init(uint8_t *s_inc); +#define haraka_S_inc_absorb SPX_NAMESPACE(haraka_S_inc_absorb) +void haraka_S_inc_absorb(uint8_t *s_inc, const uint8_t *m, size_t mlen, + const spx_ctx *ctx); +#define haraka_S_inc_finalize SPX_NAMESPACE(haraka_S_inc_finalize) +void haraka_S_inc_finalize(uint8_t *s_inc); +#define haraka_S_inc_squeeze SPX_NAMESPACE(haraka_S_inc_squeeze) +void haraka_S_inc_squeeze(uint8_t *out, size_t outlen, uint8_t *s_inc, + const spx_ctx *ctx); +#define haraka_S SPX_NAMESPACE(haraka_S) +void haraka_S(unsigned char *out, unsigned long long outlen, + const unsigned char *in, unsigned long long inlen, + const spx_ctx *ctx); + +/* Applies the 512-bit Haraka permutation to in. 
*/ +#define haraka512_perm SPX_NAMESPACE(haraka512_perm) +void haraka512_perm(unsigned char *out, const unsigned char *in, + const spx_ctx *ctx); + +/* Implementation of Haraka-512 */ +#define haraka512 SPX_NAMESPACE(haraka512) +void haraka512(unsigned char *out, const unsigned char *in, + const spx_ctx *ctx); + +/* Implementation of Haraka-256 */ +#define haraka256 SPX_NAMESPACE(haraka256) +void haraka256(unsigned char *out, const unsigned char *in, + const spx_ctx *ctx); + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/include/haraka_offsets.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/include/haraka_offsets.h new file mode 100644 index 0000000..6afa5f8 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/include/haraka_offsets.h @@ -0,0 +1,20 @@ +#if !defined( HARAKA_OFFSETS_H_ ) +#define HARAKA_OFFSETS_H_ + +/* + * Offsets of various fields in the address structure when we use Haraka as + * the Sphincs+ hash function + */ + +#define SPX_OFFSET_LAYER 3 /* The byte used to specify the Merkle tree layer */ +#define SPX_OFFSET_TREE 8 /* The start of the 8 byte field used to specify the tree */ +#define SPX_OFFSET_TYPE 19 /* The byte used to specify the hash type (reason) */ +#define SPX_OFFSET_KP_ADDR 20 /* The start of the 4 byte field used to specify the key pair address */ +#define SPX_OFFSET_CHAIN_ADDR 27 /* The byte used to specify the chain address (which Winternitz chain) */ +#define SPX_OFFSET_HASH_ADDR 31 /* The byte used to specify the hash address (where in the Winternitz chain) */ +#define SPX_OFFSET_TREE_HGT 27 /* The byte used to specify the height of this node in the FORS or Merkle tree */ +#define SPX_OFFSET_TREE_INDEX 28 /* The start of the 4 byte field used to specify the node in the FORS or Merkle tree */ + +#define SPX_HARAKA 1 + +#endif /* HARAKA_OFFSETS_H_ */ diff --git 
a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/src/haraka.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/src/haraka.c new file mode 100644 index 0000000..718908b --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/src/haraka.c @@ -0,0 +1,965 @@ +/* + * Constant time implementation of the Haraka hash function. + * + * The bit-sliced implementation of the AES round functions are + * based on the AES implementation in BearSSL written + * by Thomas Pornin , licensed as follows: + * + * Copyright (c) 2016 Thomas Pornin + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS + * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN + * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +#include +#include +#include +#include + +#include "../include/haraka.h" +#include "../../../app/include/utils.h" + +#define HARAKAS_RATE 32 + +static const uint64_t haraka512_rc64[10][8] = { + {0x24cf0ab9086f628b, 0xbdd6eeecc83b8382, 0xd96fb0306cdad0a7, 0xaace082ac8f95f89, 0x449d8e8870d7041f, 0x49bb2f80b2b3e2f8, 0x0569ae98d93bb258, 0x23dc9691e7d6a4b1}, + {0xd8ba10ede0fe5b6e, 0x7ecf7dbe424c7b8e, 0x6ea9949c6df62a31, 0xbf3f3c97ec9c313e, 0x241d03a196a1861e, 0xead3a51116e5a2ea, 0x77d479fcad9574e3, 0x18657a1af894b7a0}, + {0x10671e1a7f595522, 0xd9a00ff675d28c7b, 0x2f1edf0d2b9ba661, 0xb8ff58b8e3de45f9, 0xee29261da9865c02, 0xd1532aa4b50bdf43, 0x8bf858159b231bb1, 0xdf17439d22d4f599}, + {0xdd4b2f0870b918c0, 0x757a81f3b39b1bb6, 0x7a5c556898952e3f, 0x7dd70a16d915d87a, 0x3ae61971982b8301, 0xc3ab319e030412be, 0x17c0033ac094a8cb, 0x5a0630fc1a8dc4ef}, + {0x17708988c1632f73, 0xf92ddae090b44f4f, 0x11ac0285c43aa314, 0x509059941936b8ba, 0xd03e152fa2ce9b69, 0x3fbcbcb63a32998b, 0x6204696d692254f7, 0x915542ed93ec59b4}, + {0xf4ed94aa8879236e, 0xff6cb41cd38e03c0, 0x069b38602368aeab, 0x669495b820f0ddba, 0xf42013b1b8bf9e3d, 0xcf935efe6439734d, 0xbc1dcf42ca29e3f8, 0x7e6d3ed29f78ad67}, + {0xf3b0f6837ffcddaa, 0x3a76faef934ddf41, 0xcec7ae583a9c8e35, 0xe4dd18c68f0260af, 0x2c0e5df1ad398eaa, 0x478df5236ae22e8c, 0xfb944c46fe865f39, 0xaa48f82f028132ba}, + {0x231b9ae2b76aca77, 0x292a76a712db0b40, 0x5850625dc8134491, 0x73137dd469810fb5, 0x8a12a6a202a474fd, 0xd36fd9daa78bdb80, 0xb34c5e733505706f, 0xbaf1cdca818d9d96}, + {0x2e99781335e8c641, 0xbddfe5cce47d560e, 0xf74e9bf32e5e040c, 0x1d7a709d65996be9, 0x670df36a9cf66cdd, 0xd05ef84a176a2875, 0x0f888e828cb1c44e, 0x1a79e9c9727b052c}, + {0x83497348628d84de, 0x2e9387d51f22a754, 0xb000068da2f852d6, 0x378c9e1190fd6fe5, 0x870027c316de7293, 0xe51a9d4462e047bb, 0x90ecf7f8c6251195, 0x655953bfbed90a9c}, +}; + +static inline uint32_t br_dec32le(const unsigned char *src) +{ + return (uint32_t)src[0] + | ((uint32_t)src[1] << 8) + | ((uint32_t)src[2] << 16) + | 
((uint32_t)src[3] << 24); +} + +static void br_range_dec32le(uint32_t *v, size_t num, const unsigned char *src) +{ + while (num-- > 0) { + *v ++ = br_dec32le(src); + src += 4; + } +} + +static inline void br_enc32le(unsigned char *dst, uint32_t x) +{ + dst[0] = (unsigned char)x; + dst[1] = (unsigned char)(x >> 8); + dst[2] = (unsigned char)(x >> 16); + dst[3] = (unsigned char)(x >> 24); +} + + +static void br_range_enc32le(unsigned char *dst, const uint32_t *v, size_t num) +{ + while (num-- > 0) { + br_enc32le(dst, *v ++); + dst += 4; + } +} + +static void br_aes_ct64_bitslice_Sbox(uint64_t *q) { + /* + * This S-box implementation is a straightforward translation of + * the circuit described by Boyar and Peralta in "A new + * combinational logic minimization technique with applications + * to cryptology" (https://eprint.iacr.org/2009/191.pdf). + * + * Note that variables x* (input) and s* (output) are numbered + * in "reverse" order (x0 is the high bit, x7 is the low bit). + */ + + uint64_t x0, x1, x2, x3, x4, x5, x6, x7; + uint64_t y1, y2, y3, y4, y5, y6, y7, y8, y9; + uint64_t y10, y11, y12, y13, y14, y15, y16, y17, y18, y19; + uint64_t y20, y21; + uint64_t z0, z1, z2, z3, z4, z5, z6, z7, z8, z9; + uint64_t z10, z11, z12, z13, z14, z15, z16, z17; + uint64_t t0, t1, t2, t3, t4, t5, t6, t7, t8, t9; + uint64_t t10, t11, t12, t13, t14, t15, t16, t17, t18, t19; + uint64_t t20, t21, t22, t23, t24, t25, t26, t27, t28, t29; + uint64_t t30, t31, t32, t33, t34, t35, t36, t37, t38, t39; + uint64_t t40, t41, t42, t43, t44, t45, t46, t47, t48, t49; + uint64_t t50, t51, t52, t53, t54, t55, t56, t57, t58, t59; + uint64_t t60, t61, t62, t63, t64, t65, t66, t67; + uint64_t s0, s1, s2, s3, s4, s5, s6, s7; + + x0 = q[7]; + x1 = q[6]; + x2 = q[5]; + x3 = q[4]; + x4 = q[3]; + x5 = q[2]; + x6 = q[1]; + x7 = q[0]; + + /* + * Top linear transformation. 
+ */ + y14 = x3 ^ x5; + y13 = x0 ^ x6; + y9 = x0 ^ x3; + y8 = x0 ^ x5; + t0 = x1 ^ x2; + y1 = t0 ^ x7; + y4 = y1 ^ x3; + y12 = y13 ^ y14; + y2 = y1 ^ x0; + y5 = y1 ^ x6; + y3 = y5 ^ y8; + t1 = x4 ^ y12; + y15 = t1 ^ x5; + y20 = t1 ^ x1; + y6 = y15 ^ x7; + y10 = y15 ^ t0; + y11 = y20 ^ y9; + y7 = x7 ^ y11; + y17 = y10 ^ y11; + y19 = y10 ^ y8; + y16 = t0 ^ y11; + y21 = y13 ^ y16; + y18 = x0 ^ y16; + + /* + * Non-linear section. + */ + t2 = y12 & y15; + t3 = y3 & y6; + t4 = t3 ^ t2; + t5 = y4 & x7; + t6 = t5 ^ t2; + t7 = y13 & y16; + t8 = y5 & y1; + t9 = t8 ^ t7; + t10 = y2 & y7; + t11 = t10 ^ t7; + t12 = y9 & y11; + t13 = y14 & y17; + t14 = t13 ^ t12; + t15 = y8 & y10; + t16 = t15 ^ t12; + t17 = t4 ^ t14; + t18 = t6 ^ t16; + t19 = t9 ^ t14; + t20 = t11 ^ t16; + t21 = t17 ^ y20; + t22 = t18 ^ y19; + t23 = t19 ^ y21; + t24 = t20 ^ y18; + + t25 = t21 ^ t22; + t26 = t21 & t23; + t27 = t24 ^ t26; + t28 = t25 & t27; + t29 = t28 ^ t22; + t30 = t23 ^ t24; + t31 = t22 ^ t26; + t32 = t31 & t30; + t33 = t32 ^ t24; + t34 = t23 ^ t33; + t35 = t27 ^ t33; + t36 = t24 & t35; + t37 = t36 ^ t34; + t38 = t27 ^ t36; + t39 = t29 & t38; + t40 = t25 ^ t39; + + t41 = t40 ^ t37; + t42 = t29 ^ t33; + t43 = t29 ^ t40; + t44 = t33 ^ t37; + t45 = t42 ^ t41; + z0 = t44 & y15; + z1 = t37 & y6; + z2 = t33 & x7; + z3 = t43 & y16; + z4 = t40 & y1; + z5 = t29 & y7; + z6 = t42 & y11; + z7 = t45 & y17; + z8 = t41 & y10; + z9 = t44 & y12; + z10 = t37 & y3; + z11 = t33 & y4; + z12 = t43 & y13; + z13 = t40 & y5; + z14 = t29 & y2; + z15 = t42 & y9; + z16 = t45 & y14; + z17 = t41 & y8; + + /* + * Bottom linear transformation. 
+ */ + t46 = z15 ^ z16; + t47 = z10 ^ z11; + t48 = z5 ^ z13; + t49 = z9 ^ z10; + t50 = z2 ^ z12; + t51 = z2 ^ z5; + t52 = z7 ^ z8; + t53 = z0 ^ z3; + t54 = z6 ^ z7; + t55 = z16 ^ z17; + t56 = z12 ^ t48; + t57 = t50 ^ t53; + t58 = z4 ^ t46; + t59 = z3 ^ t54; + t60 = t46 ^ t57; + t61 = z14 ^ t57; + t62 = t52 ^ t58; + t63 = t49 ^ t58; + t64 = z4 ^ t59; + t65 = t61 ^ t62; + t66 = z1 ^ t63; + s0 = t59 ^ t63; + s6 = t56 ^ ~t62; + s7 = t48 ^ ~t60; + t67 = t64 ^ t65; + s3 = t53 ^ t66; + s4 = t51 ^ t66; + s5 = t47 ^ t65; + s1 = t64 ^ ~s3; + s2 = t55 ^ ~t67; + + q[7] = s0; + q[6] = s1; + q[5] = s2; + q[4] = s3; + q[3] = s4; + q[2] = s5; + q[1] = s6; + q[0] = s7; +} + +static void br_aes_ct_bitslice_Sbox(uint32_t *q) +{ + /* + * This S-box implementation is a straightforward translation of + * the circuit described by Boyar and Peralta in "A new + * combinational logic minimization technique with applications + * to cryptology" (https://eprint.iacr.org/2009/191.pdf). + * + * Note that variables x* (input) and s* (output) are numbered + * in "reverse" order (x0 is the high bit, x7 is the low bit). + */ + + uint32_t x0, x1, x2, x3, x4, x5, x6, x7; + uint32_t y1, y2, y3, y4, y5, y6, y7, y8, y9; + uint32_t y10, y11, y12, y13, y14, y15, y16, y17, y18, y19; + uint32_t y20, y21; + uint32_t z0, z1, z2, z3, z4, z5, z6, z7, z8, z9; + uint32_t z10, z11, z12, z13, z14, z15, z16, z17; + uint32_t t0, t1, t2, t3, t4, t5, t6, t7, t8, t9; + uint32_t t10, t11, t12, t13, t14, t15, t16, t17, t18, t19; + uint32_t t20, t21, t22, t23, t24, t25, t26, t27, t28, t29; + uint32_t t30, t31, t32, t33, t34, t35, t36, t37, t38, t39; + uint32_t t40, t41, t42, t43, t44, t45, t46, t47, t48, t49; + uint32_t t50, t51, t52, t53, t54, t55, t56, t57, t58, t59; + uint32_t t60, t61, t62, t63, t64, t65, t66, t67; + uint32_t s0, s1, s2, s3, s4, s5, s6, s7; + + x0 = q[7]; + x1 = q[6]; + x2 = q[5]; + x3 = q[4]; + x4 = q[3]; + x5 = q[2]; + x6 = q[1]; + x7 = q[0]; + + /* + * Top linear transformation. 
+ */ + y14 = x3 ^ x5; + y13 = x0 ^ x6; + y9 = x0 ^ x3; + y8 = x0 ^ x5; + t0 = x1 ^ x2; + y1 = t0 ^ x7; + y4 = y1 ^ x3; + y12 = y13 ^ y14; + y2 = y1 ^ x0; + y5 = y1 ^ x6; + y3 = y5 ^ y8; + t1 = x4 ^ y12; + y15 = t1 ^ x5; + y20 = t1 ^ x1; + y6 = y15 ^ x7; + y10 = y15 ^ t0; + y11 = y20 ^ y9; + y7 = x7 ^ y11; + y17 = y10 ^ y11; + y19 = y10 ^ y8; + y16 = t0 ^ y11; + y21 = y13 ^ y16; + y18 = x0 ^ y16; + + /* + * Non-linear section. + */ + t2 = y12 & y15; + t3 = y3 & y6; + t4 = t3 ^ t2; + t5 = y4 & x7; + t6 = t5 ^ t2; + t7 = y13 & y16; + t8 = y5 & y1; + t9 = t8 ^ t7; + t10 = y2 & y7; + t11 = t10 ^ t7; + t12 = y9 & y11; + t13 = y14 & y17; + t14 = t13 ^ t12; + t15 = y8 & y10; + t16 = t15 ^ t12; + t17 = t4 ^ t14; + t18 = t6 ^ t16; + t19 = t9 ^ t14; + t20 = t11 ^ t16; + t21 = t17 ^ y20; + t22 = t18 ^ y19; + t23 = t19 ^ y21; + t24 = t20 ^ y18; + + t25 = t21 ^ t22; + t26 = t21 & t23; + t27 = t24 ^ t26; + t28 = t25 & t27; + t29 = t28 ^ t22; + t30 = t23 ^ t24; + t31 = t22 ^ t26; + t32 = t31 & t30; + t33 = t32 ^ t24; + t34 = t23 ^ t33; + t35 = t27 ^ t33; + t36 = t24 & t35; + t37 = t36 ^ t34; + t38 = t27 ^ t36; + t39 = t29 & t38; + t40 = t25 ^ t39; + + t41 = t40 ^ t37; + t42 = t29 ^ t33; + t43 = t29 ^ t40; + t44 = t33 ^ t37; + t45 = t42 ^ t41; + z0 = t44 & y15; + z1 = t37 & y6; + z2 = t33 & x7; + z3 = t43 & y16; + z4 = t40 & y1; + z5 = t29 & y7; + z6 = t42 & y11; + z7 = t45 & y17; + z8 = t41 & y10; + z9 = t44 & y12; + z10 = t37 & y3; + z11 = t33 & y4; + z12 = t43 & y13; + z13 = t40 & y5; + z14 = t29 & y2; + z15 = t42 & y9; + z16 = t45 & y14; + z17 = t41 & y8; + + /* + * Bottom linear transformation. 
+ */ + t46 = z15 ^ z16; + t47 = z10 ^ z11; + t48 = z5 ^ z13; + t49 = z9 ^ z10; + t50 = z2 ^ z12; + t51 = z2 ^ z5; + t52 = z7 ^ z8; + t53 = z0 ^ z3; + t54 = z6 ^ z7; + t55 = z16 ^ z17; + t56 = z12 ^ t48; + t57 = t50 ^ t53; + t58 = z4 ^ t46; + t59 = z3 ^ t54; + t60 = t46 ^ t57; + t61 = z14 ^ t57; + t62 = t52 ^ t58; + t63 = t49 ^ t58; + t64 = z4 ^ t59; + t65 = t61 ^ t62; + t66 = z1 ^ t63; + s0 = t59 ^ t63; + s6 = t56 ^ ~t62; + s7 = t48 ^ ~t60; + t67 = t64 ^ t65; + s3 = t53 ^ t66; + s4 = t51 ^ t66; + s5 = t47 ^ t65; + s1 = t64 ^ ~s3; + s2 = t55 ^ ~t67; + + q[7] = s0; + q[6] = s1; + q[5] = s2; + q[4] = s3; + q[3] = s4; + q[2] = s5; + q[1] = s6; + q[0] = s7; +} + +static void br_aes_ct_ortho(uint32_t *q) +{ +#define SWAPN_32(cl, ch, s, x, y) do { \ + uint32_t a, b; \ + a = (x); \ + b = (y); \ + (x) = (a & (uint32_t)cl) | ((b & (uint32_t)cl) << (s)); \ + (y) = ((a & (uint32_t)ch) >> (s)) | (b & (uint32_t)ch); \ + } while (0) + +#define SWAP2_32(x, y) SWAPN_32(0x55555555, 0xAAAAAAAA, 1, x, y) +#define SWAP4_32(x, y) SWAPN_32(0x33333333, 0xCCCCCCCC, 2, x, y) +#define SWAP8_32(x, y) SWAPN_32(0x0F0F0F0F, 0xF0F0F0F0, 4, x, y) + + SWAP2_32(q[0], q[1]); + SWAP2_32(q[2], q[3]); + SWAP2_32(q[4], q[5]); + SWAP2_32(q[6], q[7]); + + SWAP4_32(q[0], q[2]); + SWAP4_32(q[1], q[3]); + SWAP4_32(q[4], q[6]); + SWAP4_32(q[5], q[7]); + + SWAP8_32(q[0], q[4]); + SWAP8_32(q[1], q[5]); + SWAP8_32(q[2], q[6]); + SWAP8_32(q[3], q[7]); +} + +static inline void add_round_key32(uint32_t *q, const uint32_t *sk) +{ + q[0] ^= sk[0]; + q[1] ^= sk[1]; + q[2] ^= sk[2]; + q[3] ^= sk[3]; + q[4] ^= sk[4]; + q[5] ^= sk[5]; + q[6] ^= sk[6]; + q[7] ^= sk[7]; +} + +static inline void shift_rows32(uint32_t *q) +{ + int i; + + for (i = 0; i < 8; i++) { + uint32_t x; + + x = q[i]; + q[i] = (x & 0x000000FF) + | ((x & 0x0000FC00) >> 2) | ((x & 0x00000300) << 6) + | ((x & 0x00F00000) >> 4) | ((x & 0x000F0000) << 4) + | ((x & 0xC0000000) >> 6) | ((x & 0x3F000000) << 2); + } +} + +static inline uint32_t rotr16(uint32_t 
x) +{ + return (x << 16) | (x >> 16); +} + +static inline void mix_columns32(uint32_t *q) +{ + uint32_t q0, q1, q2, q3, q4, q5, q6, q7; + uint32_t r0, r1, r2, r3, r4, r5, r6, r7; + + q0 = q[0]; + q1 = q[1]; + q2 = q[2]; + q3 = q[3]; + q4 = q[4]; + q5 = q[5]; + q6 = q[6]; + q7 = q[7]; + r0 = (q0 >> 8) | (q0 << 24); + r1 = (q1 >> 8) | (q1 << 24); + r2 = (q2 >> 8) | (q2 << 24); + r3 = (q3 >> 8) | (q3 << 24); + r4 = (q4 >> 8) | (q4 << 24); + r5 = (q5 >> 8) | (q5 << 24); + r6 = (q6 >> 8) | (q6 << 24); + r7 = (q7 >> 8) | (q7 << 24); + + q[0] = q7 ^ r7 ^ r0 ^ rotr16(q0 ^ r0); + q[1] = q0 ^ r0 ^ q7 ^ r7 ^ r1 ^ rotr16(q1 ^ r1); + q[2] = q1 ^ r1 ^ r2 ^ rotr16(q2 ^ r2); + q[3] = q2 ^ r2 ^ q7 ^ r7 ^ r3 ^ rotr16(q3 ^ r3); + q[4] = q3 ^ r3 ^ q7 ^ r7 ^ r4 ^ rotr16(q4 ^ r4); + q[5] = q4 ^ r4 ^ r5 ^ rotr16(q5 ^ r5); + q[6] = q5 ^ r5 ^ r6 ^ rotr16(q6 ^ r6); + q[7] = q6 ^ r6 ^ r7 ^ rotr16(q7 ^ r7); +} + +static void br_aes_ct64_ortho(uint64_t *q) +{ +#define SWAPN(cl, ch, s, x, y) do { \ + uint64_t a, b; \ + a = (x); \ + b = (y); \ + (x) = (a & (uint64_t)(cl)) | ((b & (uint64_t)(cl)) << (s)); \ + (y) = ((a & (uint64_t)(ch)) >> (s)) | (b & (uint64_t)(ch)); \ + } while (0) + +#define SWAP2(x, y) SWAPN(0x5555555555555555, 0xAAAAAAAAAAAAAAAA, 1, x, y) +#define SWAP4(x, y) SWAPN(0x3333333333333333, 0xCCCCCCCCCCCCCCCC, 2, x, y) +#define SWAP8(x, y) SWAPN(0x0F0F0F0F0F0F0F0F, 0xF0F0F0F0F0F0F0F0, 4, x, y) + + SWAP2(q[0], q[1]); + SWAP2(q[2], q[3]); + SWAP2(q[4], q[5]); + SWAP2(q[6], q[7]); + + SWAP4(q[0], q[2]); + SWAP4(q[1], q[3]); + SWAP4(q[4], q[6]); + SWAP4(q[5], q[7]); + + SWAP8(q[0], q[4]); + SWAP8(q[1], q[5]); + SWAP8(q[2], q[6]); + SWAP8(q[3], q[7]); +} + + +static void br_aes_ct64_interleave_in(uint64_t *q0, uint64_t *q1, const uint32_t *w) +{ + uint64_t x0, x1, x2, x3; + + x0 = w[0]; + x1 = w[1]; + x2 = w[2]; + x3 = w[3]; + x0 |= (x0 << 16); + x1 |= (x1 << 16); + x2 |= (x2 << 16); + x3 |= (x3 << 16); + x0 &= (uint64_t)0x0000FFFF0000FFFF; + x1 &= (uint64_t)0x0000FFFF0000FFFF; + x2 &= 
(uint64_t)0x0000FFFF0000FFFF; + x3 &= (uint64_t)0x0000FFFF0000FFFF; + x0 |= (x0 << 8); + x1 |= (x1 << 8); + x2 |= (x2 << 8); + x3 |= (x3 << 8); + x0 &= (uint64_t)0x00FF00FF00FF00FF; + x1 &= (uint64_t)0x00FF00FF00FF00FF; + x2 &= (uint64_t)0x00FF00FF00FF00FF; + x3 &= (uint64_t)0x00FF00FF00FF00FF; + *q0 = x0 | (x2 << 8); + *q1 = x1 | (x3 << 8); +} + + +static void br_aes_ct64_interleave_out(uint32_t *w, uint64_t q0, uint64_t q1) +{ + uint64_t x0, x1, x2, x3; + + x0 = q0 & (uint64_t)0x00FF00FF00FF00FF; + x1 = q1 & (uint64_t)0x00FF00FF00FF00FF; + x2 = (q0 >> 8) & (uint64_t)0x00FF00FF00FF00FF; + x3 = (q1 >> 8) & (uint64_t)0x00FF00FF00FF00FF; + x0 |= (x0 >> 8); + x1 |= (x1 >> 8); + x2 |= (x2 >> 8); + x3 |= (x3 >> 8); + x0 &= (uint64_t)0x0000FFFF0000FFFF; + x1 &= (uint64_t)0x0000FFFF0000FFFF; + x2 &= (uint64_t)0x0000FFFF0000FFFF; + x3 &= (uint64_t)0x0000FFFF0000FFFF; + w[0] = (uint32_t)x0 | (uint32_t)(x0 >> 16); + w[1] = (uint32_t)x1 | (uint32_t)(x1 >> 16); + w[2] = (uint32_t)x2 | (uint32_t)(x2 >> 16); + w[3] = (uint32_t)x3 | (uint32_t)(x3 >> 16); +} + +static inline void add_round_key(uint64_t *q, const uint64_t *sk) +{ + q[0] ^= sk[0]; + q[1] ^= sk[1]; + q[2] ^= sk[2]; + q[3] ^= sk[3]; + q[4] ^= sk[4]; + q[5] ^= sk[5]; + q[6] ^= sk[6]; + q[7] ^= sk[7]; +} + +static inline void shift_rows(uint64_t *q) +{ + int i; + + for (i = 0; i < 8; i++) { + uint64_t x; + + x = q[i]; + q[i] = (x & (uint64_t)0x000000000000FFFF) + | ((x & (uint64_t)0x00000000FFF00000) >> 4) + | ((x & (uint64_t)0x00000000000F0000) << 12) + | ((x & (uint64_t)0x0000FF0000000000) >> 8) + | ((x & (uint64_t)0x000000FF00000000) << 8) + | ((x & (uint64_t)0xF000000000000000) >> 12) + | ((x & (uint64_t)0x0FFF000000000000) << 4); + } +} + +static inline uint64_t rotr32(uint64_t x) +{ + return (x << 32) | (x >> 32); +} + +static inline void mix_columns(uint64_t *q) +{ + uint64_t q0, q1, q2, q3, q4, q5, q6, q7; + uint64_t r0, r1, r2, r3, r4, r5, r6, r7; + + q0 = q[0]; + q1 = q[1]; + q2 = q[2]; + q3 = q[3]; + q4 = 
q[4]; + q5 = q[5]; + q6 = q[6]; + q7 = q[7]; + r0 = (q0 >> 16) | (q0 << 48); + r1 = (q1 >> 16) | (q1 << 48); + r2 = (q2 >> 16) | (q2 << 48); + r3 = (q3 >> 16) | (q3 << 48); + r4 = (q4 >> 16) | (q4 << 48); + r5 = (q5 >> 16) | (q5 << 48); + r6 = (q6 >> 16) | (q6 << 48); + r7 = (q7 >> 16) | (q7 << 48); + + q[0] = q7 ^ r7 ^ r0 ^ rotr32(q0 ^ r0); + q[1] = q0 ^ r0 ^ q7 ^ r7 ^ r1 ^ rotr32(q1 ^ r1); + q[2] = q1 ^ r1 ^ r2 ^ rotr32(q2 ^ r2); + q[3] = q2 ^ r2 ^ q7 ^ r7 ^ r3 ^ rotr32(q3 ^ r3); + q[4] = q3 ^ r3 ^ q7 ^ r7 ^ r4 ^ rotr32(q4 ^ r4); + q[5] = q4 ^ r4 ^ r5 ^ rotr32(q5 ^ r5); + q[6] = q5 ^ r5 ^ r6 ^ rotr32(q6 ^ r6); + q[7] = q6 ^ r6 ^ r7 ^ rotr32(q7 ^ r7); +} + +static void interleave_constant(uint64_t *out, const unsigned char *in) +{ + uint32_t tmp_32_constant[16]; + int i; + + br_range_dec32le(tmp_32_constant, 16, in); + for (i = 0; i < 4; i++) { + br_aes_ct64_interleave_in(&out[i], &out[i + 4], tmp_32_constant + (i << 2)); + } + br_aes_ct64_ortho(out); +} + +static void interleave_constant32(uint32_t *out, const unsigned char *in) +{ + int i; + for (i = 0; i < 4; i++) { + out[2*i] = br_dec32le(in + 4*i); + out[2*i + 1] = br_dec32le(in + 4*i + 16); + } + br_aes_ct_ortho(out); +} + +void tweak_constants(spx_ctx *ctx) +{ + unsigned char buf[40*16]; + int i; + + /* Use the standard constants to generate tweaked ones. 
*/ + memcpy((uint8_t *)ctx->tweaked512_rc64, (uint8_t *)haraka512_rc64, 40*16); + + /* Constants for pk.seed */ + haraka_S(buf, 40*16, ctx->pub_seed, SPX_N, ctx); + for (i = 0; i < 10; i++) { + interleave_constant32(ctx->tweaked256_rc32[i], buf + 32*i); + interleave_constant(ctx->tweaked512_rc64[i], buf + 64*i); + } +} + +static void haraka_S_absorb(unsigned char *s, unsigned int r, + const unsigned char *m, unsigned long long mlen, + unsigned char p, const spx_ctx *ctx) +{ + unsigned long long i; + SPX_VLA(uint8_t, t, r); + + while (mlen >= r) { + /* XOR block to state */ + for (i = 0; i < r; ++i) { + s[i] ^= m[i]; + } + haraka512_perm(s, s, ctx); + mlen -= r; + m += r; + } + + for (i = 0; i < r; ++i) { + t[i] = 0; + } + for (i = 0; i < mlen; ++i) { + t[i] = m[i]; + } + t[i] = p; + t[r - 1] |= 128; + for (i = 0; i < r; ++i) { + s[i] ^= t[i]; + } +} + +static void haraka_S_squeezeblocks(unsigned char *h, unsigned long long nblocks, + unsigned char *s, unsigned int r, + const spx_ctx *ctx) +{ + while (nblocks > 0) { + haraka512_perm(s, s, ctx); + memcpy(h, s, HARAKAS_RATE); + h += r; + nblocks--; + } +} + +void haraka_S_inc_init(uint8_t *s_inc) +{ + size_t i; + + for (i = 0; i < 64; i++) { + s_inc[i] = 0; + } + s_inc[64] = 0; +} + +void haraka_S_inc_absorb(uint8_t *s_inc, const uint8_t *m, size_t mlen, + const spx_ctx *ctx) +{ + size_t i; + + /* Recall that s_inc[64] is the non-absorbed bytes xored into the state */ + while (mlen + s_inc[64] >= HARAKAS_RATE) { + for (i = 0; i < (size_t)(HARAKAS_RATE - s_inc[64]); i++) { + /* Take the i'th byte from message + xor with the s_inc[64] + i'th byte of the state */ + s_inc[s_inc[64] + i] ^= m[i]; + } + mlen -= (size_t)(HARAKAS_RATE - s_inc[64]); + m += HARAKAS_RATE - (uint8_t)s_inc[64]; + s_inc[64] = 0; + + haraka512_perm(s_inc, s_inc, ctx); + } + + for (i = 0; i < mlen; i++) { + s_inc[s_inc[64] + i] ^= m[i]; + } + s_inc[64] += (uint8_t)mlen; +} + +void haraka_S_inc_finalize(uint8_t *s_inc) +{ + /* After 
haraka_S_inc_absorb, we are guaranteed that s_inc[64] < HARAKAS_RATE, + so we can always use one more byte for p in the current state. */ + s_inc[s_inc[64]] ^= 0x1F; + s_inc[HARAKAS_RATE - 1] ^= 128; + s_inc[64] = 0; +} + +void haraka_S_inc_squeeze(uint8_t *out, size_t outlen, uint8_t *s_inc, + const spx_ctx *ctx) +{ + size_t i; + + /* First consume any bytes we still have sitting around */ + for (i = 0; i < outlen && i < s_inc[64]; i++) { + /* There are s_inc[64] bytes left, so r - s_inc[64] is the first + available byte. We consume from there, i.e., up to r. */ + out[i] = (uint8_t)s_inc[(HARAKAS_RATE - s_inc[64] + i)]; + } + out += i; + outlen -= i; + s_inc[64] -= (uint8_t)i; + + /* Then squeeze the remaining necessary blocks */ + while (outlen > 0) { + haraka512_perm(s_inc, s_inc, ctx); + + for (i = 0; i < outlen && i < HARAKAS_RATE; i++) { + out[i] = s_inc[i]; + } + out += i; + outlen -= i; + s_inc[64] = (uint8_t)(HARAKAS_RATE - i); + } +} + +void haraka_S(unsigned char *out, unsigned long long outlen, + const unsigned char *in, unsigned long long inlen, + const spx_ctx *ctx) +{ + unsigned long long i; + unsigned char s[64]; + unsigned char d[32]; + + for (i = 0; i < 64; i++) { + s[i] = 0; + } + haraka_S_absorb(s, 32, in, inlen, 0x1F, ctx); + + haraka_S_squeezeblocks(out, outlen / 32, s, 32, ctx); + out += (outlen / 32) * 32; + + if (outlen % 32) { + haraka_S_squeezeblocks(d, 1, s, 32, ctx); + for (i = 0; i < outlen % 32; i++) { + out[i] = d[i]; + } + } +} + +void haraka512_perm(unsigned char *out, const unsigned char *in, + const spx_ctx *ctx) +{ + uint32_t w[16]; + uint64_t q[8], tmp_q; + unsigned int i, j; + + br_range_dec32le(w, 16, in); + for (i = 0; i < 4; i++) { + br_aes_ct64_interleave_in(&q[i], &q[i + 4], w + (i << 2)); + } + br_aes_ct64_ortho(q); + + /* AES rounds */ + for (i = 0; i < 5; i++) { + for (j = 0; j < 2; j++) { + br_aes_ct64_bitslice_Sbox(q); + shift_rows(q); + mix_columns(q); + add_round_key(q, ctx->tweaked512_rc64[2*i + j]); + } + /* Mix 
states */ + for (j = 0; j < 8; j++) { + tmp_q = q[j]; + q[j] = (tmp_q & 0x0001000100010001) << 5 | + (tmp_q & 0x0002000200020002) << 12 | + (tmp_q & 0x0004000400040004) >> 1 | + (tmp_q & 0x0008000800080008) << 6 | + (tmp_q & 0x0020002000200020) << 9 | + (tmp_q & 0x0040004000400040) >> 4 | + (tmp_q & 0x0080008000800080) << 3 | + (tmp_q & 0x2100210021002100) >> 5 | + (tmp_q & 0x0210021002100210) << 2 | + (tmp_q & 0x0800080008000800) << 4 | + (tmp_q & 0x1000100010001000) >> 12 | + (tmp_q & 0x4000400040004000) >> 10 | + (tmp_q & 0x8400840084008400) >> 3; + } + } + + br_aes_ct64_ortho(q); + for (i = 0; i < 4; i ++) { + br_aes_ct64_interleave_out(w + (i << 2), q[i], q[i + 4]); + } + br_range_enc32le(out, w, 16); +} + +void haraka512(unsigned char *out, const unsigned char *in, const spx_ctx *ctx) +{ + int i; + + unsigned char buf[64]; + + haraka512_perm(buf, in, ctx); + /* Feed-forward */ + for (i = 0; i < 64; i++) { + buf[i] = buf[i] ^ in[i]; + } + + /* Truncated */ + memcpy(out, buf + 8, 8); + memcpy(out + 8, buf + 24, 8); + memcpy(out + 16, buf + 32, 8); + memcpy(out + 24, buf + 48, 8); +} + + +void haraka256(unsigned char *out, const unsigned char *in, + const spx_ctx *ctx) +{ + uint32_t q[8], tmp_q; + int i, j; + + for (i = 0; i < 4; i++) { + q[2*i] = br_dec32le(in + 4*i); + q[2*i + 1] = br_dec32le(in + 4*i + 16); + } + br_aes_ct_ortho(q); + + /* AES rounds */ + for (i = 0; i < 5; i++) { + for (j = 0; j < 2; j++) { + br_aes_ct_bitslice_Sbox(q); + shift_rows32(q); + mix_columns32(q); + add_round_key32(q, ctx->tweaked256_rc32[2*i + j]); + } + + /* Mix states */ + for (j = 0; j < 8; j++) { + tmp_q = q[j]; + q[j] = (tmp_q & 0x81818181) | + (tmp_q & 0x02020202) << 1 | + (tmp_q & 0x04040404) << 2 | + (tmp_q & 0x08080808) << 3 | + (tmp_q & 0x10101010) >> 3 | + (tmp_q & 0x20202020) >> 2 | + (tmp_q & 0x40404040) >> 1; + } + } + + br_aes_ct_ortho(q); + for (i = 0; i < 4; i++) { + br_enc32le(out + 4*i, q[2*i]); + br_enc32le(out + 4*i + 16, q[2*i + 1]); + } + + for (i = 0; i < 
32; i++) { + out[i] ^= in[i]; + } +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/src/hash_haraka.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/src/hash_haraka.c new file mode 100644 index 0000000..cd6beac --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/src/hash_haraka.c @@ -0,0 +1,96 @@ +#include +#include + +#include "../../../app/include/address.h" +#include "../../../app/include/hash.h" +#include "../../../app/include/params.h" +#include "../../../app/include/utils.h" + +#include "../include/haraka.h" + +void initialize_hash_function(spx_ctx* ctx) +{ + tweak_constants(ctx); +} + +/* + * Computes PRF(key, addr), given a secret key of SPX_N bytes and an address + */ +void prf_addr(unsigned char *out, const spx_ctx *ctx, + const uint32_t addr[8]) +{ + /* Since SPX_N may be smaller than 32, we need temporary buffers. */ + unsigned char outbuf[32]; + unsigned char buf[64] = {0}; + + memcpy(buf, addr, SPX_ADDR_BYTES); + memcpy(buf + SPX_ADDR_BYTES, ctx->sk_seed, SPX_N); + + haraka512(outbuf, buf, ctx); + memcpy(out, outbuf, SPX_N); +} + +/** + * Computes the message-dependent randomness R, using a secret seed and an + * optional randomization value as well as the message. + */ +void gen_message_random(unsigned char *R, const unsigned char* sk_prf, + const unsigned char *optrand, + const unsigned char *m, unsigned long long mlen, + const spx_ctx *ctx) +{ + uint8_t s_inc[65]; + + haraka_S_inc_init(s_inc); + haraka_S_inc_absorb(s_inc, sk_prf, SPX_N, ctx); + haraka_S_inc_absorb(s_inc, optrand, SPX_N, ctx); + haraka_S_inc_absorb(s_inc, m, mlen, ctx); + haraka_S_inc_finalize(s_inc); + haraka_S_inc_squeeze(R, SPX_N, s_inc, ctx); +} + +/** + * Computes the message hash using R, the public key, and the message. + * Outputs the message digest and the index of the leaf. The index is split in + * the tree index and the leaf index, for convenient copying to an address. 
+ */ +void hash_message(unsigned char *digest, uint64_t *tree, uint32_t *leaf_idx, + const unsigned char *R, const unsigned char *pk, + const unsigned char *m, unsigned long long mlen, + const spx_ctx *ctx) +{ +#define SPX_TREE_BITS (SPX_TREE_HEIGHT * (SPX_D - 1)) +#define SPX_TREE_BYTES ((SPX_TREE_BITS + 7) / 8) +#define SPX_LEAF_BITS SPX_TREE_HEIGHT +#define SPX_LEAF_BYTES ((SPX_LEAF_BITS + 7) / 8) +#define SPX_DGST_BYTES (SPX_FORS_MSG_BYTES + SPX_TREE_BYTES + SPX_LEAF_BYTES) + + unsigned char buf[SPX_DGST_BYTES]; + unsigned char *bufp = buf; + uint8_t s_inc[65]; + + haraka_S_inc_init(s_inc); + haraka_S_inc_absorb(s_inc, R, SPX_N, ctx); + haraka_S_inc_absorb(s_inc, pk + SPX_N, SPX_N, ctx); // Only absorb root part of pk + haraka_S_inc_absorb(s_inc, m, mlen, ctx); + haraka_S_inc_finalize(s_inc); + haraka_S_inc_squeeze(buf, SPX_DGST_BYTES, s_inc, ctx); + + memcpy(digest, bufp, SPX_FORS_MSG_BYTES); + bufp += SPX_FORS_MSG_BYTES; + +#if SPX_TREE_BITS > 64 + #error For given height and depth, 64 bits cannot represent all subtrees +#endif + + if (SPX_D == 1) { + *tree = 0; + } else { + *tree = bytes_to_ull(bufp, SPX_TREE_BYTES); + *tree &= (~(uint64_t)0) >> (64 - SPX_TREE_BITS); + } + bufp += SPX_TREE_BYTES; + + *leaf_idx = (uint32_t)bytes_to_ull(bufp, SPX_LEAF_BYTES); + *leaf_idx &= (~(uint32_t)0) >> (32 - SPX_LEAF_BITS); +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/src/thash_haraka_robust.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/src/thash_haraka_robust.c new file mode 100644 index 0000000..b540bdc --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/src/thash_haraka_robust.c @@ -0,0 +1,46 @@ +#include +#include + +#include "../../../app/include/address.h" +#include "../../../app/include/thash.h" +#include "../../../app/include/params.h" +#include "../../../app/include/utils.h" + +#include "../include/haraka.h" + +/** + * Takes an array of inblocks 
concatenated arrays of SPX_N bytes. + */ +void thash(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]) +{ + SPX_VLA(uint8_t, buf, SPX_ADDR_BYTES + inblocks*SPX_N); + SPX_VLA(uint8_t, bitmask, inblocks*SPX_N); + unsigned char outbuf[32]; + unsigned char buf_tmp[64]; + unsigned int i; + + if (inblocks == 1) { + /* F function */ + /* Since SPX_N may be smaller than 32, we need a temporary buffer. */ + memset(buf_tmp, 0, 64); + memcpy(buf_tmp, addr, 32); + + haraka256(outbuf, buf_tmp, ctx); + for (i = 0; i < inblocks * SPX_N; i++) { + buf_tmp[SPX_ADDR_BYTES + i] = in[i] ^ outbuf[i]; + } + haraka512(outbuf, buf_tmp, ctx); + memcpy(out, outbuf, SPX_N); + } else { + /* All other tweakable hashes*/ + memcpy(buf, addr, 32); + haraka_S(bitmask, inblocks * SPX_N, buf, SPX_ADDR_BYTES, ctx); + + for (i = 0; i < inblocks * SPX_N; i++) { + buf[SPX_ADDR_BYTES + i] = in[i] ^ bitmask[i]; + } + + haraka_S(out, SPX_N, buf, SPX_ADDR_BYTES + inblocks*SPX_N, ctx); + } +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/src/thash_haraka_simple.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/src/thash_haraka_simple.c new file mode 100644 index 0000000..43b14c7 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/haraka/src/thash_haraka_simple.c @@ -0,0 +1,37 @@ +#include +#include + +#include "../../../app/include/address.h" +#include "../../../app/include/thash.h" +#include "../../../app/include/params.h" +#include "../../../app/include/utils.h" + +#include "../include/haraka.h" + +/** + * Takes an array of inblocks concatenated arrays of SPX_N bytes. 
+ */ +void thash(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]) +{ + SPX_VLA(uint8_t, buf, SPX_ADDR_BYTES + inblocks*SPX_N); + unsigned char outbuf[32]; + unsigned char buf_tmp[64]; + + if (inblocks == 1) { + /* F function */ + /* Since SPX_N may be smaller than 32, we need a temporary buffer. */ + memset(buf_tmp, 0, 64); + memcpy(buf_tmp, addr, 32); + memcpy(buf_tmp + SPX_ADDR_BYTES, in, SPX_N); + + haraka512(outbuf, buf_tmp, ctx); + memcpy(out, outbuf, SPX_N); + } else { + /* All other tweakable hashes*/ + memcpy(buf, addr, 32); + memcpy(buf + SPX_ADDR_BYTES, in, inblocks * SPX_N); + + haraka_S(out, SPX_N, buf, SPX_ADDR_BYTES + inblocks*SPX_N, ctx); + } +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/CMakeLists.txt b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/CMakeLists.txt new file mode 100644 index 0000000..bf65803 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/CMakeLists.txt @@ -0,0 +1,12 @@ +set(SHA2_SOURCES + src/sha2.c + src/hash_sha2.c + ../../app/src/utils.c + src/thash_sha2_${THASH}.c +) + +set(PARAMS "sphincs-sha2-${SECPAR}") +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DPARAMS=${PARAMS}") + +add_library(sha2 SHARED ${SHA2_SOURCES}) +target_include_directories(sha2 PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include) diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/include/sha2.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/include/sha2.h new file mode 100644 index 0000000..83038e2 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/include/sha2.h @@ -0,0 +1,44 @@ +#ifndef SPX_SHA2_H +#define SPX_SHA2_H + +#include "../../../app/include/params.h" +#include "../../../app/include/context.h" + +#define SPX_SHA256_BLOCK_BYTES 64 +#define SPX_SHA256_OUTPUT_BYTES 32 /* This does not necessarily equal SPX_N */ + 
+#define SPX_SHA512_BLOCK_BYTES 128 +#define SPX_SHA512_OUTPUT_BYTES 64 + +#if SPX_SHA256_OUTPUT_BYTES < SPX_N + #error Linking against SHA-256 with N larger than 32 bytes is not supported +#endif + +#define SPX_SHA256_ADDR_BYTES 22 + +#include +#include + +void sha256_inc_init(uint8_t *state); +void sha256_inc_blocks(uint8_t *state, const uint8_t *in, size_t inblocks); +void sha256_inc_finalize(uint8_t *out, uint8_t *state, const uint8_t *in, size_t inlen); +void sha256(uint8_t *out, const uint8_t *in, size_t inlen); + +void sha512_inc_init(uint8_t *state); +void sha512_inc_blocks(uint8_t *state, const uint8_t *in, size_t inblocks); +void sha512_inc_finalize(uint8_t *out, uint8_t *state, const uint8_t *in, size_t inlen); +void sha512(uint8_t *out, const uint8_t *in, size_t inlen); + +#define mgf1_256 SPX_NAMESPACE(mgf1_256) +void mgf1_256(unsigned char *out, unsigned long outlen, + const unsigned char *in, unsigned long inlen); + +#define mgf1_512 SPX_NAMESPACE(mgf1_512) +void mgf1_512(unsigned char *out, unsigned long outlen, + const unsigned char *in, unsigned long inlen); + +#define seed_state SPX_NAMESPACE(seed_state) +void seed_state(spx_ctx *ctx); + + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/include/sha2_offsets.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/include/sha2_offsets.h new file mode 100644 index 0000000..49f7e85 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/include/sha2_offsets.h @@ -0,0 +1,20 @@ +#ifndef SHA2_OFFSETS_H_ +#define SHA2_OFFSETS_H_ + +/* + * Offsets of various fields in the address structure when we use SHA2 as + * the Sphincs+ hash function + */ + +#define SPX_OFFSET_LAYER 0 /* The byte used to specify the Merkle tree layer */ +#define SPX_OFFSET_TREE 1 /* The start of the 8 byte field used to specify the tree */ +#define SPX_OFFSET_TYPE 9 /* The byte used to specify the hash type (reason) */ +#define 
SPX_OFFSET_KP_ADDR 10 /* The start of the 4 byte field used to specify the key pair address */ +#define SPX_OFFSET_CHAIN_ADDR 17 /* The byte used to specify the chain address (which Winternitz chain) */ +#define SPX_OFFSET_HASH_ADDR 21 /* The byte used to specify the hash address (where in the Winternitz chain) */ +#define SPX_OFFSET_TREE_HGT 17 /* The byte used to specify the height of this node in the FORS or Merkle tree */ +#define SPX_OFFSET_TREE_INDEX 18 /* The start of the 4 byte field used to specify the node in the FORS or Merkle tree */ + +#define SPX_SHA2 1 + +#endif /* SHA2_OFFSETS_H_ */ diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/src/hash_sha2.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/src/hash_sha2.c new file mode 100644 index 0000000..e84b8a3 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/src/hash_sha2.c @@ -0,0 +1,198 @@ +#include +#include + +#include "../../../app/include/address.h" +#include "../../../app/include/hash.h" +#include "../../../app/include/params.h" +#include "../../../app/include/utils.h" + +#include "../include/sha2.h" + +#if SPX_N >= 24 +#define SPX_SHAX_OUTPUT_BYTES SPX_SHA512_OUTPUT_BYTES +#define SPX_SHAX_BLOCK_BYTES SPX_SHA512_BLOCK_BYTES +#define shaX_inc_init sha512_inc_init +#define shaX_inc_blocks sha512_inc_blocks +#define shaX_inc_finalize sha512_inc_finalize +#define shaX sha512 +#define mgf1_X mgf1_512 +#else +#define SPX_SHAX_OUTPUT_BYTES SPX_SHA256_OUTPUT_BYTES +#define SPX_SHAX_BLOCK_BYTES SPX_SHA256_BLOCK_BYTES +#define shaX_inc_init sha256_inc_init +#define shaX_inc_blocks sha256_inc_blocks +#define shaX_inc_finalize sha256_inc_finalize +#define shaX sha256 +#define mgf1_X mgf1_256 +#endif + + +/* For SHA, there is no immediate reason to initialize at the start, + so this function is an empty operation. 
*/ +void initialize_hash_function(spx_ctx *ctx) +{ + seed_state(ctx); +} + +/* + * Computes PRF(pk_seed, sk_seed, addr). + */ +void prf_addr(unsigned char *out, const spx_ctx *ctx, + const uint32_t addr[8]) +{ + uint8_t sha2_state[40]; + unsigned char buf[SPX_SHA256_ADDR_BYTES + SPX_N]; + unsigned char outbuf[SPX_SHA256_OUTPUT_BYTES]; + + /* Retrieve precomputed state containing pub_seed */ + memcpy(sha2_state, ctx->state_seeded, 40 * sizeof(uint8_t)); + + /* Remainder: ADDR^c ‖ SK.seed */ + memcpy(buf, addr, SPX_SHA256_ADDR_BYTES); + memcpy(buf + SPX_SHA256_ADDR_BYTES, ctx->sk_seed, SPX_N); + + sha256_inc_finalize(outbuf, sha2_state, buf, SPX_SHA256_ADDR_BYTES + SPX_N); + + memcpy(out, outbuf, SPX_N); +} + +/** + * Computes the message-dependent randomness R, using a secret seed as a key + * for HMAC, and an optional randomization value prefixed to the message. + * This requires m to have at least SPX_SHAX_BLOCK_BYTES + SPX_N space + * available in front of the pointer, i.e. before the message to use for the + * prefix. This is necessary to prevent having to move the message around (and + * allocate memory for it). 
+ */ +void gen_message_random(unsigned char *R, const unsigned char *sk_prf, + const unsigned char *optrand, + const unsigned char *m, unsigned long long mlen, + const spx_ctx *ctx) +{ + (void)ctx; + + unsigned char buf[SPX_SHAX_BLOCK_BYTES + SPX_SHAX_OUTPUT_BYTES]; + uint8_t state[8 + SPX_SHAX_OUTPUT_BYTES]; + int i; + +#if SPX_N > SPX_SHAX_BLOCK_BYTES + #error "Currently only supports SPX_N of at most SPX_SHAX_BLOCK_BYTES" +#endif + + /* This implements HMAC-SHA */ + for (i = 0; i < SPX_N; i++) { + buf[i] = 0x36 ^ sk_prf[i]; + } + memset(buf + SPX_N, 0x36, SPX_SHAX_BLOCK_BYTES - SPX_N); + + shaX_inc_init(state); + shaX_inc_blocks(state, buf, 1); + + memcpy(buf, optrand, SPX_N); + + /* If optrand + message cannot fill up an entire block */ + if (SPX_N + mlen < SPX_SHAX_BLOCK_BYTES) { + memcpy(buf + SPX_N, m, mlen); + shaX_inc_finalize(buf + SPX_SHAX_BLOCK_BYTES, state, + buf, mlen + SPX_N); + } + /* Otherwise first fill a block, so that finalize only uses the message */ + else { + memcpy(buf + SPX_N, m, SPX_SHAX_BLOCK_BYTES - SPX_N); + shaX_inc_blocks(state, buf, 1); + + m += SPX_SHAX_BLOCK_BYTES - SPX_N; + mlen -= SPX_SHAX_BLOCK_BYTES - SPX_N; + shaX_inc_finalize(buf + SPX_SHAX_BLOCK_BYTES, state, m, mlen); + } + + for (i = 0; i < SPX_N; i++) { + buf[i] = 0x5c ^ sk_prf[i]; + } + memset(buf + SPX_N, 0x5c, SPX_SHAX_BLOCK_BYTES - SPX_N); + + shaX(buf, buf, SPX_SHAX_BLOCK_BYTES + SPX_SHAX_OUTPUT_BYTES); + memcpy(R, buf, SPX_N); +} + +/** + * Computes the message hash using R, the public key, and the message. + * Outputs the message digest and the index of the leaf. The index is split in + * the tree index and the leaf index, for convenient copying to an address. 
+ */ +void hash_message(unsigned char *digest, uint64_t *tree, uint32_t *leaf_idx, + const unsigned char *R, const unsigned char *pk, + const unsigned char *m, unsigned long long mlen, + const spx_ctx *ctx) +{ + (void)ctx; +#define SPX_TREE_BITS (SPX_TREE_HEIGHT * (SPX_D - 1)) +#define SPX_TREE_BYTES ((SPX_TREE_BITS + 7) / 8) +#define SPX_LEAF_BITS SPX_TREE_HEIGHT +#define SPX_LEAF_BYTES ((SPX_LEAF_BITS + 7) / 8) +#define SPX_DGST_BYTES (SPX_FORS_MSG_BYTES + SPX_TREE_BYTES + SPX_LEAF_BYTES) + + unsigned char seed[2*SPX_N + SPX_SHAX_OUTPUT_BYTES]; + + /* Round to nearest multiple of SPX_SHAX_BLOCK_BYTES */ +#if (SPX_SHAX_BLOCK_BYTES & (SPX_SHAX_BLOCK_BYTES-1)) != 0 + #error "Assumes that SPX_SHAX_BLOCK_BYTES is a power of 2" +#endif +#define SPX_INBLOCKS (((SPX_N + SPX_PK_BYTES + SPX_SHAX_BLOCK_BYTES - 1) & \ + -SPX_SHAX_BLOCK_BYTES) / SPX_SHAX_BLOCK_BYTES) + unsigned char inbuf[SPX_INBLOCKS * SPX_SHAX_BLOCK_BYTES]; + + unsigned char buf[SPX_DGST_BYTES]; + unsigned char *bufp = buf; + uint8_t state[8 + SPX_SHAX_OUTPUT_BYTES]; + + shaX_inc_init(state); + + // seed: SHA-X(R ‖ PK.seed ‖ PK.root ‖ M) + memcpy(inbuf, R, SPX_N); + memcpy(inbuf + SPX_N, pk, SPX_PK_BYTES); + + /* If R + pk + message cannot fill up an entire block */ + if (SPX_N + SPX_PK_BYTES + mlen < SPX_INBLOCKS * SPX_SHAX_BLOCK_BYTES) { + memcpy(inbuf + SPX_N + SPX_PK_BYTES, m, mlen); + shaX_inc_finalize(seed + 2*SPX_N, state, inbuf, SPX_N + SPX_PK_BYTES + mlen); + } + /* Otherwise first fill a block, so that finalize only uses the message */ + else { + memcpy(inbuf + SPX_N + SPX_PK_BYTES, m, + SPX_INBLOCKS * SPX_SHAX_BLOCK_BYTES - SPX_N - SPX_PK_BYTES); + shaX_inc_blocks(state, inbuf, SPX_INBLOCKS); + + m += SPX_INBLOCKS * SPX_SHAX_BLOCK_BYTES - SPX_N - SPX_PK_BYTES; + mlen -= SPX_INBLOCKS * SPX_SHAX_BLOCK_BYTES - SPX_N - SPX_PK_BYTES; + shaX_inc_finalize(seed + 2*SPX_N, state, m, mlen); + } + + // H_msg: MGF1-SHA-X(R ‖ PK.seed ‖ seed) + memcpy(seed, R, SPX_N); + memcpy(seed + SPX_N, pk, SPX_N); + + /* 
By doing this in two steps, we prevent hashing the message twice; + otherwise each iteration in MGF1 would hash the message again. */ + mgf1_X(bufp, SPX_DGST_BYTES, seed, 2*SPX_N + SPX_SHAX_OUTPUT_BYTES); + + memcpy(digest, bufp, SPX_FORS_MSG_BYTES); + bufp += SPX_FORS_MSG_BYTES; + +#if SPX_TREE_BITS > 64 + #error For given height and depth, 64 bits cannot represent all subtrees +#endif + + if (SPX_D == 1) { + *tree = 0; + } else { + *tree = bytes_to_ull(bufp, SPX_TREE_BYTES); + *tree &= (~(uint64_t)0) >> (64 - SPX_TREE_BITS); + } + bufp += SPX_TREE_BYTES; + + *leaf_idx = (uint32_t)bytes_to_ull(bufp, SPX_LEAF_BYTES); + *leaf_idx &= (~(uint32_t)0) >> (32 - SPX_LEAF_BITS); +} + + diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/src/sha2.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/src/sha2.c new file mode 100644 index 0000000..3d311eb --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/src/sha2.c @@ -0,0 +1,701 @@ +/* Based on the public domain implementation in + * crypto_hash/sha512/ref/ from http://bench.cr.yp.to/supercop.html + * by D. J. 
Bernstein */ + +#include +#include +#include + +#include "../../../app/include/utils.h" + +#include "../include/sha2.h" + +static uint32_t load_bigendian_32(const uint8_t *x) { + return (uint32_t)(x[3]) | (((uint32_t)(x[2])) << 8) | + (((uint32_t)(x[1])) << 16) | (((uint32_t)(x[0])) << 24); +} + +static uint64_t load_bigendian_64(const uint8_t *x) { + return (uint64_t)(x[7]) | (((uint64_t)(x[6])) << 8) | + (((uint64_t)(x[5])) << 16) | (((uint64_t)(x[4])) << 24) | + (((uint64_t)(x[3])) << 32) | (((uint64_t)(x[2])) << 40) | + (((uint64_t)(x[1])) << 48) | (((uint64_t)(x[0])) << 56); +} + +static void store_bigendian_32(uint8_t *x, uint64_t u) { + x[3] = (uint8_t) u; + u >>= 8; + x[2] = (uint8_t) u; + u >>= 8; + x[1] = (uint8_t) u; + u >>= 8; + x[0] = (uint8_t) u; +} + +static void store_bigendian_64(uint8_t *x, uint64_t u) { + x[7] = (uint8_t) u; + u >>= 8; + x[6] = (uint8_t) u; + u >>= 8; + x[5] = (uint8_t) u; + u >>= 8; + x[4] = (uint8_t) u; + u >>= 8; + x[3] = (uint8_t) u; + u >>= 8; + x[2] = (uint8_t) u; + u >>= 8; + x[1] = (uint8_t) u; + u >>= 8; + x[0] = (uint8_t) u; +} + +#define SHR(x, c) ((x) >> (c)) +#define ROTR_32(x, c) (((x) >> (c)) | ((x) << (32 - (c)))) +#define ROTR_64(x,c) (((x) >> (c)) | ((x) << (64 - (c)))) + +#define Ch(x, y, z) (((x) & (y)) ^ (~(x) & (z))) +#define Maj(x, y, z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z))) + +#define Sigma0_32(x) (ROTR_32(x, 2) ^ ROTR_32(x,13) ^ ROTR_32(x,22)) +#define Sigma1_32(x) (ROTR_32(x, 6) ^ ROTR_32(x,11) ^ ROTR_32(x,25)) +#define sigma0_32(x) (ROTR_32(x, 7) ^ ROTR_32(x,18) ^ SHR(x, 3)) +#define sigma1_32(x) (ROTR_32(x,17) ^ ROTR_32(x,19) ^ SHR(x,10)) + +#define Sigma0_64(x) (ROTR_64(x,28) ^ ROTR_64(x,34) ^ ROTR_64(x,39)) +#define Sigma1_64(x) (ROTR_64(x,14) ^ ROTR_64(x,18) ^ ROTR_64(x,41)) +#define sigma0_64(x) (ROTR_64(x, 1) ^ ROTR_64(x, 8) ^ SHR(x,7)) +#define sigma1_64(x) (ROTR_64(x,19) ^ ROTR_64(x,61) ^ SHR(x,6)) + +#define M_32(w0, w14, w9, w1) w0 = sigma1_32(w14) + (w9) + sigma0_32(w1) + (w0); +#define 
M_64(w0, w14, w9, w1) w0 = sigma1_64(w14) + (w9) + sigma0_64(w1) + (w0); + +#define EXPAND_32 \ + M_32(w0, w14, w9, w1) \ + M_32(w1, w15, w10, w2) \ + M_32(w2, w0, w11, w3) \ + M_32(w3, w1, w12, w4) \ + M_32(w4, w2, w13, w5) \ + M_32(w5, w3, w14, w6) \ + M_32(w6, w4, w15, w7) \ + M_32(w7, w5, w0, w8) \ + M_32(w8, w6, w1, w9) \ + M_32(w9, w7, w2, w10) \ + M_32(w10, w8, w3, w11) \ + M_32(w11, w9, w4, w12) \ + M_32(w12, w10, w5, w13) \ + M_32(w13, w11, w6, w14) \ + M_32(w14, w12, w7, w15) \ + M_32(w15, w13, w8, w0) + +#define EXPAND_64 \ + M_64(w0 ,w14,w9 ,w1 ) \ + M_64(w1 ,w15,w10,w2 ) \ + M_64(w2 ,w0 ,w11,w3 ) \ + M_64(w3 ,w1 ,w12,w4 ) \ + M_64(w4 ,w2 ,w13,w5 ) \ + M_64(w5 ,w3 ,w14,w6 ) \ + M_64(w6 ,w4 ,w15,w7 ) \ + M_64(w7 ,w5 ,w0 ,w8 ) \ + M_64(w8 ,w6 ,w1 ,w9 ) \ + M_64(w9 ,w7 ,w2 ,w10) \ + M_64(w10,w8 ,w3 ,w11) \ + M_64(w11,w9 ,w4 ,w12) \ + M_64(w12,w10,w5 ,w13) \ + M_64(w13,w11,w6 ,w14) \ + M_64(w14,w12,w7 ,w15) \ + M_64(w15,w13,w8 ,w0 ) + +#define F_32(w, k) \ + T1 = h + Sigma1_32(e) + Ch(e, f, g) + (k) + (w); \ + T2 = Sigma0_32(a) + Maj(a, b, c); \ + h = g; \ + g = f; \ + f = e; \ + e = d + T1; \ + d = c; \ + c = b; \ + b = a; \ + a = T1 + T2; + +#define F_64(w,k) \ + T1 = h + Sigma1_64(e) + Ch(e,f,g) + k + w; \ + T2 = Sigma0_64(a) + Maj(a,b,c); \ + h = g; \ + g = f; \ + f = e; \ + e = d + T1; \ + d = c; \ + c = b; \ + b = a; \ + a = T1 + T2; + +static size_t crypto_hashblocks_sha256(uint8_t *statebytes, + const uint8_t *in, size_t inlen) { + uint32_t state[8]; + uint32_t a; + uint32_t b; + uint32_t c; + uint32_t d; + uint32_t e; + uint32_t f; + uint32_t g; + uint32_t h; + uint32_t T1; + uint32_t T2; + + a = load_bigendian_32(statebytes + 0); + state[0] = a; + b = load_bigendian_32(statebytes + 4); + state[1] = b; + c = load_bigendian_32(statebytes + 8); + state[2] = c; + d = load_bigendian_32(statebytes + 12); + state[3] = d; + e = load_bigendian_32(statebytes + 16); + state[4] = e; + f = load_bigendian_32(statebytes + 20); + state[5] = f; + g = 
load_bigendian_32(statebytes + 24); + state[6] = g; + h = load_bigendian_32(statebytes + 28); + state[7] = h; + + while (inlen >= 64) { + uint32_t w0 = load_bigendian_32(in + 0); + uint32_t w1 = load_bigendian_32(in + 4); + uint32_t w2 = load_bigendian_32(in + 8); + uint32_t w3 = load_bigendian_32(in + 12); + uint32_t w4 = load_bigendian_32(in + 16); + uint32_t w5 = load_bigendian_32(in + 20); + uint32_t w6 = load_bigendian_32(in + 24); + uint32_t w7 = load_bigendian_32(in + 28); + uint32_t w8 = load_bigendian_32(in + 32); + uint32_t w9 = load_bigendian_32(in + 36); + uint32_t w10 = load_bigendian_32(in + 40); + uint32_t w11 = load_bigendian_32(in + 44); + uint32_t w12 = load_bigendian_32(in + 48); + uint32_t w13 = load_bigendian_32(in + 52); + uint32_t w14 = load_bigendian_32(in + 56); + uint32_t w15 = load_bigendian_32(in + 60); + + F_32(w0, 0x428a2f98) + F_32(w1, 0x71374491) + F_32(w2, 0xb5c0fbcf) + F_32(w3, 0xe9b5dba5) + F_32(w4, 0x3956c25b) + F_32(w5, 0x59f111f1) + F_32(w6, 0x923f82a4) + F_32(w7, 0xab1c5ed5) + F_32(w8, 0xd807aa98) + F_32(w9, 0x12835b01) + F_32(w10, 0x243185be) + F_32(w11, 0x550c7dc3) + F_32(w12, 0x72be5d74) + F_32(w13, 0x80deb1fe) + F_32(w14, 0x9bdc06a7) + F_32(w15, 0xc19bf174) + + EXPAND_32 + + F_32(w0, 0xe49b69c1) + F_32(w1, 0xefbe4786) + F_32(w2, 0x0fc19dc6) + F_32(w3, 0x240ca1cc) + F_32(w4, 0x2de92c6f) + F_32(w5, 0x4a7484aa) + F_32(w6, 0x5cb0a9dc) + F_32(w7, 0x76f988da) + F_32(w8, 0x983e5152) + F_32(w9, 0xa831c66d) + F_32(w10, 0xb00327c8) + F_32(w11, 0xbf597fc7) + F_32(w12, 0xc6e00bf3) + F_32(w13, 0xd5a79147) + F_32(w14, 0x06ca6351) + F_32(w15, 0x14292967) + + EXPAND_32 + + F_32(w0, 0x27b70a85) + F_32(w1, 0x2e1b2138) + F_32(w2, 0x4d2c6dfc) + F_32(w3, 0x53380d13) + F_32(w4, 0x650a7354) + F_32(w5, 0x766a0abb) + F_32(w6, 0x81c2c92e) + F_32(w7, 0x92722c85) + F_32(w8, 0xa2bfe8a1) + F_32(w9, 0xa81a664b) + F_32(w10, 0xc24b8b70) + F_32(w11, 0xc76c51a3) + F_32(w12, 0xd192e819) + F_32(w13, 0xd6990624) + F_32(w14, 0xf40e3585) + F_32(w15, 0x106aa070) 
+ + EXPAND_32 + + F_32(w0, 0x19a4c116) + F_32(w1, 0x1e376c08) + F_32(w2, 0x2748774c) + F_32(w3, 0x34b0bcb5) + F_32(w4, 0x391c0cb3) + F_32(w5, 0x4ed8aa4a) + F_32(w6, 0x5b9cca4f) + F_32(w7, 0x682e6ff3) + F_32(w8, 0x748f82ee) + F_32(w9, 0x78a5636f) + F_32(w10, 0x84c87814) + F_32(w11, 0x8cc70208) + F_32(w12, 0x90befffa) + F_32(w13, 0xa4506ceb) + F_32(w14, 0xbef9a3f7) + F_32(w15, 0xc67178f2) + + a += state[0]; + b += state[1]; + c += state[2]; + d += state[3]; + e += state[4]; + f += state[5]; + g += state[6]; + h += state[7]; + + state[0] = a; + state[1] = b; + state[2] = c; + state[3] = d; + state[4] = e; + state[5] = f; + state[6] = g; + state[7] = h; + + in += 64; + inlen -= 64; + } + + store_bigendian_32(statebytes + 0, state[0]); + store_bigendian_32(statebytes + 4, state[1]); + store_bigendian_32(statebytes + 8, state[2]); + store_bigendian_32(statebytes + 12, state[3]); + store_bigendian_32(statebytes + 16, state[4]); + store_bigendian_32(statebytes + 20, state[5]); + store_bigendian_32(statebytes + 24, state[6]); + store_bigendian_32(statebytes + 28, state[7]); + + return inlen; +} + +static int crypto_hashblocks_sha512(unsigned char *statebytes,const unsigned char *in,unsigned long long inlen) +{ + uint64_t state[8]; + uint64_t a; + uint64_t b; + uint64_t c; + uint64_t d; + uint64_t e; + uint64_t f; + uint64_t g; + uint64_t h; + uint64_t T1; + uint64_t T2; + + a = load_bigendian_64(statebytes + 0); state[0] = a; + b = load_bigendian_64(statebytes + 8); state[1] = b; + c = load_bigendian_64(statebytes + 16); state[2] = c; + d = load_bigendian_64(statebytes + 24); state[3] = d; + e = load_bigendian_64(statebytes + 32); state[4] = e; + f = load_bigendian_64(statebytes + 40); state[5] = f; + g = load_bigendian_64(statebytes + 48); state[6] = g; + h = load_bigendian_64(statebytes + 56); state[7] = h; + + while (inlen >= 128) { + uint64_t w0 = load_bigendian_64(in + 0); + uint64_t w1 = load_bigendian_64(in + 8); + uint64_t w2 = load_bigendian_64(in + 16); + uint64_t 
w3 = load_bigendian_64(in + 24); + uint64_t w4 = load_bigendian_64(in + 32); + uint64_t w5 = load_bigendian_64(in + 40); + uint64_t w6 = load_bigendian_64(in + 48); + uint64_t w7 = load_bigendian_64(in + 56); + uint64_t w8 = load_bigendian_64(in + 64); + uint64_t w9 = load_bigendian_64(in + 72); + uint64_t w10 = load_bigendian_64(in + 80); + uint64_t w11 = load_bigendian_64(in + 88); + uint64_t w12 = load_bigendian_64(in + 96); + uint64_t w13 = load_bigendian_64(in + 104); + uint64_t w14 = load_bigendian_64(in + 112); + uint64_t w15 = load_bigendian_64(in + 120); + + F_64(w0 ,0x428a2f98d728ae22ULL) + F_64(w1 ,0x7137449123ef65cdULL) + F_64(w2 ,0xb5c0fbcfec4d3b2fULL) + F_64(w3 ,0xe9b5dba58189dbbcULL) + F_64(w4 ,0x3956c25bf348b538ULL) + F_64(w5 ,0x59f111f1b605d019ULL) + F_64(w6 ,0x923f82a4af194f9bULL) + F_64(w7 ,0xab1c5ed5da6d8118ULL) + F_64(w8 ,0xd807aa98a3030242ULL) + F_64(w9 ,0x12835b0145706fbeULL) + F_64(w10,0x243185be4ee4b28cULL) + F_64(w11,0x550c7dc3d5ffb4e2ULL) + F_64(w12,0x72be5d74f27b896fULL) + F_64(w13,0x80deb1fe3b1696b1ULL) + F_64(w14,0x9bdc06a725c71235ULL) + F_64(w15,0xc19bf174cf692694ULL) + + EXPAND_64 + + F_64(w0 ,0xe49b69c19ef14ad2ULL) + F_64(w1 ,0xefbe4786384f25e3ULL) + F_64(w2 ,0x0fc19dc68b8cd5b5ULL) + F_64(w3 ,0x240ca1cc77ac9c65ULL) + F_64(w4 ,0x2de92c6f592b0275ULL) + F_64(w5 ,0x4a7484aa6ea6e483ULL) + F_64(w6 ,0x5cb0a9dcbd41fbd4ULL) + F_64(w7 ,0x76f988da831153b5ULL) + F_64(w8 ,0x983e5152ee66dfabULL) + F_64(w9 ,0xa831c66d2db43210ULL) + F_64(w10,0xb00327c898fb213fULL) + F_64(w11,0xbf597fc7beef0ee4ULL) + F_64(w12,0xc6e00bf33da88fc2ULL) + F_64(w13,0xd5a79147930aa725ULL) + F_64(w14,0x06ca6351e003826fULL) + F_64(w15,0x142929670a0e6e70ULL) + + EXPAND_64 + + F_64(w0 ,0x27b70a8546d22ffcULL) + F_64(w1 ,0x2e1b21385c26c926ULL) + F_64(w2 ,0x4d2c6dfc5ac42aedULL) + F_64(w3 ,0x53380d139d95b3dfULL) + F_64(w4 ,0x650a73548baf63deULL) + F_64(w5 ,0x766a0abb3c77b2a8ULL) + F_64(w6 ,0x81c2c92e47edaee6ULL) + F_64(w7 ,0x92722c851482353bULL) + F_64(w8 ,0xa2bfe8a14cf10364ULL) + 
F_64(w9 ,0xa81a664bbc423001ULL) + F_64(w10,0xc24b8b70d0f89791ULL) + F_64(w11,0xc76c51a30654be30ULL) + F_64(w12,0xd192e819d6ef5218ULL) + F_64(w13,0xd69906245565a910ULL) + F_64(w14,0xf40e35855771202aULL) + F_64(w15,0x106aa07032bbd1b8ULL) + + EXPAND_64 + + F_64(w0 ,0x19a4c116b8d2d0c8ULL) + F_64(w1 ,0x1e376c085141ab53ULL) + F_64(w2 ,0x2748774cdf8eeb99ULL) + F_64(w3 ,0x34b0bcb5e19b48a8ULL) + F_64(w4 ,0x391c0cb3c5c95a63ULL) + F_64(w5 ,0x4ed8aa4ae3418acbULL) + F_64(w6 ,0x5b9cca4f7763e373ULL) + F_64(w7 ,0x682e6ff3d6b2b8a3ULL) + F_64(w8 ,0x748f82ee5defb2fcULL) + F_64(w9 ,0x78a5636f43172f60ULL) + F_64(w10,0x84c87814a1f0ab72ULL) + F_64(w11,0x8cc702081a6439ecULL) + F_64(w12,0x90befffa23631e28ULL) + F_64(w13,0xa4506cebde82bde9ULL) + F_64(w14,0xbef9a3f7b2c67915ULL) + F_64(w15,0xc67178f2e372532bULL) + + EXPAND_64 + + F_64(w0 ,0xca273eceea26619cULL) + F_64(w1 ,0xd186b8c721c0c207ULL) + F_64(w2 ,0xeada7dd6cde0eb1eULL) + F_64(w3 ,0xf57d4f7fee6ed178ULL) + F_64(w4 ,0x06f067aa72176fbaULL) + F_64(w5 ,0x0a637dc5a2c898a6ULL) + F_64(w6 ,0x113f9804bef90daeULL) + F_64(w7 ,0x1b710b35131c471bULL) + F_64(w8 ,0x28db77f523047d84ULL) + F_64(w9 ,0x32caab7b40c72493ULL) + F_64(w10,0x3c9ebe0a15c9bebcULL) + F_64(w11,0x431d67c49c100d4cULL) + F_64(w12,0x4cc5d4becb3e42b6ULL) + F_64(w13,0x597f299cfc657e2aULL) + F_64(w14,0x5fcb6fab3ad6faecULL) + F_64(w15,0x6c44198c4a475817ULL) + + a += state[0]; + b += state[1]; + c += state[2]; + d += state[3]; + e += state[4]; + f += state[5]; + g += state[6]; + h += state[7]; + + state[0] = a; + state[1] = b; + state[2] = c; + state[3] = d; + state[4] = e; + state[5] = f; + state[6] = g; + state[7] = h; + + in += 128; + inlen -= 128; + } + + store_bigendian_64(statebytes + 0,state[0]); + store_bigendian_64(statebytes + 8,state[1]); + store_bigendian_64(statebytes + 16,state[2]); + store_bigendian_64(statebytes + 24,state[3]); + store_bigendian_64(statebytes + 32,state[4]); + store_bigendian_64(statebytes + 40,state[5]); + store_bigendian_64(statebytes + 48,state[6]); + 
store_bigendian_64(statebytes + 56,state[7]); + + return inlen; +} + + +static const uint8_t iv_256[32] = { + 0x6a, 0x09, 0xe6, 0x67, 0xbb, 0x67, 0xae, 0x85, + 0x3c, 0x6e, 0xf3, 0x72, 0xa5, 0x4f, 0xf5, 0x3a, + 0x51, 0x0e, 0x52, 0x7f, 0x9b, 0x05, 0x68, 0x8c, + 0x1f, 0x83, 0xd9, 0xab, 0x5b, 0xe0, 0xcd, 0x19 +}; + +static const uint8_t iv_512[64] = { + 0x6a, 0x09, 0xe6, 0x67, 0xf3, 0xbc, 0xc9, 0x08, 0xbb, 0x67, 0xae, + 0x85, 0x84, 0xca, 0xa7, 0x3b, 0x3c, 0x6e, 0xf3, 0x72, 0xfe, 0x94, + 0xf8, 0x2b, 0xa5, 0x4f, 0xf5, 0x3a, 0x5f, 0x1d, 0x36, 0xf1, 0x51, + 0x0e, 0x52, 0x7f, 0xad, 0xe6, 0x82, 0xd1, 0x9b, 0x05, 0x68, 0x8c, + 0x2b, 0x3e, 0x6c, 0x1f, 0x1f, 0x83, 0xd9, 0xab, 0xfb, 0x41, 0xbd, + 0x6b, 0x5b, 0xe0, 0xcd, 0x19, 0x13, 0x7e, 0x21, 0x79 +}; + +void sha256_inc_init(uint8_t *state) { + for (size_t i = 0; i < 32; ++i) { + state[i] = iv_256[i]; + } + for (size_t i = 32; i < 40; ++i) { + state[i] = 0; + } +} + +void sha512_inc_init(uint8_t *state) { + for (size_t i = 0; i < 64; ++i) { + state[i] = iv_512[i]; + } + for (size_t i = 64; i < 72; ++i) { + state[i] = 0; + } +} + +void sha256_inc_blocks(uint8_t *state, const uint8_t *in, size_t inblocks) { + uint64_t bytes = load_bigendian_64(state + 32); + + crypto_hashblocks_sha256(state, in, 64 * inblocks); + bytes += 64 * inblocks; + + store_bigendian_64(state + 32, bytes); +} + +void sha512_inc_blocks(uint8_t *state, const uint8_t *in, size_t inblocks) { + uint64_t bytes = load_bigendian_64(state + 64); + + crypto_hashblocks_sha512(state, in, 128 * inblocks); + bytes += 128 * inblocks; + + store_bigendian_64(state + 64, bytes); +} + +void sha256_inc_finalize(uint8_t *out, uint8_t *state, const uint8_t *in, size_t inlen) { + uint8_t padded[128]; + uint64_t bytes = load_bigendian_64(state + 32) + inlen; + + crypto_hashblocks_sha256(state, in, inlen); + in += inlen; + inlen &= 63; + in -= inlen; + + for (size_t i = 0; i < inlen; ++i) { + padded[i] = in[i]; + } + padded[inlen] = 0x80; + + if (inlen < 56) { + for (size_t i = 
inlen + 1; i < 56; ++i) { + padded[i] = 0; + } + padded[56] = (uint8_t) (bytes >> 53); + padded[57] = (uint8_t) (bytes >> 45); + padded[58] = (uint8_t) (bytes >> 37); + padded[59] = (uint8_t) (bytes >> 29); + padded[60] = (uint8_t) (bytes >> 21); + padded[61] = (uint8_t) (bytes >> 13); + padded[62] = (uint8_t) (bytes >> 5); + padded[63] = (uint8_t) (bytes << 3); + crypto_hashblocks_sha256(state, padded, 64); + } else { + for (size_t i = inlen + 1; i < 120; ++i) { + padded[i] = 0; + } + padded[120] = (uint8_t) (bytes >> 53); + padded[121] = (uint8_t) (bytes >> 45); + padded[122] = (uint8_t) (bytes >> 37); + padded[123] = (uint8_t) (bytes >> 29); + padded[124] = (uint8_t) (bytes >> 21); + padded[125] = (uint8_t) (bytes >> 13); + padded[126] = (uint8_t) (bytes >> 5); + padded[127] = (uint8_t) (bytes << 3); + crypto_hashblocks_sha256(state, padded, 128); + } + + for (size_t i = 0; i < 32; ++i) { + out[i] = state[i]; + } + +} + +void sha512_inc_finalize(uint8_t *out, uint8_t *state, const uint8_t *in, size_t inlen) { + uint8_t padded[256]; + uint64_t bytes = load_bigendian_64(state + 64) + inlen; + + crypto_hashblocks_sha512(state, in, inlen); + in += inlen; + inlen &= 127; + in -= inlen; + + for (size_t i = 0; i < inlen; ++i) { + padded[i] = in[i]; + } + padded[inlen] = 0x80; + + if (inlen < 112) { + for (size_t i = inlen + 1; i < 119; ++i) { + padded[i] = 0; + } + padded[119] = (uint8_t) (bytes >> 61); + padded[120] = (uint8_t) (bytes >> 53); + padded[121] = (uint8_t) (bytes >> 45); + padded[122] = (uint8_t) (bytes >> 37); + padded[123] = (uint8_t) (bytes >> 29); + padded[124] = (uint8_t) (bytes >> 21); + padded[125] = (uint8_t) (bytes >> 13); + padded[126] = (uint8_t) (bytes >> 5); + padded[127] = (uint8_t) (bytes << 3); + crypto_hashblocks_sha512(state, padded, 128); + } else { + for (size_t i = inlen + 1; i < 247; ++i) { + padded[i] = 0; + } + padded[247] = (uint8_t) (bytes >> 61); + padded[248] = (uint8_t) (bytes >> 53); + padded[249] = (uint8_t) (bytes >> 45); + 
padded[250] = (uint8_t) (bytes >> 37); + padded[251] = (uint8_t) (bytes >> 29); + padded[252] = (uint8_t) (bytes >> 21); + padded[253] = (uint8_t) (bytes >> 13); + padded[254] = (uint8_t) (bytes >> 5); + padded[255] = (uint8_t) (bytes << 3); + crypto_hashblocks_sha512(state, padded, 256); + } + + for (size_t i = 0; i < 64; ++i) { + out[i] = state[i]; + } +} + +void sha256(uint8_t *out, const uint8_t *in, size_t inlen) { + uint8_t state[40]; + + sha256_inc_init(state); + sha256_inc_finalize(out, state, in, inlen); +} + +void sha512(uint8_t *out, const uint8_t *in, size_t inlen) { + uint8_t state[72]; + + sha512_inc_init(state); + sha512_inc_finalize(out, state, in, inlen); +} + +/** + * mgf1 function based on the SHA-256 hash function + * Note that inlen should be sufficiently small that it still allows for + * an array to be allocated on the stack. Typically 'in' is merely a seed. + * Outputs outlen number of bytes + */ +void mgf1_256(unsigned char *out, unsigned long outlen, + const unsigned char *in, unsigned long inlen) +{ + SPX_VLA(uint8_t, inbuf, inlen+4); + unsigned char outbuf[SPX_SHA256_OUTPUT_BYTES]; + unsigned long i; + + memcpy(inbuf, in, inlen); + + /* While we can fit in at least another full block of SHA256 output.. */ + for (i = 0; (i+1)*SPX_SHA256_OUTPUT_BYTES <= outlen; i++) { + u32_to_bytes(inbuf + inlen, i); + sha256(out, inbuf, inlen + 4); + out += SPX_SHA256_OUTPUT_BYTES; + } + /* Until we cannot anymore, and we fill the remainder. 
*/ + if (outlen > i*SPX_SHA256_OUTPUT_BYTES) { + u32_to_bytes(inbuf + inlen, i); + sha256(outbuf, inbuf, inlen + 4); + memcpy(out, outbuf, outlen - i*SPX_SHA256_OUTPUT_BYTES); + } +} + +/* + * mgf1 function based on the SHA-512 hash function + */ +void mgf1_512(unsigned char *out, unsigned long outlen, + const unsigned char *in, unsigned long inlen) +{ + SPX_VLA(uint8_t, inbuf, inlen+4); + unsigned char outbuf[SPX_SHA512_OUTPUT_BYTES]; + unsigned long i; + + memcpy(inbuf, in, inlen); + + /* While we can fit in at least another full block of SHA512 output.. */ + for (i = 0; (i+1)*SPX_SHA512_OUTPUT_BYTES <= outlen; i++) { + u32_to_bytes(inbuf + inlen, i); + sha512(out, inbuf, inlen + 4); + out += SPX_SHA512_OUTPUT_BYTES; + } + /* Until we cannot anymore, and we fill the remainder. */ + if (outlen > i*SPX_SHA512_OUTPUT_BYTES) { + u32_to_bytes(inbuf + inlen, i); + sha512(outbuf, inbuf, inlen + 4); + memcpy(out, outbuf, outlen - i*SPX_SHA512_OUTPUT_BYTES); + } +} + + +/** + * Absorb the constant pub_seed using one round of the compression function + * This initializes state_seeded and state_seeded_512, which can then be + * reused in thash + **/ +void seed_state(spx_ctx *ctx) { + uint8_t block[SPX_SHA512_BLOCK_BYTES]; + size_t i; + + for (i = 0; i < SPX_N; ++i) { + block[i] = ctx->pub_seed[i]; + } + for (i = SPX_N; i < SPX_SHA512_BLOCK_BYTES; ++i) { + block[i] = 0; + } + /* block has been properly initialized for both SHA-256 and SHA-512 */ + + sha256_inc_init(ctx->state_seeded); + sha256_inc_blocks(ctx->state_seeded, block, 1); +#if SPX_SHA512 + sha512_inc_init(ctx->state_seeded_512); + sha512_inc_blocks(ctx->state_seeded_512, block, 1); +#endif +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/src/thash_sha2_robust.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/src/thash_sha2_robust.c new file mode 100644 index 0000000..1918bab --- /dev/null +++ 
b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/src/thash_sha2_robust.c @@ -0,0 +1,75 @@ +#include +#include + +#include "../../../app/include/address.h" +#include "../../../app/include/thash.h" +#include "../../../app/include/params.h" +#include "../../../app/include/utils.h" + +#include "../include/sha2.h" + +#if SPX_SHA512 +static void thash_512(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]); +#endif + +/** + * Takes an array of inblocks concatenated arrays of SPX_N bytes. + */ +void thash(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]) +{ +#if SPX_SHA512 + if (inblocks > 1) { + thash_512(out, in, inblocks, ctx, addr); + return; + } +#endif + unsigned char outbuf[SPX_SHA256_OUTPUT_BYTES]; + SPX_VLA(uint8_t, bitmask, inblocks * SPX_N); + SPX_VLA(uint8_t, buf, SPX_N + SPX_SHA256_OUTPUT_BYTES + inblocks*SPX_N); + uint8_t sha2_state[40]; + unsigned int i; + + memcpy(buf, ctx->pub_seed, SPX_N); + memcpy(buf + SPX_N, addr, SPX_SHA256_ADDR_BYTES); + mgf1_256(bitmask, inblocks * SPX_N, buf, SPX_N + SPX_SHA256_ADDR_BYTES); + + /* Retrieve precomputed state containing pub_seed */ + memcpy(sha2_state, ctx->state_seeded, 40 * sizeof(uint8_t)); + + for (i = 0; i < inblocks * SPX_N; i++) { + buf[SPX_N + SPX_SHA256_ADDR_BYTES + i] = in[i] ^ bitmask[i]; + } + + sha256_inc_finalize(outbuf, sha2_state, buf + SPX_N, + SPX_SHA256_ADDR_BYTES + inblocks*SPX_N); + memcpy(out, outbuf, SPX_N); +} + +#if SPX_SHA512 +static void thash_512(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]) +{ + unsigned char outbuf[SPX_SHA512_OUTPUT_BYTES]; + SPX_VLA(uint8_t, bitmask, inblocks * SPX_N); + SPX_VLA(uint8_t, buf, SPX_N + SPX_SHA256_ADDR_BYTES + inblocks*SPX_N); + uint8_t sha2_state[72]; + unsigned int i; + + memcpy(buf, ctx->pub_seed, SPX_N); + memcpy(buf + SPX_N, addr, SPX_SHA256_ADDR_BYTES); + 
mgf1_512(bitmask, inblocks * SPX_N, buf, SPX_N + SPX_SHA256_ADDR_BYTES); + + /* Retrieve precomputed state containing pub_seed */ + memcpy(sha2_state, ctx->state_seeded_512, 72 * sizeof(uint8_t)); + + for (i = 0; i < inblocks * SPX_N; i++) { + buf[SPX_N + SPX_SHA256_ADDR_BYTES + i] = in[i] ^ bitmask[i]; + } + + sha512_inc_finalize(outbuf, sha2_state, buf + SPX_N, + SPX_SHA256_ADDR_BYTES + inblocks*SPX_N); + memcpy(out, outbuf, SPX_N); +} +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/src/thash_sha2_simple.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/src/thash_sha2_simple.c new file mode 100644 index 0000000..7c22884 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/sha2/src/thash_sha2_simple.c @@ -0,0 +1,60 @@ +#include +#include + +#include "../../../app/include/address.h" +#include "../../../app/include/params.h" +#include "../../../app/include/thash.h" +#include "../../../app/include/utils.h" + +#include "../include/sha2.h" + +#if SPX_SHA512 +static void thash_512(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]); +#endif + +/** + * Takes an array of inblocks concatenated arrays of SPX_N bytes. 
+ */ +void thash(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]) +{ +#if SPX_SHA512 + if (inblocks > 1) { + thash_512(out, in, inblocks, ctx, addr); + return; + } +#endif + + unsigned char outbuf[SPX_SHA256_OUTPUT_BYTES]; + uint8_t sha2_state[40]; + SPX_VLA(uint8_t, buf, SPX_SHA256_ADDR_BYTES + inblocks*SPX_N); + + /* Retrieve precomputed state containing pub_seed */ + memcpy(sha2_state, ctx->state_seeded, 40 * sizeof(uint8_t)); + + memcpy(buf, addr, SPX_SHA256_ADDR_BYTES); + memcpy(buf + SPX_SHA256_ADDR_BYTES, in, inblocks * SPX_N); + + sha256_inc_finalize(outbuf, sha2_state, buf, SPX_SHA256_ADDR_BYTES + inblocks*SPX_N); + memcpy(out, outbuf, SPX_N); +} + +#if SPX_SHA512 +static void thash_512(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]) +{ + unsigned char outbuf[SPX_SHA512_OUTPUT_BYTES]; + uint8_t sha2_state[72]; + SPX_VLA(uint8_t, buf, SPX_SHA256_ADDR_BYTES + inblocks*SPX_N); + + /* Retrieve precomputed state containing pub_seed */ + memcpy(sha2_state, ctx->state_seeded_512, 72 * sizeof(uint8_t)); + + memcpy(buf, addr, SPX_SHA256_ADDR_BYTES); + memcpy(buf + SPX_SHA256_ADDR_BYTES, in, inblocks * SPX_N); + + sha512_inc_finalize(outbuf, sha2_state, buf, SPX_SHA256_ADDR_BYTES + inblocks*SPX_N); + memcpy(out, outbuf, SPX_N); +} +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/CMakeLists.txt b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/CMakeLists.txt new file mode 100644 index 0000000..1ccfe5b --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/CMakeLists.txt @@ -0,0 +1,11 @@ +set(SHAKE_SOURCES + src/fips202.c + src/hash_shake.c + src/thash_shake_${THASH}.c +) + +set(PARAMS "sphincs-shake-${SECPAR}") +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DPARAMS=${PARAMS}") + +add_library(shake SHARED ${SHAKE_SOURCES}) +target_include_directories(shake PUBLIC 
${CMAKE_CURRENT_SOURCE_DIR}/include) diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/include/fips202.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/include/fips202.h new file mode 100644 index 0000000..e11cb7f --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/include/fips202.h @@ -0,0 +1,47 @@ +#ifndef SPX_FIPS202_H +#define SPX_FIPS202_H + +#include +#include + +#define SHAKE128_RATE 168 +#define SHAKE256_RATE 136 +#define SHA3_256_RATE 136 +#define SHA3_512_RATE 72 + +void shake128_absorb(uint64_t *s, const uint8_t *input, size_t inlen); + +void shake128_squeezeblocks(uint8_t *output, size_t nblocks, uint64_t *s); + +void shake128_inc_init(uint64_t *s_inc); +void shake128_inc_absorb(uint64_t *s_inc, const uint8_t *input, size_t inlen); +void shake128_inc_finalize(uint64_t *s_inc); +void shake128_inc_squeeze(uint8_t *output, size_t outlen, uint64_t *s_inc); + +void shake256_absorb(uint64_t *s, const uint8_t *input, size_t inlen); +void shake256_squeezeblocks(uint8_t *output, size_t nblocks, uint64_t *s); + +void shake256_inc_init(uint64_t *s_inc); +void shake256_inc_absorb(uint64_t *s_inc, const uint8_t *input, size_t inlen); +void shake256_inc_finalize(uint64_t *s_inc); +void shake256_inc_squeeze(uint8_t *output, size_t outlen, uint64_t *s_inc); + +void shake128(uint8_t *output, size_t outlen, + const uint8_t *input, size_t inlen); + +void shake256(uint8_t *output, size_t outlen, + const uint8_t *input, size_t inlen); + +void sha3_256_inc_init(uint64_t *s_inc); +void sha3_256_inc_absorb(uint64_t *s_inc, const uint8_t *input, size_t inlen); +void sha3_256_inc_finalize(uint8_t *output, uint64_t *s_inc); + +void sha3_256(uint8_t *output, const uint8_t *input, size_t inlen); + +void sha3_512_inc_init(uint64_t *s_inc); +void sha3_512_inc_absorb(uint64_t *s_inc, const uint8_t *input, size_t inlen); +void sha3_512_inc_finalize(uint8_t *output, uint64_t *s_inc); + +void 
sha3_512(uint8_t *output, const uint8_t *input, size_t inlen); + +#endif diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/include/shake_offsets.h b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/include/shake_offsets.h new file mode 100644 index 0000000..0407bdf --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/include/shake_offsets.h @@ -0,0 +1,20 @@ +#if !defined( SHAKE_OFFSETS_H_ ) +#define SHAKE_OFFSETS_H_ + +/* + * Offsets of various fields in the address structure when we use SHAKE as + * the Sphincs+ hash function + */ + +#define SPX_OFFSET_LAYER 3 /* The byte used to specify the Merkle tree layer */ +#define SPX_OFFSET_TREE 8 /* The start of the 8 byte field used to specify the tree */ +#define SPX_OFFSET_TYPE 19 /* The byte used to specify the hash type (reason) */ +#define SPX_OFFSET_KP_ADDR 20 /* The start of the 4 byte field used to specify the key pair address */ +#define SPX_OFFSET_CHAIN_ADDR 27 /* The byte used to specify the chain address (which Winternitz chain) */ +#define SPX_OFFSET_HASH_ADDR 31 /* The byte used to specify the hash address (where in the Winternitz chain) */ +#define SPX_OFFSET_TREE_HGT 27 /* The byte used to specify the height of this node in the FORS or Merkle tree */ +#define SPX_OFFSET_TREE_INDEX 28 /* The start of the 4 byte field used to specify the node in the FORS or Merkle tree */ + +#define SPX_SHAKE 1 + +#endif /* SHAKE_OFFSETS_H_ */ diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/src/fips202.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/src/fips202.c new file mode 100644 index 0000000..3e73f09 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/src/fips202.c @@ -0,0 +1,598 @@ +/* Based on the public domain implementation in + * crypto_hash/keccakc512/simple/ from http://bench.cr.yp.to/supercop.html + * by Ronny Van Keer + * 
and the public domain "TweetFips202" implementation + * from https://twitter.com/tweetfips202 + * by Gilles Van Assche, Daniel J. Bernstein, and Peter Schwabe */ + +#include +#include + +#include "../include/fips202.h" + +#define NROUNDS 24 +#define ROL(a, offset) (((a) << (offset)) ^ ((a) >> (64 - (offset)))) + +/************************************************* + * Name: load64 + * + * Description: Load 8 bytes into uint64_t in little-endian order + * + * Arguments: - const uint8_t *x: pointer to input byte array + * + * Returns the loaded 64-bit unsigned integer + **************************************************/ +static uint64_t load64(const uint8_t *x) { + uint64_t r = 0; + for (size_t i = 0; i < 8; ++i) { + r |= (uint64_t)x[i] << 8 * i; + } + + return r; +} + +/************************************************* + * Name: store64 + * + * Description: Store a 64-bit integer to a byte array in little-endian order + * + * Arguments: - uint8_t *x: pointer to the output byte array + * - uint64_t u: input 64-bit unsigned integer + **************************************************/ +static void store64(uint8_t *x, uint64_t u) { + for (size_t i = 0; i < 8; ++i) { + x[i] = (uint8_t) (u >> 8 * i); + } +} + +/* Keccak round constants */ +static const uint64_t KeccakF_RoundConstants[NROUNDS] = { + 0x0000000000000001ULL, 0x0000000000008082ULL, + 0x800000000000808aULL, 0x8000000080008000ULL, + 0x000000000000808bULL, 0x0000000080000001ULL, + 0x8000000080008081ULL, 0x8000000000008009ULL, + 0x000000000000008aULL, 0x0000000000000088ULL, + 0x0000000080008009ULL, 0x000000008000000aULL, + 0x000000008000808bULL, 0x800000000000008bULL, + 0x8000000000008089ULL, 0x8000000000008003ULL, + 0x8000000000008002ULL, 0x8000000000000080ULL, + 0x000000000000800aULL, 0x800000008000000aULL, + 0x8000000080008081ULL, 0x8000000000008080ULL, + 0x0000000080000001ULL, 0x8000000080008008ULL +}; + +/************************************************* + * Name: KeccakF1600_StatePermute + * + * 
Description: The Keccak F1600 Permutation + * + * Arguments: - uint64_t *state: pointer to input/output Keccak state + **************************************************/ +static void KeccakF1600_StatePermute(uint64_t *state) { + int round; + + uint64_t Aba, Abe, Abi, Abo, Abu; + uint64_t Aga, Age, Agi, Ago, Agu; + uint64_t Aka, Ake, Aki, Ako, Aku; + uint64_t Ama, Ame, Ami, Amo, Amu; + uint64_t Asa, Ase, Asi, Aso, Asu; + uint64_t BCa, BCe, BCi, BCo, BCu; + uint64_t Da, De, Di, Do, Du; + uint64_t Eba, Ebe, Ebi, Ebo, Ebu; + uint64_t Ega, Ege, Egi, Ego, Egu; + uint64_t Eka, Eke, Eki, Eko, Eku; + uint64_t Ema, Eme, Emi, Emo, Emu; + uint64_t Esa, Ese, Esi, Eso, Esu; + + // copyFromState(A, state) + Aba = state[0]; + Abe = state[1]; + Abi = state[2]; + Abo = state[3]; + Abu = state[4]; + Aga = state[5]; + Age = state[6]; + Agi = state[7]; + Ago = state[8]; + Agu = state[9]; + Aka = state[10]; + Ake = state[11]; + Aki = state[12]; + Ako = state[13]; + Aku = state[14]; + Ama = state[15]; + Ame = state[16]; + Ami = state[17]; + Amo = state[18]; + Amu = state[19]; + Asa = state[20]; + Ase = state[21]; + Asi = state[22]; + Aso = state[23]; + Asu = state[24]; + + for (round = 0; round < NROUNDS; round += 2) { + // prepareTheta + BCa = Aba ^ Aga ^ Aka ^ Ama ^ Asa; + BCe = Abe ^ Age ^ Ake ^ Ame ^ Ase; + BCi = Abi ^ Agi ^ Aki ^ Ami ^ Asi; + BCo = Abo ^ Ago ^ Ako ^ Amo ^ Aso; + BCu = Abu ^ Agu ^ Aku ^ Amu ^ Asu; + + // thetaRhoPiChiIotaPrepareTheta(round , A, E) + Da = BCu ^ ROL(BCe, 1); + De = BCa ^ ROL(BCi, 1); + Di = BCe ^ ROL(BCo, 1); + Do = BCi ^ ROL(BCu, 1); + Du = BCo ^ ROL(BCa, 1); + + Aba ^= Da; + BCa = Aba; + Age ^= De; + BCe = ROL(Age, 44); + Aki ^= Di; + BCi = ROL(Aki, 43); + Amo ^= Do; + BCo = ROL(Amo, 21); + Asu ^= Du; + BCu = ROL(Asu, 14); + Eba = BCa ^ ((~BCe) & BCi); + Eba ^= KeccakF_RoundConstants[round]; + Ebe = BCe ^ ((~BCi) & BCo); + Ebi = BCi ^ ((~BCo) & BCu); + Ebo = BCo ^ ((~BCu) & BCa); + Ebu = BCu ^ ((~BCa) & BCe); + + Abo ^= Do; + BCa = ROL(Abo, 28); + 
Agu ^= Du; + BCe = ROL(Agu, 20); + Aka ^= Da; + BCi = ROL(Aka, 3); + Ame ^= De; + BCo = ROL(Ame, 45); + Asi ^= Di; + BCu = ROL(Asi, 61); + Ega = BCa ^ ((~BCe) & BCi); + Ege = BCe ^ ((~BCi) & BCo); + Egi = BCi ^ ((~BCo) & BCu); + Ego = BCo ^ ((~BCu) & BCa); + Egu = BCu ^ ((~BCa) & BCe); + + Abe ^= De; + BCa = ROL(Abe, 1); + Agi ^= Di; + BCe = ROL(Agi, 6); + Ako ^= Do; + BCi = ROL(Ako, 25); + Amu ^= Du; + BCo = ROL(Amu, 8); + Asa ^= Da; + BCu = ROL(Asa, 18); + Eka = BCa ^ ((~BCe) & BCi); + Eke = BCe ^ ((~BCi) & BCo); + Eki = BCi ^ ((~BCo) & BCu); + Eko = BCo ^ ((~BCu) & BCa); + Eku = BCu ^ ((~BCa) & BCe); + + Abu ^= Du; + BCa = ROL(Abu, 27); + Aga ^= Da; + BCe = ROL(Aga, 36); + Ake ^= De; + BCi = ROL(Ake, 10); + Ami ^= Di; + BCo = ROL(Ami, 15); + Aso ^= Do; + BCu = ROL(Aso, 56); + Ema = BCa ^ ((~BCe) & BCi); + Eme = BCe ^ ((~BCi) & BCo); + Emi = BCi ^ ((~BCo) & BCu); + Emo = BCo ^ ((~BCu) & BCa); + Emu = BCu ^ ((~BCa) & BCe); + + Abi ^= Di; + BCa = ROL(Abi, 62); + Ago ^= Do; + BCe = ROL(Ago, 55); + Aku ^= Du; + BCi = ROL(Aku, 39); + Ama ^= Da; + BCo = ROL(Ama, 41); + Ase ^= De; + BCu = ROL(Ase, 2); + Esa = BCa ^ ((~BCe) & BCi); + Ese = BCe ^ ((~BCi) & BCo); + Esi = BCi ^ ((~BCo) & BCu); + Eso = BCo ^ ((~BCu) & BCa); + Esu = BCu ^ ((~BCa) & BCe); + + // prepareTheta + BCa = Eba ^ Ega ^ Eka ^ Ema ^ Esa; + BCe = Ebe ^ Ege ^ Eke ^ Eme ^ Ese; + BCi = Ebi ^ Egi ^ Eki ^ Emi ^ Esi; + BCo = Ebo ^ Ego ^ Eko ^ Emo ^ Eso; + BCu = Ebu ^ Egu ^ Eku ^ Emu ^ Esu; + + // thetaRhoPiChiIotaPrepareTheta(round+1, E, A) + Da = BCu ^ ROL(BCe, 1); + De = BCa ^ ROL(BCi, 1); + Di = BCe ^ ROL(BCo, 1); + Do = BCi ^ ROL(BCu, 1); + Du = BCo ^ ROL(BCa, 1); + + Eba ^= Da; + BCa = Eba; + Ege ^= De; + BCe = ROL(Ege, 44); + Eki ^= Di; + BCi = ROL(Eki, 43); + Emo ^= Do; + BCo = ROL(Emo, 21); + Esu ^= Du; + BCu = ROL(Esu, 14); + Aba = BCa ^ ((~BCe) & BCi); + Aba ^= KeccakF_RoundConstants[round + 1]; + Abe = BCe ^ ((~BCi) & BCo); + Abi = BCi ^ ((~BCo) & BCu); + Abo = BCo ^ ((~BCu) & BCa); + Abu = BCu ^ 
((~BCa) & BCe); + + Ebo ^= Do; + BCa = ROL(Ebo, 28); + Egu ^= Du; + BCe = ROL(Egu, 20); + Eka ^= Da; + BCi = ROL(Eka, 3); + Eme ^= De; + BCo = ROL(Eme, 45); + Esi ^= Di; + BCu = ROL(Esi, 61); + Aga = BCa ^ ((~BCe) & BCi); + Age = BCe ^ ((~BCi) & BCo); + Agi = BCi ^ ((~BCo) & BCu); + Ago = BCo ^ ((~BCu) & BCa); + Agu = BCu ^ ((~BCa) & BCe); + + Ebe ^= De; + BCa = ROL(Ebe, 1); + Egi ^= Di; + BCe = ROL(Egi, 6); + Eko ^= Do; + BCi = ROL(Eko, 25); + Emu ^= Du; + BCo = ROL(Emu, 8); + Esa ^= Da; + BCu = ROL(Esa, 18); + Aka = BCa ^ ((~BCe) & BCi); + Ake = BCe ^ ((~BCi) & BCo); + Aki = BCi ^ ((~BCo) & BCu); + Ako = BCo ^ ((~BCu) & BCa); + Aku = BCu ^ ((~BCa) & BCe); + + Ebu ^= Du; + BCa = ROL(Ebu, 27); + Ega ^= Da; + BCe = ROL(Ega, 36); + Eke ^= De; + BCi = ROL(Eke, 10); + Emi ^= Di; + BCo = ROL(Emi, 15); + Eso ^= Do; + BCu = ROL(Eso, 56); + Ama = BCa ^ ((~BCe) & BCi); + Ame = BCe ^ ((~BCi) & BCo); + Ami = BCi ^ ((~BCo) & BCu); + Amo = BCo ^ ((~BCu) & BCa); + Amu = BCu ^ ((~BCa) & BCe); + + Ebi ^= Di; + BCa = ROL(Ebi, 62); + Ego ^= Do; + BCe = ROL(Ego, 55); + Eku ^= Du; + BCi = ROL(Eku, 39); + Ema ^= Da; + BCo = ROL(Ema, 41); + Ese ^= De; + BCu = ROL(Ese, 2); + Asa = BCa ^ ((~BCe) & BCi); + Ase = BCe ^ ((~BCi) & BCo); + Asi = BCi ^ ((~BCo) & BCu); + Aso = BCo ^ ((~BCu) & BCa); + Asu = BCu ^ ((~BCa) & BCe); + } + + // copyToState(state, A) + state[0] = Aba; + state[1] = Abe; + state[2] = Abi; + state[3] = Abo; + state[4] = Abu; + state[5] = Aga; + state[6] = Age; + state[7] = Agi; + state[8] = Ago; + state[9] = Agu; + state[10] = Aka; + state[11] = Ake; + state[12] = Aki; + state[13] = Ako; + state[14] = Aku; + state[15] = Ama; + state[16] = Ame; + state[17] = Ami; + state[18] = Amo; + state[19] = Amu; + state[20] = Asa; + state[21] = Ase; + state[22] = Asi; + state[23] = Aso; + state[24] = Asu; +} + +/************************************************* + * Name: keccak_absorb + * + * Description: Absorb step of Keccak; + * non-incremental, starts by zeroeing the state. 
+ * + * Arguments: - uint64_t *s: pointer to (uninitialized) output Keccak state + * - uint32_t r: rate in bytes (e.g., 168 for SHAKE128) + * - const uint8_t *m: pointer to input to be absorbed into s + * - size_t mlen: length of input in bytes + * - uint8_t p: domain-separation byte for different + * Keccak-derived functions + **************************************************/ +static void keccak_absorb(uint64_t *s, uint32_t r, const uint8_t *m, + size_t mlen, uint8_t p) { + size_t i; + uint8_t t[200]; + + /* Zero state */ + for (i = 0; i < 25; ++i) { + s[i] = 0; + } + + while (mlen >= r) { + for (i = 0; i < r / 8; ++i) { + s[i] ^= load64(m + 8 * i); + } + + KeccakF1600_StatePermute(s); + mlen -= r; + m += r; + } + + for (i = 0; i < r; ++i) { + t[i] = 0; + } + for (i = 0; i < mlen; ++i) { + t[i] = m[i]; + } + t[i] = p; + t[r - 1] |= 128; + for (i = 0; i < r / 8; ++i) { + s[i] ^= load64(t + 8 * i); + } +} + +/************************************************* + * Name: keccak_squeezeblocks + * + * Description: Squeeze step of Keccak. Squeezes full blocks of r bytes each. + * Modifies the state. Can be called multiple times to keep + * squeezing, i.e., is incremental. + * + * Arguments: - uint8_t *h: pointer to output blocks + * - size_t nblocks: number of blocks to be + * squeezed (written to h) + * - uint64_t *s: pointer to input/output Keccak state + * - uint32_t r: rate in bytes (e.g., 168 for SHAKE128) + **************************************************/ +static void keccak_squeezeblocks(uint8_t *h, size_t nblocks, + uint64_t *s, uint32_t r) { + while (nblocks > 0) { + KeccakF1600_StatePermute(s); + for (size_t i = 0; i < (r >> 3); i++) { + store64(h + 8 * i, s[i]); + } + h += r; + nblocks--; + } +} + +/************************************************* + * Name: keccak_inc_init + * + * Description: Initializes the incremental Keccak state to zero. 
+ * + * Arguments: - uint64_t *s_inc: pointer to input/output incremental state + * First 25 values represent Keccak state. + * 26th value represents either the number of absorbed bytes + * that have not been permuted, or not-yet-squeezed bytes. + **************************************************/ +static void keccak_inc_init(uint64_t *s_inc) { + size_t i; + + for (i = 0; i < 25; ++i) { + s_inc[i] = 0; + } + s_inc[25] = 0; +} + +/************************************************* + * Name: keccak_inc_absorb + * + * Description: Incremental keccak absorb + * Preceded by keccak_inc_init, succeeded by keccak_inc_finalize + * + * Arguments: - uint64_t *s_inc: pointer to input/output incremental state + * First 25 values represent Keccak state. + * 26th value represents either the number of absorbed bytes + * that have not been permuted, or not-yet-squeezed bytes. + * - uint32_t r: rate in bytes (e.g., 168 for SHAKE128) + * - const uint8_t *m: pointer to input to be absorbed into s + * - size_t mlen: length of input in bytes + **************************************************/ +static void keccak_inc_absorb(uint64_t *s_inc, uint32_t r, const uint8_t *m, + size_t mlen) { + size_t i; + + /* Recall that s_inc[25] is the non-absorbed bytes xored into the state */ + while (mlen + s_inc[25] >= r) { + for (i = 0; i < r - s_inc[25]; i++) { + /* Take the i'th byte from message + xor with the s_inc[25] + i'th byte of the state; little-endian */ + s_inc[(s_inc[25] + i) >> 3] ^= (uint64_t)m[i] << (8 * ((s_inc[25] + i) & 0x07)); + } + mlen -= (size_t)(r - s_inc[25]); + m += r - s_inc[25]; + s_inc[25] = 0; + + KeccakF1600_StatePermute(s_inc); + } + + for (i = 0; i < mlen; i++) { + s_inc[(s_inc[25] + i) >> 3] ^= (uint64_t)m[i] << (8 * ((s_inc[25] + i) & 0x07)); + } + s_inc[25] += mlen; +} + +/************************************************* + * Name: keccak_inc_finalize + * + * Description: Finalizes Keccak absorb phase, prepares for squeezing + * + * Arguments: - uint64_t *s_inc: 
pointer to input/output incremental state + * First 25 values represent Keccak state. + * 26th value represents either the number of absorbed bytes + * that have not been permuted, or not-yet-squeezed bytes. + * - uint32_t r: rate in bytes (e.g., 168 for SHAKE128) + * - uint8_t p: domain-separation byte for different + * Keccak-derived functions + **************************************************/ +static void keccak_inc_finalize(uint64_t *s_inc, uint32_t r, uint8_t p) { + /* After keccak_inc_absorb, we are guaranteed that s_inc[25] < r, + so we can always use one more byte for p in the current state. */ + s_inc[s_inc[25] >> 3] ^= (uint64_t)p << (8 * (s_inc[25] & 0x07)); + s_inc[(r - 1) >> 3] ^= (uint64_t)128 << (8 * ((r - 1) & 0x07)); + s_inc[25] = 0; +} + +/************************************************* + * Name: keccak_inc_squeeze + * + * Description: Incremental Keccak squeeze; can be called on byte-level + * + * Arguments: - uint8_t *h: pointer to output bytes + * - size_t outlen: number of bytes to be squeezed + * - uint64_t *s_inc: pointer to input/output incremental state + * First 25 values represent Keccak state. + * 26th value represents either the number of absorbed bytes + * that have not been permuted, or not-yet-squeezed bytes. + * - uint32_t r: rate in bytes (e.g., 168 for SHAKE128) + **************************************************/ +static void keccak_inc_squeeze(uint8_t *h, size_t outlen, + uint64_t *s_inc, uint32_t r) { + size_t i; + + /* First consume any bytes we still have sitting around */ + for (i = 0; i < outlen && i < s_inc[25]; i++) { + /* There are s_inc[25] bytes left, so r - s_inc[25] is the first + available byte. We consume from there, i.e., up to r. 
*/ + h[i] = (uint8_t)(s_inc[(r - s_inc[25] + i) >> 3] >> (8 * ((r - s_inc[25] + i) & 0x07))); + } + h += i; + outlen -= i; + s_inc[25] -= i; + + /* Then squeeze the remaining necessary blocks */ + while (outlen > 0) { + KeccakF1600_StatePermute(s_inc); + + for (i = 0; i < outlen && i < r; i++) { + h[i] = (uint8_t)(s_inc[i >> 3] >> (8 * (i & 0x07))); + } + h += i; + outlen -= i; + s_inc[25] = r - i; + } +} + +void shake256_inc_init(uint64_t *s_inc) { + keccak_inc_init(s_inc); +} + +void shake256_inc_absorb(uint64_t *s_inc, const uint8_t *input, size_t inlen) { + keccak_inc_absorb(s_inc, SHAKE256_RATE, input, inlen); +} + +void shake256_inc_finalize(uint64_t *s_inc) { + keccak_inc_finalize(s_inc, SHAKE256_RATE, 0x1F); +} + +void shake256_inc_squeeze(uint8_t *output, size_t outlen, uint64_t *s_inc) { + keccak_inc_squeeze(output, outlen, s_inc, SHAKE256_RATE); +} + +/************************************************* + * Name: shake256_absorb + * + * Description: Absorb step of the SHAKE256 XOF. + * non-incremental, starts by zeroeing the state. + * + * Arguments: - uint64_t *s: pointer to (uninitialized) output Keccak state + * - const uint8_t *input: pointer to input to be absorbed + * into s + * - size_t inlen: length of input in bytes + **************************************************/ +void shake256_absorb(uint64_t *s, const uint8_t *input, size_t inlen) { + keccak_absorb(s, SHAKE256_RATE, input, inlen, 0x1F); +} + +/************************************************* + * Name: shake256_squeezeblocks + * + * Description: Squeeze step of SHAKE256 XOF. Squeezes full blocks of + * SHAKE256_RATE bytes each. Modifies the state. Can be called + * multiple times to keep squeezing, i.e., is incremental. 
+ * + * Arguments: - uint8_t *output: pointer to output blocks + * - size_t nblocks: number of blocks to be squeezed + * (written to output) + * - uint64_t *s: pointer to input/output Keccak state + **************************************************/ +void shake256_squeezeblocks(uint8_t *output, size_t nblocks, uint64_t *s) { + keccak_squeezeblocks(output, nblocks, s, SHAKE256_RATE); +} + +/************************************************* + * Name: shake256 + * + * Description: SHAKE256 XOF with non-incremental API + * + * Arguments: - uint8_t *output: pointer to output + * - size_t outlen: requested output length in bytes + * - const uint8_t *input: pointer to input + * - size_t inlen: length of input in bytes + **************************************************/ +void shake256(uint8_t *output, size_t outlen, + const uint8_t *input, size_t inlen) { + size_t nblocks = outlen / SHAKE256_RATE; + uint8_t t[SHAKE256_RATE]; + uint64_t s[25]; + + shake256_absorb(s, input, inlen); + shake256_squeezeblocks(output, nblocks, s); + + output += nblocks * SHAKE256_RATE; + outlen -= nblocks * SHAKE256_RATE; + + if (outlen) { + shake256_squeezeblocks(t, 1, s); + for (size_t i = 0; i < outlen; ++i) { + output[i] = t[i]; + } + } +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/src/hash_shake.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/src/hash_shake.c new file mode 100644 index 0000000..87e8992 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/src/hash_shake.c @@ -0,0 +1,98 @@ +#include +#include + +#include "../../../app/include/address.h" +#include "../../../app/include/hash.h" +#include "../../../app/include/params.h" +#include "../../../app/include/utils.h" + +#include "../include/fips202.h" + +/* For SHAKE256, there is no immediate reason to initialize at the start, + so this function is an empty operation. 
*/ +void initialize_hash_function(spx_ctx* ctx) +{ + (void)ctx; /* Suppress an 'unused parameter' warning. */ +} + +/* + * Computes PRF(pk_seed, sk_seed, addr) + */ +void prf_addr(unsigned char *out, const spx_ctx *ctx, + const uint32_t addr[8]) +{ + unsigned char buf[2*SPX_N + SPX_ADDR_BYTES]; + + memcpy(buf, ctx->pub_seed, SPX_N); + memcpy(buf + SPX_N, addr, SPX_ADDR_BYTES); + memcpy(buf + SPX_N + SPX_ADDR_BYTES, ctx->sk_seed, SPX_N); + + shake256(out, SPX_N, buf, 2*SPX_N + SPX_ADDR_BYTES); +} + +/** + * Computes the message-dependent randomness R, using a secret seed and an + * optional randomization value as well as the message. + */ +void gen_message_random(unsigned char *R, const unsigned char *sk_prf, + const unsigned char *optrand, + const unsigned char *m, unsigned long long mlen, + const spx_ctx *ctx) +{ + (void)ctx; + uint64_t s_inc[26]; + + shake256_inc_init(s_inc); + shake256_inc_absorb(s_inc, sk_prf, SPX_N); + shake256_inc_absorb(s_inc, optrand, SPX_N); + shake256_inc_absorb(s_inc, m, mlen); + shake256_inc_finalize(s_inc); + shake256_inc_squeeze(R, SPX_N, s_inc); +} + +/** + * Computes the message hash using R, the public key, and the message. + * Outputs the message digest and the index of the leaf. The index is split in + * the tree index and the leaf index, for convenient copying to an address. 
+ */ +void hash_message(unsigned char *digest, uint64_t *tree, uint32_t *leaf_idx, + const unsigned char *R, const unsigned char *pk, + const unsigned char *m, unsigned long long mlen, + const spx_ctx *ctx) +{ + (void)ctx; +#define SPX_TREE_BITS (SPX_TREE_HEIGHT * (SPX_D - 1)) +#define SPX_TREE_BYTES ((SPX_TREE_BITS + 7) / 8) +#define SPX_LEAF_BITS SPX_TREE_HEIGHT +#define SPX_LEAF_BYTES ((SPX_LEAF_BITS + 7) / 8) +#define SPX_DGST_BYTES (SPX_FORS_MSG_BYTES + SPX_TREE_BYTES + SPX_LEAF_BYTES) + + unsigned char buf[SPX_DGST_BYTES]; + unsigned char *bufp = buf; + uint64_t s_inc[26]; + + shake256_inc_init(s_inc); + shake256_inc_absorb(s_inc, R, SPX_N); + shake256_inc_absorb(s_inc, pk, SPX_PK_BYTES); + shake256_inc_absorb(s_inc, m, mlen); + shake256_inc_finalize(s_inc); + shake256_inc_squeeze(buf, SPX_DGST_BYTES, s_inc); + + memcpy(digest, bufp, SPX_FORS_MSG_BYTES); + bufp += SPX_FORS_MSG_BYTES; + +#if SPX_TREE_BITS > 64 + #error For given height and depth, 64 bits cannot represent all subtrees +#endif + + if (SPX_D == 1) { + *tree = 0; + } else { + *tree = bytes_to_ull(bufp, SPX_TREE_BYTES); + *tree &= (~(uint64_t)0) >> (64 - SPX_TREE_BITS); + } + bufp += SPX_TREE_BYTES; + + *leaf_idx = (uint32_t)bytes_to_ull(bufp, SPX_LEAF_BYTES); + *leaf_idx &= (~(uint32_t)0) >> (32 - SPX_LEAF_BITS); +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/src/thash_shake_robust.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/src/thash_shake_robust.c new file mode 100644 index 0000000..fb22444 --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/src/thash_shake_robust.c @@ -0,0 +1,31 @@ +#include +#include + +#include "../../../app/include/address.h" +#include "../../../app/include/params.h" +#include "../../../app/include/thash.h" +#include "../../../app/include/utils.h" + +#include "../include/fips202.h" + +/** + * Takes an array of inblocks concatenated arrays of SPX_N bytes. 
+ */ +void thash(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]) +{ + SPX_VLA(uint8_t, buf, SPX_N + SPX_ADDR_BYTES + inblocks*SPX_N); + SPX_VLA(uint8_t, bitmask, inblocks * SPX_N); + unsigned int i; + + memcpy(buf, ctx->pub_seed, SPX_N); + memcpy(buf + SPX_N, addr, SPX_ADDR_BYTES); + + shake256(bitmask, inblocks * SPX_N, buf, SPX_N + SPX_ADDR_BYTES); + + for (i = 0; i < inblocks * SPX_N; i++) { + buf[SPX_N + SPX_ADDR_BYTES + i] = in[i] ^ bitmask[i]; + } + + shake256(out, SPX_N, buf, SPX_N + SPX_ADDR_BYTES + inblocks*SPX_N); +} diff --git a/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/src/thash_shake_simple.c b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/src/thash_shake_simple.c new file mode 100644 index 0000000..71bedab --- /dev/null +++ b/examples/sphincs_PQCgenKAT_sign_blake_128f_simple/test_case/lib/shake/src/thash_shake_simple.c @@ -0,0 +1,24 @@ +#include +#include + +#include "../../../app/include/address.h" +#include "../../../app/include/params.h" +#include "../../../app/include/thash.h" +#include "../../../app/include/utils.h" + +#include "../include/fips202.h" + +/** + * Takes an array of inblocks concatenated arrays of SPX_N bytes. 
+ */ +void thash(unsigned char *out, const unsigned char *in, unsigned int inblocks, + const spx_ctx *ctx, uint32_t addr[8]) +{ + SPX_VLA(uint8_t, buf, SPX_N + SPX_ADDR_BYTES + inblocks*SPX_N); + + memcpy(buf, ctx->pub_seed, SPX_N); + memcpy(buf + SPX_N, addr, SPX_ADDR_BYTES); + memcpy(buf + SPX_N + SPX_ADDR_BYTES, in, inblocks * SPX_N); + + shake256(out, SPX_N, buf, SPX_N + SPX_ADDR_BYTES + inblocks*SPX_N); +} diff --git a/pyproject.toml b/pyproject.toml index 9ff11e0..e026c2c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,10 @@ requires-python = "~=3.13.0" dependencies = [ "clang==21.1.7", "dspy==3.1.2", + "kiss-agent-framework==0.2.27", "hydra-core==1.3.2", + "networkx==3.6.1", + "tomlkit>=0.14.0", "tree-sitter==0.25.2", "tree-sitter-rust==0.24.0", ] @@ -18,12 +21,12 @@ dependencies = [ dev = [ "basedpyright==1.29.4", "pre-commit==4.2.0", - "pytest==8.4.0", + "pytest==9.0.3", "ruff==0.11.13", ] [build-system] -requires = ["uv_build>=0.10.2,<0.11.0"] +requires = ["uv_build>=0.10.9,<0.11.0"] build-backend = "uv_build" [tool.basedpyright] diff --git a/src/ideas/__init__.py b/src/ideas/__init__.py index cdad296..a3693c7 100644 --- a/src/ideas/__init__.py +++ b/src/ideas/__init__.py @@ -7,7 +7,9 @@ from .ast import create_translation_unit, extract_info_c, TreeResult from .model import ModelConfig, GenerateConfig from .translate_recurrent import RecurrentTranslator -from .translate_symbol import SymbolTranslator +from .translate_snippet import SnippetTranslator +from .wrapper import WrapperGenerator +from .test_symbol import SymbolTester from clang.cindex import Config __all__ = [ @@ -17,7 +19,9 @@ "ModelConfig", "GenerateConfig", "RecurrentTranslator", - "SymbolTranslator", + "SnippetTranslator", + "WrapperGenerator", + "SymbolTester", ] # NOTE: .so is *nix specific diff --git a/src/ideas/agents/printer.py b/src/ideas/agents/printer.py new file mode 100644 index 0000000..e0d7b4f --- /dev/null +++ b/src/ideas/agents/printer.py @@ -0,0 +1,64 @@ +# +# 
Copyright (C) 2026 Intel Corporation +# +# SPDX-License-Identifier: Apache-2.0 +# + +import sys +import logging +from typing import TextIO +from rich.text import Text + +from rich.console import Console + +from kiss.core.print_to_console import ConsolePrinter + + +class ConsoleTee: + """ + Duck-typed file-like that writes to both a real stream and a logger. + The real stream gets raw output (preserving ANSI). + The logger only receives complete lines (ANSI-stripped by the handler formatter). + """ + + class StripANSIFormatter(logging.Formatter): + def format(self, record: logging.LogRecord) -> str: + return Text.from_ansi(super().format(record)).plain + + def __init__(self, stream: TextIO, logger: logging.Logger, level: int = logging.INFO): + self.stream = stream + self.logger = logger + self.level = level + self.buf = "" + self.encoding: str = getattr(stream, "encoding", "utf-8") + + def write(self, s: str) -> int: + self.stream.write(s) + self.buf += s + # Write one line at a time + while "\n" in self.buf: + line, self.buf = self.buf.split("\n", 1) + if line: + self.logger.log(self.level, line) + return len(s) + + def flush(self) -> None: + self.stream.flush() + + def close(self) -> None: + if self.buf: + self.logger.log(self.level, self.buf) + self.buf = "" + + def fileno(self) -> int: + return self.stream.fileno() + + def isatty(self) -> bool: + return hasattr(self.stream, "isatty") and self.stream.isatty() + + +class LoggingConsolePrinter(ConsolePrinter): + def __init__(self, logger: logging.Logger, level: int = logging.INFO): + tee = ConsoleTee(sys.__stdout__ or sys.stdout, logger, level) + super().__init__(file=tee) + self._console = Console(highlight=False, file=tee) # type: ignore[reportArgumentType] diff --git a/src/ideas/agents/testgen.py b/src/ideas/agents/testgen.py new file mode 100644 index 0000000..bce81b2 --- /dev/null +++ b/src/ideas/agents/testgen.py @@ -0,0 +1,523 @@ +# +# Copyright (C) 2026 Intel Corporation +# +# SPDX-License-Identifier: 
Apache-2.0 +# + + +import sys +import os +import logging +import tempfile +import textwrap +import shutil +from pathlib import Path +from dataclasses import dataclass + +import hydra +from omegaconf import MISSING +from hydra.core.config_store import ConfigStore +from hydra.core.hydra_config import HydraConfig + +from ideas.agents.printer import ConsoleTee, LoggingConsolePrinter +from ideas.tools import run_subprocess + +from kiss.agents.sorcar.useful_tools import UsefulTools +from kiss.core.relentless_agent import RelentlessAgent + +logger = logging.getLogger("ideas.agents.testgen") + + +@dataclass +class TestgenConfig: + model: str = MISSING + c_code: Path = MISSING + project_name: str = MISSING + test_vectors_out: Path = MISSING + test_crate_out: Path = MISSING + + num_vectors: int = 3 + desired_symbols: int = 3 + + def __post_init__(self): + if not self.c_code.exists(): + raise ValueError( + f"c_code must be a directory containing a CMake C project or a single C file, got: {self.c_code}" + ) + + +@dataclass +class TestgenInstructions: + analyze_dir: str = textwrap.dedent( + """ + ## Step 1 – Analyze the C project ## + Carefully read and understand the C project rooted at `{c_proj_path}`. + + Inspect the CMakeLists.txt to learn: + - the project / library name + - all source files and include directories + - any required link libraries (e.g. `-lm`) + + List **all** top-level (exported / non-static) library functions declared in the + public header(s) under `{c_proj_path}/include`. + Write only their function names (no declaration or body) to `{c_proj_path}/functions.lst`, + newline separated. + """ + ) + + analyze_file: str = textwrap.dedent( + """ + ## Step 1 – Analyze the standalone C file ## + Carefully read and understand the single C source file at + `{c_proj_path}/{c_filename}`. + + This is a **standalone** C file (no CMake project, no separate headers). + All declarations and definitions live in this one file. 
+ + List **all** non-static functions defined in the file. + Write only their function names (no declaration or body) to `{c_proj_path}/functions.lst`, + newline separated. + """ + ) + + analyze_select: str = textwrap.dedent( + """ + From that list, select **up to {desired_symbols}** functions that are the best + candidates for black-box testing. + + The selected functions **must** have all their dependencies defined in the project. + If they reference functions that are **only declared**, they **cannot** be tested. + + Only select fewer than {desired_symbols} if there are not + that many functions. Prefer functions that: + - are **high-level entry points** (i.e. they orchestrate significant portions + of the code's logic rather than being small utility helpers) + - accept rich input (structs, arrays, multiple parameters) so that a single + call exercises many internal code-paths + - together give broad coverage of the public API + Write only their function names (no declaration or body) to `{c_proj_path}/selected.lst`, + newline separated. + + For each of the selected functions analyze: which parameters are **input-only**, + which are **output-only** (written by the callee), and which are **in/out** to understand + how to set up its test data and collect its outputs. + """ + ) + + build_rs: str = textwrap.dedent( + """ + ## Step 2 – Create a Rust crate with a `build.rs` that compiles and links the C project ## + Initialize a new Rust **library** crate at `{rs_crate_path}`: + ```bash + cargo init --lib --edition=2024 --vcs none --name=<crate_name> {rs_crate_path} + ``` + + Add the `cc` build dependency and the following dev-dependencies to `Cargo.toml`: + ```toml + [build-dependencies] + cc = "1.2.59" + + [dev-dependencies] + serde = {{ version = "1", features = ["derive"] }} + serde_json = "1" + ``` + + Write a `{rs_crate_path}/build.rs` that: + 1. Uses `cc::Build::new()` with `.compiler("clang")` to compile **all** C source files discovered in Step 1. + 2.
Adds the correct include directories so the C headers are found. + 3. Links any extra system libraries the C project requires (e.g. `println!("cargo::rustc-link-lib=m");`). + 4. Passes `-w` (suppress warnings) and `-std=c99`. + """ + ) + + bindgen_dir: str = textwrap.dedent( + """ + ### Obtain the exact FFI API with `bindgen` ### + Before populating the crate sources, use `bindgen` on the shell to generate the + correct Rust FFI declarations for the selected functions from Step 1. + + Run a **separate** `bindgen` invocation for each function and **redirect each + output directly** into its own binding module file: + ```bash + mkdir -p {rs_crate_path}/src/binding + BINDGEN_EXTRA_CLANG_ARGS="<include_args>" bindgen \ + --disable-header-comment --no-doc-comments --no-layout-tests \ + <header_path> \ + --allowlist-function <function_name> \ + > {rs_crate_path}/src/binding/<function_name>.rs + ``` + + Where you must properly identify: + - `<include_args>` – one or more `-I` arguments pointing to the + C include directories discovered in Step 1. If multiple header directories + are needed, list them all as space-separated `-I` arguments inside + `BINDGEN_EXTRA_CLANG_ARGS`. + - `<header_path>` – the public header that declares the function. + - `<function_name>` – the exact C function name (one per invocation). + """ + ) + + bindgen_file: str = textwrap.dedent( + """ + ### Obtain the exact FFI API with `bindgen` ### + Before populating the crate sources, use `bindgen` on the shell to generate the + correct Rust FFI declarations for the selected functions from Step 1. + + Since this is a standalone C file with no separate headers, run `bindgen` + directly on the source file.
Run a **separate** invocation for each function + and redirect each output directly into its own binding module file: + ```bash + mkdir -p {rs_crate_path}/src/binding + bindgen \ + --disable-header-comment --no-doc-comments --no-layout-tests \ + {c_proj_path}/{c_filename} \ + --allowlist-function <function_name> \ + > {rs_crate_path}/src/binding/<function_name>.rs + ``` + + Where `<function_name>` is the exact C function name (one per invocation). + """ + ) + + build_rs_librs: str = textwrap.dedent( + """ + ### Critical: crate module layout ### + The crate **must** use a modular layout that keeps each symbol's bindgen output + in its own file. Create the following structure: + + 1. **`{rs_crate_path}/src/lib.rs`** – contains **only**: + ```rust + pub mod binding; + ``` + + 2. **`{rs_crate_path}/src/binding.rs`** – contains one `pub mod <function_name>;` + line for **each** selected function. Example (if the selected functions are + `foo` and `bar`): + ```rust + pub mod foo; + pub mod bar; + ``` + + 3. **`{rs_crate_path}/src/binding/<function_name>.rs`** – each file is the + **exact, unmodified** output of the corresponding `bindgen` invocation from + the previous step (already written there by the shell redirects above). + Do **not** hand-edit these files. + + Build using `cargo build` to confirm the C code compiles and links. + """ + ) + + gen_data_collection_tests: str = textwrap.dedent( + """ + ## Step 3 – Generate a data-collection test harness ## + Create `{rs_crate_path}/tests/test_collect.rs`. + + ### 3a – FFI linkage (critical!) ### + **Do NOT** declare `unsafe extern "C"` blocks in the test file. + Instead, import the FFI functions through the binding modules using + **absolute crate paths**. The crate name is derived from the `name` field in + `Cargo.toml` (with hyphens replaced by underscores). Import like this: + ```rust + use <crate_name>::binding::<function_name>::<function_name>; + ``` + This is **mandatory** because the C static library is attached to the + library crate by `build.rs`.
If the test declares its own `extern "C"` + block the linker will NOT find the C symbols and you will get + `undefined symbol` errors. + + ### 3b – `#[repr(C)]` struct mirrors ### + Import the `#[repr(C)]` struct types through the binding modules (they were + generated by `bindgen` and placed in `src/binding/<function_name>.rs`): + ```rust + use <crate_name>::binding::<function_name>::<struct_name>; + ``` + Then add `#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]` to + **local** wrapper types or re-definitions of those structs that you need for + JSON serialization. Because `serde` derives cannot be added to a type imported + from another crate, you may need to define local copies of the structs in the + test file with the exact same layout and field names, adding the serde derives. + Make sure the field names, types, and order match the `bindgen`-generated + definitions exactly. + + ### 3c – Helper: JSON state container ### + Define a `#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]` struct + called `LibState` whose field names match the parameters of the C function + (one field per parameter, using the parameter name from the C header). + **Important – pointer-typed fields**: any field in the C signature that is a + pointer (e.g. `*mut T`, `*const T`) must NOT be hard-coded as a numeric + address. Instead: + - Instantiate the pointed-to data structure as a local `let mut` variable + with values populating all its fields. + - Store the underlying data structure in the `LibState` struct. + - Obtain a raw pointer to it (e.g. `&mut local_var as *mut T`) and pass that pointer + to the function-under-test. + This ensures the pointer is valid for the duration of the call and that the + test does not rely on hard-coded addresses. + If the function returns a value, add a `returns` field. + Nested C structs must be represented by the mirrored Rust struct – never flatten + them.
+ + Define a wrapper: + ```rust + #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] + struct TestVector {{ + lib_state_in: LibState, + lib_state_out: LibState, + }} + ``` + + ### 3d – Data-collection test functions ### + For each set of representative input values you choose (at least {num_vectors} + distinct sets), write a `#[test]` function named `collect_vector_<n>` that: + 1. Constructs a `LibState` with the chosen inputs (and outputs / return field zeroed). + 2. Clones it into `lib_state_in`. + 3. Calls the C function through `unsafe`, using the symbol imported from the + binding module, passing (and receiving) mutable references where needed. + 4. Captures the post-call state into `lib_state_out`. + 5. Asserts the call did not obviously fail (e.g. no null-pointer dereference – a + simple `assert!` that pointers are non-null or that expected invariants hold). + 6. Serializes a `TestVector {{ lib_state_in, lib_state_out }}` to pretty JSON and + prints it to stdout with: + ```rust + println!("{{}}", serde_json::to_string_pretty(&vector).unwrap()); + ``` + + The chosen inputs should exercise a variety of code-paths in the C function: + - a zeroed / default / neutral input + - a "normal" input with representative non-trivial values + - an edge-case or boundary input + + Build and run tests in the crate with: + ```bash + cargo test --manifest-path {rs_crate_path}/Cargo.toml --quiet -- --nocapture + ``` + Verify that all tests pass and JSON is printed. + """ + ) + + write_test_vectors: str = textwrap.dedent( + """ + ## Step 4 – Save test vectors as JSON files ## + Create the directory `{test_vectors_path}`. + + Run each data-collection test **individually** and capture its stdout. + Write the JSON output of each `collect_vector_<n>` test to + `{test_vectors_path}/<n>.json`, where `<n>` is the 1-based index.
+ + Use `uv` to extract the JSON reliably – do NOT rely on grep/sed: + ```bash + cargo test collect_vector_<n> -- --nocapture 2>/dev/null | \ + uv run python -c " + import sys, json + buf = sys.stdin.read() + start = buf.index('{{') + end = buf.rindex('}}') + 1 + obj = json.loads(buf[start:end]) + print(json.dumps(obj, indent=2)) + " > {test_vectors_path}/<n>.json + ``` + + Verify each file is valid JSON with the expected `lib_state_in` / `lib_state_out` + structure by running `uv run python -m json.tool {test_vectors_path}/<n>.json`. + """ + ) + + write_assert_tests: str = textwrap.dedent( + """ + ## Step 5 – Write assert-style Rust tests ## + Create `{rs_crate_path}/tests/test_assert.rs`. + + Import the FFI symbols through the binding modules, the same way + `test_collect.rs` does: + ```rust + use <crate_name>::binding::<function_name>::<function_name>; + ``` + If the function call requires `#[repr(C)]` structs, import them too: + ```rust + use <crate_name>::binding::<function_name>::<struct_name>; + ``` + + **Important**: `test_assert.rs` must **not** depend on `serde` or `serde_json`. + Because the crate's types are exact `bindgen` output (no serde derives), the + assert tests reconstruct all values as **plain Rust literals** taken from the + JSON files saved in Step 4. Do **not** `#[derive(Serialize, Deserialize)]` on + any type in this file and do **not** add `use serde*` or `use serde_json*`. + + For **each** JSON test vector saved in Step 4, write a `#[test]` function named + `test_vector_<n>` that: + 1. Reconstructs the `lib_state_in` values from the JSON file as Rust literals. + 2. Calls the C function through `unsafe` using the imported symbol. + 3. Asserts **every** field of the output state matches `lib_state_out` from the + JSON file. + - For floating-point fields use an epsilon comparison: + ```rust + assert!((actual - expected).abs() < 1e-4, + "field `<field>`: expected {{expected}}, got {{actual}}"); + - For integer / bool fields use `assert_eq!`.
+ - For pointer-typed output fields, dereference the pointer (inside `unsafe`) + and compare the pointed-to value rather than the pointer address itself. + + Once done, run: + ```bash + cargo test --manifest-path {rs_crate_path}/Cargo.toml --quiet --test test_assert + ``` + All tests **must** pass. + """ + ) + + deny_dependencies: str = textwrap.dedent( + """ + ## External dependencies ## + Apart from `cc` (build-dependency), `serde`, and `serde_json` (dev-dependencies), + do not add any other dependencies to the Cargo.toml file. + """ + ) + + simple_exit: str = textwrap.dedent( + """ + Once all assert tests pass and JSON files are written, finish the task and exit. + Do not over-verify or generate extensive reports. + """ + ) + + @classmethod + def dir_task_description(cls) -> str: + return ( + cls.analyze_dir + + cls.analyze_select + + cls.build_rs + + cls.bindgen_dir + + cls.build_rs_librs + + cls.gen_data_collection_tests + + cls.write_test_vectors + + cls.write_assert_tests + + cls.deny_dependencies + + cls.simple_exit + ) + + @classmethod + def file_task_description(cls) -> str: + return ( + cls.analyze_file + + cls.analyze_select + + cls.build_rs + + cls.bindgen_file + + cls.build_rs_librs + + cls.gen_data_collection_tests + + cls.write_test_vectors + + cls.write_assert_tests + + cls.deny_dependencies + + cls.simple_exit + ) + + +cs = ConfigStore.instance() +cs.store(name="testgen", node=TestgenConfig) + + +def get_tools(): + useful_tools = UsefulTools() + return [useful_tools.Bash, useful_tools.Read, useful_tools.Edit, useful_tools.Write] + + +@hydra.main(version_base=None, config_name="testgen") +def main(cfg: TestgenConfig) -> None: + try: + _main(cfg) + except Exception as e: + logger.exception(e) + sys.exit(1) + + +def _main(cfg: TestgenConfig) -> None: + output_dir = Path(HydraConfig.get().runtime.output_dir) + logger.info(f"Saving results to {output_dir}") + + # Separately log the complete trajectory + logger_trajectory = 
logging.getLogger("ideas.testgen.trajectory") + logger_trajectory.propagate = False + fh = logging.FileHandler(output_dir / "testgen_trajectory.log") + fh.setFormatter(ConsoleTee.StripANSIFormatter("%(asctime)s %(message)s")) + logger_trajectory.addHandler(fh) + # Simultaneous print and log to file + printer = LoggingConsolePrinter(logger=logger_trajectory) + + name = "C library test vector generator" + agent = RelentlessAgent(name=name) + + project_name = cfg.project_name + work_dir = Path(tempfile.mkdtemp()) / project_name + os.makedirs(work_dir) + + # Copy the C project into the working directory + c_proj_path = work_dir / "test_case" + is_single_file = Path(cfg.c_code).is_file() + if is_single_file: + # Coherent /tmp and on-disk paths + c_proj_path = work_dir / cfg.c_code.parent + os.makedirs(c_proj_path) + shutil.copy(cfg.c_code, c_proj_path / cfg.c_code.name) + else: + shutil.copytree(cfg.c_code, c_proj_path, dirs_exist_ok=True) + + # Paths the agent will populate + rs_crate_path = work_dir / (cfg.test_crate_out if is_single_file else "testgen_crate") + test_vectors_path = work_dir / "test_vectors" + + # Build the task prompt + task_description = ( + TestgenInstructions.file_task_description() + if is_single_file + else TestgenInstructions.dir_task_description() + ) + arguments = { + "c_proj_path": c_proj_path.relative_to(work_dir), + "rs_crate_path": rs_crate_path.relative_to(work_dir), + "test_vectors_path": test_vectors_path.relative_to(work_dir), + "num_vectors": cfg.num_vectors, + "desired_symbols": cfg.desired_symbols, + } + if is_single_file: + arguments["c_filename"] = cfg.c_code.name + task_description = task_description.format(**arguments) + + # Run agent in the work directory + original_dir = os.getcwd() + os.chdir(work_dir) + + agent.run( + model_name=cfg.model, + system_instructions="", + prompt_template=task_description, + max_steps=100, + max_budget=4, + max_sub_sessions=1, + work_dir=str(work_dir), + tools=get_tools(), + printer=printer, + 
verbose=True, + ) + # Verify that assertion tests pass + cargo_toml = work_dir / cfg.test_crate_out / "Cargo.toml" + ok, output, error, returncode = run_subprocess( + ["cargo", "test", "--manifest-path", str(cargo_toml), "--test", "test_assert"], + timeout=60, + ) + if not ok: + raise RuntimeError( + f"Assert tests failed for target {project_name}: {error}! Tests will not be used during hybrid build!" + ) + os.chdir(original_dir) + + # Copy test vectors + shutil.copytree(test_vectors_path, cfg.test_vectors_out, dirs_exist_ok=True) + # Copy test crate + shutil.copytree(rs_crate_path, cfg.test_crate_out, dirs_exist_ok=True) + # Copy C analysis results + shutil.copy(c_proj_path / "functions.lst", cfg.test_crate_out / "functions.lst") + shutil.copy(c_proj_path / "selected.lst", cfg.test_crate_out / "selected.lst") + + +if __name__ == "__main__": + main() diff --git a/src/ideas/ast.py b/src/ideas/ast.py index e1e82e5..5efa4f4 100644 --- a/src/ideas/ast.py +++ b/src/ideas/ast.py @@ -4,15 +4,22 @@ # SPDX-License-Identifier: Apache-2.0 # +import logging +from pathlib import Path from collections import defaultdict from collections.abc import Iterable +from functools import cached_property from dataclasses import dataclass, field -from clang.cindex import TranslationUnit, Cursor, CursorKind, SourceRange +from clang.cindex import TranslationUnit, TranslationUnitLoadError, Diagnostic +from clang.cindex import Cursor, CursorKind, SourceRange, TokenKind from clang.cindex import PrintingPolicy, PrintingPolicyProperty, LinkageKind -from clang.cindex import conf +from clang.cindex import conf, SourceLocation from ctypes import pointer, c_size_t, c_char_p +from .tools import run_subprocess + +logger = logging.getLogger("ideas.ast") FILENAME = "file.c" @@ -21,15 +28,78 @@ class Symbol: name: str cursor: Cursor parent: Cursor | None = None + decl: Cursor | None = None + + @property + def spelling(self) -> str: + return self.cursor.spelling @property def kind(self) -> CursorKind: 
return self.cursor.kind @property - def code(self): + def declaration(self) -> str | None: + return get_cursor_code(self.decl) if self.decl else None + + @property + def code(self) -> str: return get_cursor_code(self.parent or self.cursor, pretty_print=True) + @property + def is_definition(self) -> bool: + return self.cursor.is_definition() + + @property + def is_variable(self) -> bool: + return self.cursor.kind == CursorKind.VAR_DECL + + @property + def is_function(self) -> bool: + return self.cursor.kind == CursorKind.FUNCTION_DECL + + @property + def is_global(self) -> bool: + return self.cursor.linkage == LinkageKind.EXTERNAL + + @property + def is_system(self) -> bool: + return self.cursor.location.is_in_system_header + + @cached_property + def static_translation(self) -> str: + # FIXME: Handle VAR_DECL via c2rust? + # Ignore non-containers + if self.kind not in ( + CursorKind.STRUCT_DECL, + CursorKind.UNION_DECL, + CursorKind.ENUM_DECL, + CursorKind.TYPEDEF_DECL, + ): + return "" + + # Ignore anonymous containers + symbol_name = (self.parent or self.cursor).spelling + if not symbol_name: + return "" + + # Generate translation of container + bindgen = [ + "bindgen", + "--disable-header-comment", + "--no-doc-comments", + "--no-layout-tests", + "--no-recursive-allowlist", + "--allowlist-item", + symbol_name, + self.cursor.translation_unit.spelling, + ] + ok, output, _, _ = run_subprocess(bindgen) + return output if ok else "" + + def with_declaration(self, decl: Cursor) -> "Symbol": + return Symbol(self.name, self.cursor, self.parent, decl=decl) + @dataclass class TreeResult: @@ -39,9 +109,14 @@ class TreeResult: ) -def create_translation_unit(code: str) -> TranslationUnit: +def create_translation_unit(path_or_code: Path | str) -> TranslationUnit: # Parse the code using clang - tu = TranslationUnit.from_source(FILENAME, unsaved_files=[(FILENAME, code)]) + if isinstance(path_or_code, str): + tu = TranslationUnit.from_source(FILENAME, unsaved_files=[(FILENAME, 
path_or_code)]) + else: + tu = TranslationUnit.from_source(str(path_or_code.resolve())) + if any(d.severity >= Diagnostic.Error for d in tu.diagnostics): + raise TranslationUnitLoadError("\n".join([d.format() for d in tu.diagnostics])) return tu @@ -57,8 +132,8 @@ def extract_info_c(tu: TranslationUnit) -> TreeResult: return TreeResult(symbols=symbols, complete_graph=graph) -def extract_symbol_info_c(node: Cursor) -> dict[str, Symbol]: - symbols = {} +def extract_symbol_info_c(node: Cursor, parent: Cursor | None = None) -> dict[str, Symbol]: + symbols: dict[str, Symbol] = {} # If enter new scope then exit early if node.kind == CursorKind.COMPOUND_STMT: @@ -66,6 +141,7 @@ def extract_symbol_info_c(node: Cursor) -> dict[str, Symbol]: # Add declarative nodes to symbols usr = node.get_usr() + # FIXME: Use node.kind.is_declaration()? if node.kind in ( CursorKind.STRUCT_DECL, CursorKind.UNION_DECL, @@ -75,17 +151,28 @@ def extract_symbol_info_c(node: Cursor) -> dict[str, Symbol]: CursorKind.VAR_DECL, CursorKind.TYPEDEF_DECL, ): - symbols[usr] = Symbol(usr, node) + symbols[usr] = Symbol(usr, node, parent=parent) - # Recurse through children and merge any definitional symbol or unseen symbol + # Recurse through children and merge them into symbols for child_node in node.get_children(): - child_symbols = extract_symbol_info_c(child_node) + parent = node if parent is None and node.kind != CursorKind.TRANSLATION_UNIT else parent + child_symbols = extract_symbol_info_c(child_node, parent=parent) for child_name, child_symbol in child_symbols.items(): - # Set child's parent - if node.kind != CursorKind.TRANSLATION_UNIT: - child_symbol = Symbol(child_symbol.name, child_symbol.cursor, parent=node) - if child_name not in symbols or child_symbol.cursor.is_definition(): + if child_name not in symbols: + # Found a new symbol + symbols[child_name] = child_symbol + elif symbols[child_name].is_definition and child_symbol.is_definition: + # Always keep current definition 
symbols[child_name] = child_symbol + elif not symbols[child_name].is_definition and child_symbol.is_definition: + # Previous symbol was a declaration so replace it with new definitional symbol + symbols[child_name] = child_symbol.with_declaration(symbols[child_name].cursor) + elif symbols[child_name].is_definition and not child_symbol.is_definition: + if not symbols[child_name].is_system or not child_symbol.is_system: + logger.warning(f"Ignoring declaration after definition of `{child_name}`") + elif not symbols[child_name].is_definition and not child_symbol.is_definition: + if not symbols[child_name].is_system or not child_symbol.is_system: + logger.warning(f"Ignoring re-declaration of `{child_name}`") return symbols @@ -155,16 +242,182 @@ def get_cursor_code(cursor: Cursor, pretty_print: bool = False) -> str: return code -def get_internally_linked_cursors(cursor: Cursor, filter_system: bool = True) -> list[Cursor]: - statics: dict[str, Cursor] = {} - for node in cursor.walk_preorder(): - if node.linkage == LinkageKind.INTERNAL: - statics[node.get_usr()] = node - elif node.referenced is not None and node.referenced.linkage == LinkageKind.INTERNAL: - statics[node.referenced.get_usr()] = node.referenced +def clang_rename_( + tu: TranslationUnit, renames: dict[str, str], sources: dict[Path, bytes] | None = None +): + logger.info( + f"Renaming {len(renames)} symbols in {tu.spelling}: {', '.join(renames.keys())}" + ) + # Group edits by file path and source offsets because cursor traversal may revisit tokens. + edits_by_file: dict[Path, dict[tuple[int, int], bytes]] = {} + assert tu.cursor is not None + for cursor in tu.cursor.walk_preorder(): + target_usr = cursor.get_usr() + target_spelling = cursor.spelling + + # If the cursor itself is not a symbol we want to rename, check if it's a reference to one. 
+ if target_usr not in renames: + referenced = cursor.referenced + if referenced is None: + continue + target_usr = referenced.get_usr() + target_spelling = referenced.spelling + if target_usr not in renames: + continue + if not target_spelling: + continue + + # Record edits for all tokens that match the symbol's spelling and are not in system headers + for token in _get_tokens(cursor): + if token.spelling != target_spelling or token.location.is_in_system_header: + continue + file_path = Path(token.location.file.name).resolve() + extent = (token.extent.start.offset, token.extent.end.offset) + edits_by_file.setdefault(file_path, {})[extent] = renames[target_usr].encode() + + # Apply edits for each file and optionally save the pre-edit source snapshot. + for file_path, edits in edits_by_file.items(): + if sources is not None and file_path not in sources: + sources[file_path] = file_path.read_bytes() + _apply_edits(file_path, edits) + + +DEFINITION_START_TOKEN = {CursorKind.FUNCTION_DECL: "{", CursorKind.VAR_DECL: "="} + + +def clang_make_global_(path: Path, spelling: str): + tu = create_translation_unit(path) + cursor = _find_cursor(tu, spelling) + if cursor.kind not in DEFINITION_START_TOKEN: + raise ValueError(f"Unhandled cursor kind {cursor.kind}!") + + tokens = list(_get_tokens(cursor)) + assert len(tokens) > 0 + + edits: dict[tuple[int, int], bytes] = {} + + for i, token in enumerate(tokens): + # Remove storage specifiers from declaration while preserving offsets + if token.kind == TokenKind.KEYWORD and token.spelling in ("static", "inline"): + assert i + 1 < len(tokens), "storage specifier should always come before name" + start_offset = token.extent.start.offset + # Use start of next token as end offset to remove any whitespace + end_offset = tokens[i + 1].extent.start.offset + edits[(start_offset, end_offset)] = b"" + + # Don't change anything after definition start + elif ( + token.kind == TokenKind.PUNCTUATION + and token.spelling == 
DEFINITION_START_TOKEN[cursor.kind] + ): + break + + if edits: + _apply_edits(path, edits) + + +def clang_make_extern_(path: Path, spelling: str): + tu = create_translation_unit(path) + cursor = _find_cursor(tu, spelling) + # Determine punctuation token to find based on cursor kind (function or variable) + if cursor.kind not in DEFINITION_START_TOKEN: + raise ValueError(f"Unhandled cursor kind {cursor.kind}!") + + tokens = list(_get_tokens(cursor)) + assert len(tokens) > 0 + + edits: dict[tuple[int, int], bytes] = {} + is_extern = False + definition_start_token_idx = None + + for i, token in enumerate(tokens): + # Remove storage specifiers from declaration while preserving offsets + if token.kind == TokenKind.KEYWORD and token.spelling in ("static", "inline"): + assert i + 1 < len(tokens), "storage specifier should always come before name" + start_offset = token.extent.start.offset + # Use start of next token as end offset to remove any whitespace + end_offset = tokens[i + 1].extent.start.offset + edits[(start_offset, end_offset)] = b"" + + # Check if extern keyword already present + elif token.kind == TokenKind.KEYWORD and token.spelling == "extern": + is_extern = True + + # Record the first definition-opening token. 
+ elif ( + definition_start_token_idx is None + and token.kind == TokenKind.PUNCTUATION + and token.spelling == DEFINITION_START_TOKEN[cursor.kind] + ): + definition_start_token_idx = i + break + + # Replace definition portion with ';' + if definition_start_token_idx is not None: + assert definition_start_token_idx > 0 + # Use end of prior token as end offset to remove any whitespace + start_pos = tokens[definition_start_token_idx - 1].extent.end.offset + end_pos = cursor.extent.end.offset + edits[(start_pos, end_pos)] = b";" + + # Add 'extern ' prefix if not already present + if not is_extern: + extern_insert_pos = cursor.extent.start.offset + edits[(extern_insert_pos, extern_insert_pos)] = b"extern " + + if edits: + _apply_edits(path, edits) + + +def _get_tokens(cursor: Cursor): + # Use get_tokens if it actually returns a non-empty list + tokens = list(cursor.get_tokens()) + if len(tokens) > 0: + yield from tokens + return + + # Ideally we would use cursor.get_tokens() but does not work with macros: + # https://github.com/llvm/llvm-project/issues/43451 + tu = cursor.translation_unit + + start = cursor.extent.start + start = SourceLocation.from_position(tu, start.file, start.line, start.column) + + end = cursor.extent.end + end = SourceLocation.from_position(tu, end.file, end.line, end.column) + + extent = SourceRange.from_locations(start, end) + + yield from tu.get_tokens(extent=extent) + + +def _find_cursor(tu: TranslationUnit, spelling: str) -> Cursor: + definition: Cursor | None = None + declaration: Cursor | None = None + + assert tu.cursor is not None + for cursor in tu.cursor.walk_preorder(): + if cursor.kind not in (CursorKind.FUNCTION_DECL, CursorKind.VAR_DECL): + continue + if cursor.spelling != spelling: + continue + if cursor.is_definition(): + definition = cursor + break + if declaration is None: + declaration = cursor + + target = definition or declaration + if target is None: + raise ValueError(f"Unable to find function or variable with spelling 
`{spelling}`") + return target + + +def _apply_edits(path: Path, edits: dict[tuple[int, int], bytes]): + source = path.read_bytes() + + # Apply edits in reverse offset order to preserve validity of remaining offsets + for (start, end), replacement in sorted(edits.items(), key=lambda e: e[0][0], reverse=True): + source = source[:start] + replacement + source[end:] - # FIXME: Use set when Cursors are hashable - list_of_statics = list(statics.values()) - if filter_system: - list_of_statics = [c for c in list_of_statics if not c.location.is_in_system_header] - return list_of_statics + path.write_bytes(source) diff --git a/src/ideas/ast_rust.py b/src/ideas/ast_rust.py index bbfc91f..6e25457 100644 --- a/src/ideas/ast_rust.py +++ b/src/ideas/ast_rust.py @@ -48,8 +48,10 @@ def __eq__(self, other: object) -> bool: return self.__repr__() == other.__repr__() -def get_root(code: str) -> Node: - tree = RUST_PARSER.parse(code.encode()) +def get_root(code: str | bytes) -> Node: + if isinstance(code, str): + code = code.encode() + tree = RUST_PARSER.parse(code) return tree.root_node diff --git a/src/ideas/cmake.py b/src/ideas/cmake.py index bb0193c..39fac58 100644 --- a/src/ideas/cmake.py +++ b/src/ideas/cmake.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: Apache-2.0 # +import sys import os import logging import shutil @@ -51,9 +52,9 @@ def configure( else: cmd = ["cmake", "-S", ".", "--preset", preset] + flags - success, output = run_subprocess(cmd) + success, output, error, _ = run_subprocess(cmd) if not success: - raise RuntimeError(f"CMake configuration failed:{' '.join(cmd)}\n{output}") + raise RuntimeError(f"CMake configuration failed:{' '.join(cmd)}\n{output + error}") def build(build_dir: Path, preset: str | None = None) -> None: @@ -63,53 +64,14 @@ def build(build_dir: Path, preset: str | None = None) -> None: cmd = ["cmake", "--build", str(build_dir), "--target", "all", "--preset", preset] build_log_path = build_dir / "build.log" - success, output = run_subprocess(cmd) + 
success, output, error, _ = run_subprocess(cmd) if not success: with open(build_log_path, "w") as log_file: - log_file.write(output) - raise RuntimeError(f"CMake build failed: {' '.join(cmd)}\n{output}") + log_file.write(output + error) + raise RuntimeError(f"CMake build failed: {' '.join(cmd)}\n{output + error}") -def extract_symbols(build_dir: Path) -> None: - # Find *.type files generated by extract_info.cmake - cmd = ["find", str(build_dir), "-maxdepth", "1", "-name", "*.type"] - success, output = run_subprocess(cmd) - if not success: - raise RuntimeError(f"Finding executables failed: {' '.join(cmd)}\n{output}") - - executables = output.strip().split("\n") - for exe in executables: - if not exe: - raise RuntimeError(f"Found an empty line in the list of executables {executables}!") - - # Remove .type from suffix to get actual executable - exe = exe[: -len(".type")] - - # Extract symbols using nm and awk - cmd = ["nm", "--extern-only", exe] - success, output = run_subprocess(cmd) - if not success: - raise RuntimeError( - f"Extracting symbols from {exe} failed: {' '.join(cmd)}\n{output}" - ) - - # Filter for text symbols (T) and exclude symbols starting with _ - symbols = [] - for line in output.strip().split("\n"): - parts = line.split() - if len(parts) >= 2 and parts[1] == "T": - symbol = parts[-1] - if not symbol.startswith("_"): - symbols.append(symbol) - - # Write symbols to file - symbol_file = f"{exe}.symbols" - with open(symbol_file, "w") as f: - f.write("\n".join(symbols) + "\n") - - -@hydra.main(version_base=None, config_name="cmake") -def main(cfg: CmakeConfig) -> None: +def _main(cfg: CmakeConfig) -> None: # Determine Cmake preset preset = "test" if os.path.exists("CMakePresets.json") else None @@ -123,13 +85,15 @@ def main(cfg: CmakeConfig) -> None: # Build with Cmake build(cfg.build_dir, preset) - # Extract per-target symbols - extract_symbols(build_dir=cfg.build_dir) - -if __name__ == "__main__": +@hydra.main(version_base=None, config_name="cmake") 
+def main(cfg: CmakeConfig) -> None: try: - main() + _main(cfg) except Exception as e: - logger.error(e) - raise e + logger.exception(e) + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/src/ideas/convert_tests.py b/src/ideas/convert_tests.py index e0c038f..18fc367 100644 --- a/src/ideas/convert_tests.py +++ b/src/ideas/convert_tests.py @@ -5,6 +5,7 @@ # +import sys import json import logging from dataclasses import dataclass @@ -25,8 +26,8 @@ class ConvertConfig: test_vectors: list[Path] = MISSING output: Path = MISSING - crate_manifest: Path = MISSING timeout: int = 600000 + vcs: str = "none" # Library-specific inputs runner_manifest: Path | None = None @@ -43,7 +44,7 @@ def rustfmt(path: Path) -> None: def to_rust_str(string): - return '"' + repr(string)[1:-1] + '"' + return '"' + repr(string)[1:-1].replace('"', '\\"') + '"' def is_bin_test(test_case: Path): @@ -51,15 +52,18 @@ def is_bin_test(test_case: Path): return "lib_state_in" not in test_case_json and "lib_state_out" not in test_case_json -def convert_tests_for_exec(test_cases: list[Path], crate: Crate, timeout: int = 60000) -> str: - test_cases = list(filter(is_bin_test, test_cases)) - if len(test_cases) == 0: - return "" - +def add_deps_for_exec(crate: Crate) -> None: # Add test dependencies crate.cargo_add(dep="assert_cmd@2.0.17", section="dev") crate.cargo_add(dep="ntest@0.9.3", section="dev") crate.cargo_add(dep="predicates@3.1.3", section="dev") + crate.invalidate_metadata() + + +def convert_tests_for_exec(test_cases: list[Path], timeout: int = 60000) -> str: + test_cases = list(filter(is_bin_test, test_cases)) + if len(test_cases) == 0: + return "" output = "" output += "use assert_cmd::Command;\n" @@ -139,24 +143,27 @@ def is_lib_test(test_case: Path): return "lib_state_in" in test_case_json and "lib_state_out" in test_case_json +def add_deps_for_lib(crate: Crate) -> None: + # Add test dependencies + crate.cargo_add(dep="ntest@0.9.3", section="dev") + 
crate.cargo_add(dep="once_cell@1.21.3", section="dev") + crate.cargo_add(dep="test-cdylib@1.1.0", section="dev") + crate.invalidate_metadata() + + def convert_tests_for_lib( test_cases: list[Path], - crate: Crate, runner_manifest: Path | None, template_path: Path | None, timeout: int = 60000, ) -> str: - if template_path is None: - return "" - test_cases = list(filter(is_lib_test, test_cases)) if len(test_cases) == 0: return "" - # Add test dependencies - crate.cargo_add(dep="ntest@0.9.3", section="dev") - crate.cargo_add(dep="once_cell@1.21.3", section="dev") - crate.cargo_add(dep="test-cdylib@1.1.0", section="dev") + # Library tests need a template + if template_path is None: + raise ValueError("Template path must be specified for library tests!") # Load template template = template_path.read_text() @@ -179,28 +186,47 @@ def convert_tests_for_lib( return template -@hydra.main(version_base=None, config_name="convert_tests") -def main(cfg: ConvertConfig) -> None: +def _main(cfg: ConvertConfig) -> None: output_dir = Path(HydraConfig.get().runtime.output_dir) logger.info(f"Saving results to {output_dir}") - test_vectors = [Path(path) for path in cfg.test_vectors] - crate = Crate(cargo_toml=cfg.crate_manifest) + runner_manifest = cfg.runner_manifest.absolute() if cfg.runner_manifest else None + test_vectors = [Path(path).absolute() for path in cfg.test_vectors] + cargo_toml = output_dir / "Cargo.toml" + output_file = output_dir / cfg.output - exec_tests = convert_tests_for_exec(test_vectors, crate, cfg.timeout) - lib_tests = convert_tests_for_lib( - test_vectors, crate, cfg.runner_manifest, cfg.template, cfg.timeout - ) + exec_tests = convert_tests_for_exec(test_vectors, cfg.timeout) + lib_tests = convert_tests_for_lib(test_vectors, runner_manifest, cfg.template, cfg.timeout) # Write and format tests - cfg.output.parent.mkdir(exist_ok=True, parents=True) - cfg.output.write_text(exec_tests + "\n" + lib_tests) - rustfmt(cfg.output) - - # Update VCS - 
crate.add(cfg.crate_manifest) - crate.add(cfg.output) + output_file.parent.mkdir(exist_ok=True) + output_file.write_text(exec_tests + "\n" + lib_tests) + rustfmt(output_file) + + # Add crate dependencies + crate = Crate(cargo_toml=cargo_toml, vcs=cfg.vcs) # type: ignore[reportArgumentType] + if exec_tests: + add_deps_for_exec(crate) + if lib_tests: + add_deps_for_lib(crate) + + # Update crate in VCS + crate.vcs.add(cargo_toml) + crate.vcs.add(output_file) + if (output_subdir := HydraConfig.get().output_subdir) is not None: + crate.vcs.add(output_dir / output_subdir) crate.invalidate_metadata() - crate.commit("Converted JSON test vectors to Rust tests") + msg = "Converted JSON test vectors to Rust tests" + logger.info(msg) + crate.vcs.commit(msg) + + +@hydra.main(version_base=None, config_name="convert_tests") +def main(cfg: ConvertConfig) -> None: + try: + _main(cfg) + except Exception as e: + logger.exception(e) + sys.exit(1) if __name__ == "__main__": diff --git a/src/ideas/init/consolidate.py b/src/ideas/init/consolidate.py index 819149e..dc95485 100644 --- a/src/ideas/init/consolidate.py +++ b/src/ideas/init/consolidate.py @@ -4,35 +4,34 @@ # SPDX-License-Identifier: Apache-2.0 # +import sys import os import logging -import sys from pathlib import Path -from graphlib import TopologicalSorter, CycleError -from collections.abc import Iterable, Container from dataclasses import dataclass +from itertools import combinations +from graphlib import TopologicalSorter, CycleError import hydra +import networkx as nx from omegaconf import MISSING from hydra.core.config_store import ConfigStore from hydra.core.hydra_config import HydraConfig -from clang.cindex import CompilationDatabase, TranslationUnit, CursorKind -from clang.cindex import Rewriter, TokenKind, SourceRange +from clang.cindex import CompilationDatabase, TranslationUnit from clang.cindex import TranslationUnitLoadError, Diagnostic -from ideas.ast import extract_info_c, TreeResult, Symbol -from ideas.ast 
import get_internally_linked_cursors -from ideas.tools import Crate, clang_rename_, check_c +from ideas.ast import extract_info_c, TreeResult, Symbol, clang_rename_ +from ideas.tools import Crate, check_c -logger = logging.getLogger("ideas.preprocess") +logger = logging.getLogger("ideas.init.consolidate") @dataclass class ConsolidateConfig: filename: Path = MISSING cargo_toml: Path = MISSING + vcs: str = "none" - export_symbols: Path | None = None source_priority: Path | None = None @@ -40,62 +39,97 @@ class ConsolidateConfig: cs.store(name="init.consolidate", node=ConsolidateConfig) -def init( - compile_commands: Path, - export_symbols: list[str] | None = None, - source_priority: list[Path] | None = None, -) -> str: - # Get symbol table and dependencies taking into account source priority and exported symbols, - # and prefix internally linked declarations/references since there can be name collisions - # between translation units. - asts = get_asts( - compile_commands, - valid_paths=source_priority, - prefix_internally_linked=False, - ) - symbols, dependencies = get_symbols_and_dependencies(asts, source_priority, export_symbols) +def init(compile_commands: Path, source_priority: list[Path]) -> str: + # Get symbol table and dependencies taking into account source priority + asts = get_asts(compile_commands, source_priority) + symbols, dependencies = get_symbols_and_dependencies(asts, source_priority) logger.info(f"Found {len(symbols)} symbols in {compile_commands}!") - # Assemble C sources in topological order - includes = get_includes(symbols) - sources = [] - sorted_symbol_names = list(TopologicalSorter(dependencies).static_order()) - for symbol_name in sorted_symbol_names: - # Ignore function declarations - if symbol_name not in symbols: - logger.warning(f"Skipping `{symbol_name}` ...") - continue - symbol_code = symbols[symbol_name].code - if symbol_code in sources: - continue - sources.append(symbol_code) - return "\n".join(includes) + "\n\n" + 
"\n\n".join(sources) + # Consolidate C sources in topological order + sources = get_includes(symbols) + for group in TopologicalSorter(dependencies).static_order(): + # Add forward declarations if more than one symbol in group + if len(group) > 1: + for name in group: + declaration = symbols[name].declaration + if declaration and declaration not in sources: + sources.append(declaration) + + # Add symbol definitions + for name in group: + definition = symbols[name].code + "\n" + if definition not in sources: + sources.append(definition) + return "\n".join(sources) def get_symbols_and_dependencies( asts: list[TreeResult], source_priority: list[Path] | None = None, - export_symbols: list[str] | None = None, -) -> tuple[dict[str, Symbol], dict[str, list[str]]]: - asts_symbols = [filter_symbols(ast.symbols) for ast in asts] - global_symbols = merge_symbols(asts_symbols, source_priority=source_priority) - - # Filter global symbols to create project symbols - project_symbols = filter_symbols(global_symbols, filter_system=True) - project_dependencies = merge_complete_graphs(asts, valid_names=project_symbols) - - # Use export_symbols to filter project symbols and dependencies - dependencies = remove_cycles_from_graph(project_dependencies, project_symbols) - if export_symbols is not None: - export_symbols = [c14n_symbol_name(name, project_symbols) for name in export_symbols] - dependencies = reachable_subgraph(dependencies, export_symbols) - symbols = filter_symbols(project_symbols, filter_function_declarations=True) - + external_symbol_names: list[str] | None = None, +) -> tuple[dict[str, Symbol], dict[tuple[str, ...], list[tuple[str, ...]]]]: + source_priority = source_priority or [] + + # Merge ASTs into non-system project dependencies + list_of_non_system_symbols = [ + {n: s for n, s in ast.symbols.items() if not s.is_system} for ast in asts + ] + project_symbols = merge_symbols(list_of_non_system_symbols, source_priority) + project_dependencies = nx.compose_all( + [ + 
nx.from_dict_of_lists(ast.complete_graph, create_using=nx.DiGraph) # type: ignore + for ast in asts + ] + ).subgraph(project_symbols.keys()) + + # Find all reachable symbols and subgraph of dependencies from symbols with global functions/variables + symbols = project_symbols.copy() + dependencies = project_dependencies.copy() + if external_symbol_names is None: + # Use global function/variables as desired external symbol names + external_symbol_names = [ + name + for name, symbol in symbols.items() + if symbol.is_global and (symbol.is_function or symbol.is_variable) + ] + if external_symbol_names: + paths = nx.multi_source_dijkstra_path(project_dependencies, external_symbol_names) + symbols = {k: v for k, v in symbols.items() if k in paths} + dependencies = dependencies.subgraph(symbols.keys()) + else: + logger.warning("No external symbols were found/specified!") + + # Remove cycles from graph by combining strongly-connected components. Note that we sort + # members in a SCC so they are ordered lexically. 
+ def symbol_lexical_key(name: str) -> tuple[int, str, int, str]: + sym = symbols[name] + loc = sym.cursor.location + tu_file = Path(sym.cursor.translation_unit.spelling).resolve() + + loc_file = tu_file + if loc.file is not None: + loc_file = Path(loc.file.name).resolve() + + file_rank = len(source_priority) + if loc_file in source_priority: + file_rank = source_priority.index(loc_file) + return (file_rank, str(loc_file), loc.offset, name) + + C = nx.condensation(dependencies) + scc_map = {n: tuple(sorted(C.nodes[n]["members"], key=symbol_lexical_key)) for n in C.nodes} + dependencies = {scc_map[n]: [scc_map[s] for s in C.successors(n)] for n in C.nodes} + + # Make sure dependencies are topologically sortable + try: + list(TopologicalSorter(dependencies).static_order()) + except CycleError as ex: + logger.error(ex) + raise ex return symbols, dependencies -def get_includes(symbols: dict[str, Symbol]) -> set[str]: - includes: set[str] = set() +def get_includes(symbols: dict[str, Symbol]) -> list[str]: + includes: list[str] = [] for symbol in symbols.values(): tu = symbol.cursor.translation_unit for inclusion in tu.get_includes(): @@ -111,14 +145,14 @@ def get_includes(symbols: dict[str, Symbol]) -> set[str]: with open(inclusion.location.file.name, "rb") as f: f.seek(inclusion.location.offset) include = f.readline().decode().strip() - includes.add(f"#include {include}") + include = f"#include {include}" + if include not in includes: + includes.append(include) return includes def get_asts( - compile_commands: Path, - valid_paths: list[Path] | None = None, - prefix_internally_linked: bool = False, + compile_commands: Path, valid_paths: list[Path], rename_conflicting_symbols: bool = True ) -> list[TreeResult]: assert compile_commands.name == "compile_commands.json" db = CompilationDatabase.fromDirectory(compile_commands.parent) @@ -134,100 +168,93 @@ def get_asts( ) if any(d.severity >= Diagnostic.Error for d in tu.diagnostics): raise 
TranslationUnitLoadError("\n".join([d.format() for d in tu.diagnostics])) - if prefix_internally_linked: - # FIXME: It would be nicer to add a prefix to only those symbols that collide but we - # cannot know that until symbol merge time. - tu = add_prefix_to_internally_linked_cursors(tu, compile_commands) assert tu.cursor is not None - if valid_paths is None or Path(tu.cursor.spelling).resolve() in valid_paths: + if not valid_paths or Path(tu.cursor.spelling).resolve() in valid_paths: ast = extract_info_c(tu) asts.append(ast) + if rename_conflicting_symbols: + original_sources = rename_conflicting_symbols_(asts) + if original_sources: + try: + asts = get_asts(compile_commands, valid_paths, rename_conflicting_symbols=False) + finally: + # Always restore original source code after reparsing renamed symbols. + for path, source in original_sources.items(): + path.write_bytes(source) return asts -def add_prefix_to_internally_linked_cursors( - tu: TranslationUnit, - compile_commands: Path, -) -> TranslationUnit: - assert tu.cursor is not None - cursors = get_internally_linked_cursors(tu.cursor) - - # Prefix internally-linked declarations using TU stem - # FIXME: TU stem isn't guaranteed to be a non-clashing since folder1/stem.c - # and folder2/stem.c will produce same prefix. - source = Path(tu.spelling) - prefix = source.stem + "_" - renames = {cursor.spelling: prefix + cursor.spelling for cursor in cursors} - - # XXX: This is an in-place rename! Would be nice to have a context manager that can automatically - # restore the contents of the file. We need an in-place rename because downstream code may - # use tu.cursor.spelling which needs to point to a valid file. 
- source_bytes = source.read_bytes() - try: - # Remove static visibility from internally-linked cursors - remove_static_keyword_(tu) - - # Add prefix to internally-linked declarations - clang_rename_(source, renames, compile_commands=compile_commands) - tu.reparse() - finally: - source.write_bytes(source_bytes) - - # There should be no more internally linked cursors because we made them externally visible - assert len(get_internally_linked_cursors(tu.cursor)) == 0 - - return tu - - -def remove_static_keyword_(tu: TranslationUnit): - assert tu.cursor is not None - cursors = get_internally_linked_cursors(tu.cursor) - - rewriter = Rewriter.create(tu) - for cursor in cursors: - # Find static keyword in cursor tokens - tokens = list(cursor.get_tokens()) - for i, token in enumerate(tokens): - if token.kind == TokenKind.KEYWORD and token.spelling == "static": - # Use next token's start as end of extent so we capture the spacing between the static - # keyword and the next token. - extent = SourceRange.from_locations( - token.extent.start, - tokens[i + 1].extent.start if i + 1 < len(tokens) else token.extent.end, - ) - rewriter.remove_text(extent) - rewriter.overwrite_changed_files() - - -def filter_symbols( - symbols: dict[str, Symbol], - filter_system: bool = True, - filter_function_declarations: bool = False, -) -> dict[str, Symbol]: - filtered_symbols = {} - for name, symbol in symbols.items(): - # Ignore "system" symbols - if filter_system and symbol.cursor.location.is_in_system_header: +def rename_conflicting_symbols_(asts: list[TreeResult]) -> dict[Path, bytes]: + # Gather best representative symbol per spelling per AST into a single dict + symbols_with_spelling: dict[str, list[Symbol]] = {} + for ast in asts: + seen: dict[str, Symbol] = {} + for symbol in ast.symbols.values(): + spelling = symbol.spelling + if not spelling: + continue + # Save this symbol if we haven't seen it before + if spelling not in seen: + seen[spelling] = symbol + # Or replace it if it's a 
definition and existing symbol is a declaration + elif symbol.is_definition and not seen[spelling].is_definition: + seen[spelling] = symbol + for spelling, sym in seen.items(): + symbols_with_spelling.setdefault(spelling, []).append(sym) + + # Find symbols with common spelling but different definitions across ASTs + tu_renames: dict[TranslationUnit, dict[str, str]] = {} + for spelling, symbol1, symbol2 in ( + (spelling, *symbol_pair) + for spelling, symbols in symbols_with_spelling.items() + for symbol_pair in combinations(symbols, r=2) + ): + # Two symbols can only clash if they have same spelling but different definitions + if not (symbol1.is_definition and symbol2.is_definition): continue - - # Filter function declarations - if filter_function_declarations: - if ( - symbol.cursor.kind == CursorKind.FUNCTION_DECL - and not symbol.cursor.is_definition() - ): + if symbol1.code == symbol2.code: + continue + # Rename non-global, non-system symbols using TU stem as prefix + for symbol in (symbol1, symbol2): + if symbol.is_global or symbol.is_system: continue + path = Path(symbol.cursor.translation_unit.spelling).resolve() + new_spelling = path.stem + "_" + spelling + tu_renames.setdefault(symbol.cursor.translation_unit, {})[symbol.name] = ( + new_spelling + ) + if not tu_renames: + return {} + + # Check that renaming won't cause clashes with existing symbols with the same spelling + existing_spellings = set(symbols_with_spelling.keys()) + for renames in tu_renames.values(): + new_spellings = set(renames.values()) + if existing_spellings.intersection(new_spellings): + raise NotImplementedError( + "Renaming symbols would cause clashes with existing symbols with the same spelling!" 
+ ) + existing_spellings.update(new_spellings) - filtered_symbols[name] = symbols[name] - return filtered_symbols + # Write renames to disk while keeping track of original source bytes + sources: dict[Path, bytes] = {} + try: + for tu, renames in tu_renames.items(): + # Reparse translation unit in case anything has changed on disk + tu.reparse() + clang_rename_(tu, renames, sources) + + except Exception: + # Restore original source code if renaming fails before caller can reparse. + for path, source in sources.items(): + path.write_bytes(source) + raise + return sources def merge_symbols( - list_of_symbols: list[dict[str, Symbol]], source_priority: list[Path] | None = None + list_of_symbols: list[dict[str, Symbol]], source_priority: list[Path] ) -> dict[str, Symbol]: - if source_priority is None: - source_priority = [] - global_symbols: dict[str, Symbol] = {} for symbols in list_of_symbols: # Gather symbols @@ -282,119 +309,53 @@ def merge_symbols( return global_symbols -def merge_complete_graphs( - asts: list[TreeResult], valid_names: Container[str] -) -> dict[str, list[str]]: - graph: dict[str, list[str]] = {} - for ast in asts: - for node, neighbors in ast.complete_graph.items(): - if node not in valid_names: - continue - if node not in graph: - graph[node] = [] - for neighbor in neighbors: - if neighbor not in valid_names or neighbor in graph[node]: - continue - graph[node].append(neighbor) - return dict(graph) - - -def remove_cycles_from_graph( - graph: dict[str, list[str]], symbols: dict[str, Symbol] -) -> dict[str, list[str]]: - # Remove self-dependencies from graph: symbol -> [symbol] => symbol -> [] - for dependent, dependencies in graph.items(): - graph[dependent] = [ - dependency for dependency in dependencies if dependency != dependent - ] - - # FIXME: Add more C-specific heuristics to remove cycles from the graph - - # Make sure graph is topologically sortable - try: - list(TopologicalSorter(graph).static_order()) - except CycleError as ex: - 
logger.error(ex) - raise ex - return graph - - -def reachable_subgraph( - dependencies: dict[str, list[str]], names: Iterable[str] -) -> dict[str, list[str]]: - subgraph: dict[str, list[str]] = {} - for name in names: - subgraph[name] = dependencies[name] - subgraph.update(reachable_subgraph(dependencies, subgraph[name])) - return subgraph - - -def c14n_symbol_name(name: str, symbols: dict[str, Symbol]): - if name in symbols: - return name - if f"c:@F@{name}" in symbols: - return f"c:@F@{name}" - - # Find symbols with spelling of name - potential_names = {s.name for s in symbols.values() if name in s.cursor.spelling} - if len(potential_names) == 0: - symbol_names = "\n".join(symbols.keys()) - raise ValueError(f"Unable to find {name} in symbols:\n{symbol_names}") - elif len(potential_names) != 1: - raise ValueError(f"Unable to find {name} in symbols! Found: {potential_names}") - return potential_names.pop() - - -@hydra.main(version_base=None, config_name="init.consolidate") -def main(cfg: ConsolidateConfig) -> None: +def _main(cfg: ConsolidateConfig): output_dir = Path(HydraConfig.get().runtime.output_dir) # Get crate information - crate = Crate(cargo_toml=cfg.cargo_toml) + crate = Crate(cargo_toml=cfg.cargo_toml, vcs=cfg.vcs) # type: ignore[reportArgumentType] - export_symbols = None - if isinstance(cfg.export_symbols, Path): - export_symbols = cfg.export_symbols.read_text().splitlines() + source_priority: list[Path] = [] + if cfg.source_priority: + lines = cfg.source_priority.read_text().splitlines() + source_priority = [Path(line.strip()).resolve() for line in lines if line.strip()] - source_priority = None - if isinstance(cfg.source_priority, Path): - source_priority = [ - Path(path).resolve() for path in cfg.source_priority.read_text().splitlines() - ] - - output = init( - cfg.filename, - export_symbols=export_symbols, - source_priority=source_priority, - ) + output = init(cfg.filename, source_priority) # Only run preprocess, compile, and assemble steps on C 
code compiles, compile_errors = check_c(output, flags=["-c"]) # Write C code to disk - crate.rust_src_path.parent.mkdir(exist_ok=True, parents=True) - crate.rust_src_path.with_suffix(".c").write_text(output) - crate.add(crate.rust_src_path.with_suffix(".c")) + crate.c_src_path.parent.mkdir(exist_ok=True, parents=True) + crate.c_src_path.write_text(output) + crate.vcs.add(crate.c_src_path) # Add hydra directory if (output_subdir := HydraConfig.get().output_subdir) is not None: - crate.add(output_dir / output_subdir) + crate.vcs.add(output_dir / output_subdir) # If the C code didn't compile, then error loudly name = crate.root_package["name"] + msg = f"Consolidated `{name}` in {output_dir}" if not compiles: - logger.error(f"Failed to compile `{name}` C code!") - crate.commit( - f"Failed to compile `{name}` C code!\n\n{' '.join(sys.argv)}\n\n{compile_errors}" - ) - sys.exit(1) - logger.info(f"Consolidated `{name}` in {output_dir}") - crate.commit(f"Consolidated `{name}`\n\n{' '.join(sys.argv)}") + msg = f"Failed to consolidate `{name}` C code!" 
+ msg += f"\n\n{compile_errors}" + logger.error(msg) + else: + logger.info(msg) + crate.vcs.commit(msg) + if not compiles: + raise ValueError(f"Failed to compile consolidated `{name}` C code!") -if __name__ == "__main__": +@hydra.main(version_base=None, config_name="init.consolidate") +def main(cfg: ConsolidateConfig): try: - main() + _main(cfg) except Exception as e: - logger.error(e) - raise e + logger.exception(e) + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/src/ideas/init/crate.py b/src/ideas/init/crate.py index d6787a5..de37ebe 100644 --- a/src/ideas/init/crate.py +++ b/src/ideas/init/crate.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: Apache-2.0 # +import sys import logging from pathlib import Path from dataclasses import dataclass @@ -15,7 +16,7 @@ from ideas.tools import Crate -logger = logging.getLogger("ideas.preprocess") +logger = logging.getLogger("ideas.init.crate") @dataclass @@ -34,8 +35,7 @@ def __post_init__(self): cs.store(name="init.crate", node=CrateConfig) -@hydra.main(version_base=None, config_name="init.crate") -def main(cfg: CrateConfig) -> None: +def _main(cfg: CrateConfig) -> None: output_dir = Path(HydraConfig.get().runtime.output_dir) # Initialize crate @@ -44,23 +44,37 @@ def main(cfg: CrateConfig) -> None: type=cfg.crate_type, # type: ignore[reportArgumentType] vcs=cfg.vcs, # type: ignore[reportArgumentType] ) - crate.add(crate.cargo_toml) + + # Delete default cargo init code + crate.rust_src_path.write_text("") # Add static dependencies and sections crate.cargo_add(dep="openssl@0.10.75") + crate.cargo_add(dep="cc@1.2.53", section="build") if cfg.crate_type == "lib": with crate.cargo_toml.open("a") as f: f.write('\n[lib]\ncrate-type = ["lib", "cdylib"]\n') crate.invalidate_metadata() - # Add hydra directory + # Add cargo, workspace cargo, hydra log directory to VCS + crate.vcs.add(crate.cargo_toml, crate.rust_src_path) + if crate.metadata.get("workspace_root", None): + 
crate.vcs.add(Path(crate.metadata["workspace_root"]) / "Cargo.toml") if (output_subdir := HydraConfig.get().output_subdir) is not None: - crate.add(output_dir / output_subdir) + crate.vcs.add(output_dir / output_subdir) + msg = f"Initialized crate `{crate.root_package['name']}`" + logger.info(msg) + crate.vcs.commit(msg) -if __name__ == "__main__": +@hydra.main(version_base=None, config_name="init.crate") +def main(cfg: CrateConfig) -> None: try: - main() + _main(cfg) except Exception as e: - logger.error(e) - raise e + logger.exception(e) + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/src/ideas/init/workspace.py b/src/ideas/init/workspace.py new file mode 100644 index 0000000..a8f27dc --- /dev/null +++ b/src/ideas/init/workspace.py @@ -0,0 +1,71 @@ +# +# Copyright (C) 2026 Intel Corporation +# +# SPDX-License-Identifier: Apache-2.0 +# + + +import sys +import logging + +from textwrap import dedent as d +from dataclasses import dataclass +from pathlib import Path + +import hydra +from omegaconf import MISSING +from hydra.core.config_store import ConfigStore + +from ideas.tools import Workspace + +logger = logging.getLogger("ideas.init.workspace") + + +@dataclass +class WorkspaceConfig: + cargo_toml: Path = MISSING + vcs: str = "none" + + def __post_init__(self): + if self.vcs not in ["git", "none"]: + raise ValueError(f"Invalid VCS: {self.vcs}!") + + +cs = ConfigStore.instance() +cs.store(name="init.workspace", node=WorkspaceConfig) + + +def _main(cfg: WorkspaceConfig) -> None: + # Initialize workspace + workspace = Workspace(cfg.cargo_toml, vcs=cfg.vcs) # type: ignore[reportArgumentType] + + if cfg.vcs == "git": + # Write .gitignore + (cfg.cargo_toml.parent / ".gitignore").write_text( + d(""" + Cargo.lock + target/ + *.log + *.jsonl + """).strip() + ) + + # Commit initial repo + workspace.vcs.add(Path("Cargo.toml")) + workspace.vcs.add(Path(".gitignore")) + msg = "Created cargo workspace" + logger.info(msg) + workspace.vcs.commit(msg) + + 
+@hydra.main(version_base=None, config_name="init.workspace") +def main(cfg: WorkspaceConfig) -> None: + try: + _main(cfg) + except Exception as e: + logger.exception(e) + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/src/ideas/learn/translate.py b/src/ideas/learn/translate.py index 9083eed..7d11090 100644 --- a/src/ideas/learn/translate.py +++ b/src/ideas/learn/translate.py @@ -21,7 +21,7 @@ from dspy.teleprompt.gepa.gepa_utils import DSPyTrace, ScoreWithFeedback from ideas import model, ModelConfig, GenerateConfig, tools -from ideas.translate_symbol import SymbolTranslatorSignature +from ideas.translate_snippet import SnippetTranslatorSignature logger = logging.getLogger("ideas.learn.translate") @@ -67,7 +67,7 @@ def metric( # Write predicted translation rust_srcs = [] for name, translation in gold.crate_translation.items(): - if name != gold.symbol_name: + if name != gold.snippet_name: rust_srcs.append(translation) else: rust_srcs.append(pred_translation) @@ -80,7 +80,9 @@ def metric( crate.rust_src_path.write_text(rust_src) # Attempt to build and run all tests - success, _ = tools.run_subprocess(["cargo", "test", f"--manifest-path={cargo_toml}"]) + success, _, _, _ = tools.run_subprocess( + ["cargo", "test", f"--manifest-path={cargo_toml}"] + ) if not success: return ScoreWithFeedback( @@ -117,14 +119,14 @@ def split_examples( for jsonl in student_jsonl.read_text().splitlines(): student_translation = json.loads(jsonl) if student_translation["success"]: - student_success[student_translation["symbol_name"]] = True + student_success[student_translation["snippet_name"]] = True # Accumulate all successful teacher translations for the crate crate_translation = OrderedDict() for jsonl in teacher_jsonl.read_text().splitlines(): teacher_translation = json.loads(jsonl) if teacher_translation["success"]: - crate_translation[teacher_translation["symbol_name"]] = teacher_translation[ + crate_translation[teacher_translation["snippet_name"]] = 
teacher_translation[ "translation" ] @@ -145,7 +147,7 @@ def split_examples( # Use failed translations for validation if ( - student_success[teacher_translation["symbol_name"]] + student_success[teacher_translation["snippet_name"]] and teacher_translation["success"] ): train_examples.append(example) @@ -182,7 +184,7 @@ def main(cfg: TrainConfig) -> None: skip_perfect_score=False, ) - program = dspy.ChainOfThought(SymbolTranslatorSignature) + program = dspy.ChainOfThought(SnippetTranslatorSignature) optimized_program = gepa.compile( program, trainset=trainset, diff --git a/src/ideas/model.py b/src/ideas/model.py index 09e1c83..9181e25 100644 --- a/src/ideas/model.py +++ b/src/ideas/model.py @@ -23,7 +23,7 @@ class ModelConfig: @dataclass class GenerateConfig: - max_new_tokens: int = 32000 + max_new_tokens: int = 64000 temperature: float = 0.0 top_p: float = 1.0 top_k: int | None = None diff --git a/src/ideas/sync.rs b/src/ideas/sync.rs new file mode 100644 index 0000000..2c1f4e9 --- /dev/null +++ b/src/ideas/sync.rs @@ -0,0 +1,136 @@ +use std::fmt; +use std::ops::{Deref, DerefMut}; +use std::ptr; +use std::sync::{ + LockResult, Mutex as StdMutex, MutexGuard as StdMutexGuard, PoisonError, TryLockError, + TryLockResult, +}; + +#[repr(C)] +pub struct Mutex { + // Keep the payload first so it stays at offset 0 for C interop. 
+ pub value: T, + inner: StdMutex<()>, +} + +pub struct MutexGuard<'a, T: Sized> { + _inner: StdMutexGuard<'a, ()>, + value: &'a mut T, +} + +impl Mutex { + pub const fn new(value: T) -> Self { + Self { + value, + inner: StdMutex::new(()), + } + } + + pub fn into_inner(self) -> LockResult { + let Self { inner, value } = self; + + match inner.into_inner() { + Ok(()) => Ok(value), + Err(_) => Err(PoisonError::new(value)), + } + } +} + +impl Mutex { + pub fn lock(&self) -> LockResult> { + match self.inner.lock() { + Ok(inner) => Ok(self.guard_from_inner(inner)), + Err(error) => Err(PoisonError::new(self.guard_from_inner(error.into_inner()))), + } + } + + pub fn try_lock(&self) -> TryLockResult> { + match self.inner.try_lock() { + Ok(inner) => Ok(self.guard_from_inner(inner)), + Err(TryLockError::WouldBlock) => Err(TryLockError::WouldBlock), + Err(TryLockError::Poisoned(error)) => Err(TryLockError::Poisoned(PoisonError::new( + self.guard_from_inner(error.into_inner()), + ))), + } + } + + pub fn is_poisoned(&self) -> bool { + self.inner.is_poisoned() + } + + pub fn clear_poison(&self) { + self.inner.clear_poison(); + } + + pub fn get_mut(&mut self) -> LockResult<&mut T> { + match self.inner.get_mut() { + Ok(()) => Ok(&mut self.value), + Err(_) => Err(PoisonError::new(&mut self.value)), + } + } + + fn guard_from_inner<'a>(&'a self, inner: StdMutexGuard<'a, ()>) -> MutexGuard<'a, T> { + MutexGuard { + _inner: inner, + value: unsafe { &mut *self.data_ptr() }, + } + } + + pub fn data_ptr(&self) -> *mut T { + ptr::addr_of!(self.value).cast_mut() + } +} + +impl From for Mutex { + fn from(value: T) -> Self { + Self::new(value) + } +} + +impl Default for Mutex { + fn default() -> Self { + Self::new(T::default()) + } +} + +impl fmt::Debug for Mutex { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.try_lock() { + Ok(guard) => f.debug_struct("Mutex").field("data", &&*guard).finish(), + Err(TryLockError::Poisoned(error)) => { + let guard = 
error.into_inner(); + f.debug_struct("Mutex") + .field("data", &&*guard) + .field("poisoned", &true) + .finish() + } + Err(TryLockError::WouldBlock) => f + .debug_struct("Mutex") + .field("data", &format_args!("")) + .finish(), + } + } +} + +impl Deref for MutexGuard<'_, T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + self.value + } +} + +impl DerefMut for MutexGuard<'_, T> { + fn deref_mut(&mut self) -> &mut Self::Target { + self.value + } +} + +impl fmt::Debug for MutexGuard<'_, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt(self.value, f) + } +} + +unsafe impl Send for Mutex {} +unsafe impl Sync for Mutex {} diff --git a/src/ideas/test_symbol.py b/src/ideas/test_symbol.py new file mode 100644 index 0000000..ddcd46f --- /dev/null +++ b/src/ideas/test_symbol.py @@ -0,0 +1,240 @@ +# +# Copyright (C) 2026 Intel Corporation +# +# SPDX-License-Identifier: Apache-2.0 +# + +import re +import logging +import textwrap +from pathlib import Path + +import dspy + +from .tools import Crate, run_subprocess +from .ast import Symbol, clang_make_global_, clang_make_extern_ + +logger = logging.getLogger("ideas.test_symbol") + + +class SymbolTester(dspy.Module): + def __init__(self, crate: Crate, symbols: list[Symbol]): + super().__init__() + self.crate = crate + + # Write a build script to compile C code as a static library and link to it + self.write_build_script_() + + # Rewrite C code to make each function global. Then generate a Rust binding for it + # to force the Rust linker to include that C function in the Rust artifact. + # FIXME: If we ever test variables we should generate bindings for those here too! 
+ self.main_function = "" + for symbol in symbols: + if not (symbol.is_function and symbol.is_definition): + continue + if self.crate.is_bin and symbol.spelling == "main": + # main requires special handling because we must bind to it as _main and + # statically create a Rust main that calls it + self.main_function = self.write_main_binding() + else: + clang_make_global_(self.crate.c_src_path, symbol.spelling) + self.write_symbol_binding_(symbol.spelling) + self.crate.vcs.add(self.crate.c_src_path) + + # Check whether all of the changes compile and commit them + passes, output = self.test() + msg = f"Prepared `{self.crate.root_package['name']}` for symbol testing!" + if not passes: + msg = f"Failed to prepare `{self.crate.root_package['name']}` for symbol testing!" + self.crate.vcs.commit(msg) + + # Error loudly if changes don't build + if not passes: + msg += output + raise ValueError(msg) + + def write_build_script_(self): + c_src_path = self.crate.c_src_path.relative_to(self.crate.cargo_toml.parent) + build_options = '.define("main", "_main")' if self.crate.is_bin else "" + build_rs_src = textwrap.dedent( + f""" + fn main() {{ + println!("cargo:rerun-if-changed={c_src_path}"); + cc::Build::new() + .compiler("clang") + .warnings(false) + .file("{c_src_path}") + {build_options} + .compile("library"); + println!("cargo:rustc-link-lib=static=library"); + // FIXME: How do we statically add libraries to link to? 
+ println!("cargo:rustc-link-lib=dylib=crypto"); + }} + """ + ) + + build_rs_path = self.crate.cargo_toml.parent / "build.rs" + build_rs_path.write_text(build_rs_src) + self.crate.vcs.add(build_rs_path) + + def write_symbol_binding_(self, symbol_name: str): + symbol_binding = get_linked_binding(symbol_name, self.crate.c_src_path) + + symbol_binding_path = self.crate.rust_src_path.parent / "binding" / f"{symbol_name}.rs" + symbol_binding_path.parent.mkdir(exist_ok=True) + symbol_binding_path.write_text(symbol_binding) + self.crate.vcs.add(symbol_binding_path) + + binding_path = self.crate.rust_src_path.parent / "binding.rs" + with binding_path.open("a+") as f: + f.write(f"pub mod {symbol_name};\n") + self.crate.vcs.add(binding_path) + + def write_main_binding(self) -> str: + # Get binding for main (redefined as _main) + main_binding = get_linked_binding("_main", self.crate.c_src_path, "-Dmain=_main") + + main_binding_path = self.crate.rust_src_path.parent / "binding" / "main.rs" + main_binding_path.parent.mkdir(exist_ok=True) + main_binding_path.write_text(main_binding) + self.crate.vcs.add(main_binding_path) + + # Return appropriate main function instead of writing to binding.rs + if "fn _main()" in main_binding: + return textwrap.dedent( + """ + pub fn main() { + let ret = unsafe { binding::main::_main() }; + std::process::exit(ret); + } + """ + ) + else: + return textwrap.dedent( + """ + pub fn main() { + let mut args: Vec<_> = std::env::args().into_iter().map(|s| std::ffi::CString::new(s).unwrap().into_raw()).collect(); + let ret = unsafe { binding::main::_main(args.len() as i32, args.as_mut_ptr()) }; + std::process::exit(ret); + } + """ + ) + + def test(self) -> tuple[bool, str]: + orig_rust_src = self.crate.rust_src_path.read_text() + rust_src = orig_rust_src + + # Remove forbid unsafe from Rust source + rust_src = re.sub(re.escape("#![forbid(unsafe_code)]"), "", rust_src) + + # Replace Rust Mutex with C ABI-compatible Mutex in Rust source + RUST_MUTEX = "use 
std::sync::{Mutex, MutexGuard};" + C_ABI_MUTEX = "mod sync;\nuse crate::sync::{Mutex, MutexGuard};" + rust_src = re.sub( + f"^{re.escape(RUST_MUTEX)}$", C_ABI_MUTEX, rust_src, flags=re.MULTILINE + ) + + # Reference wrapper module in Rust source + WRAPPER_MOD = "pub mod wrapper;" + wrapper_src_path = self.crate.rust_src_path.parent / "wrapper.rs" + wrapper_src_path.touch() + self.crate.vcs.add(wrapper_src_path) + if not re.search(f"^{re.escape(WRAPPER_MOD)}$", rust_src, flags=re.MULTILINE): + rust_src += WRAPPER_MOD + "\n" + + # Reference binding module in Rust source + BINDING_MOD = "pub mod binding;" + binding_src_path = self.crate.rust_src_path.parent / "binding.rs" + binding_src_path.touch() + orig_binding_src = binding_src_path.read_text() + binding_src = orig_binding_src + if not re.search(f"^{re.escape(BINDING_MOD)}$", rust_src, flags=re.MULTILINE): + rust_src += BINDING_MOD + "\n" + + binding_src_path.write_text(binding_src) + self.crate.vcs.add(binding_src_path) + + self.crate.rust_src_path.write_text(rust_src) + self.crate.vcs.add(self.crate.rust_src_path) + + # Try building the crate, which should always works, before testing and detect + # if we need to insert a main + builds, feedback = self.crate.cargo_build(allow_unsafe=True, fix_E0601=False) + if "error[E0601]" in feedback and self.main_function: + binding_src += "pub mod main;\n" + binding_src_path.write_text(binding_src) + self.crate.vcs.add(binding_src_path) + + rust_src += self.main_function + self.crate.rust_src_path.write_text(rust_src) + self.crate.vcs.add(self.crate.rust_src_path) + builds, feedback = self.crate.cargo_build(allow_unsafe=True, fix_E0601=False) + if not builds: + raise RuntimeError(f"Crate does not build!\n{feedback}") + passes, output, error, _ = self.crate.cargo_test() + + # Restore originals + binding_src_path.write_text(orig_binding_src) + self.crate.rust_src_path.write_text(orig_rust_src) + + return passes, output + error + + def forward(self, symbol: Symbol) -> bool: + 
logger.info(f"Testing symbol `{symbol.name}` ....") + + # Overwrite C symbol to reference extern symbol that we will link to the Rust symbol. + # It is very important that this happens first since it will overwrite any other changes + # made to the C code. + clang_make_extern_(self.crate.c_src_path, symbol.spelling) + self.crate.vcs.add(self.crate.c_src_path) + + # Remove reference to binding since we're testing it + binding_path = self.crate.rust_src_path.parent / "binding.rs" + orig_binding_src = binding_path.read_text() + binding_src = orig_binding_src.replace(f"pub mod {symbol.spelling};\n", "") + if binding_src == orig_binding_src: + logger.warning(f"Unable to find `{symbol.spelling}` in binding.rs!") + binding_path.write_text(binding_src) + self.crate.vcs.add(binding_path) + + # Run cargo test + passes, output = self.test() + msg = f"Tested symbol `{symbol.name}`" + if not passes: + msg = f"Failed to test symbol `{symbol.name}`" + logger.error(msg) + msg += f"\n\n{output}" + self.crate.vcs.commit(msg) + + return passes + + +def get_linked_binding(function_name: str, c_src_path: Path, *bindgen_args: str) -> str: + # Use bindgen to generate binding to C symbol + bindgen = [ + "bindgen", + "--disable-header-comment", + "--no-doc-comments", + "--no-layout-tests", + "--allowlist-function", + function_name, + str(c_src_path), + "--", + *bindgen_args, + ] + ok, binding, error, _ = run_subprocess(bindgen) + if not ok: + raise ValueError(f"`{' '.join(bindgen)}` failed!\n{binding + error}") + + # Parse binding since we need to add special link instructions + linked_binding = re.sub( + r'unsafe extern "C" {\n(.*)\n}', + r'#[link(name="library", kind="static")]\nunsafe extern "C" {\n #[unsafe(no_mangle)]\n\1\n}', + binding, + flags=re.DOTALL, + ) + if linked_binding == binding: + raise ValueError( + f"Failed to convert binding to linked binding for {function_name}!\n{binding}" + ) + return linked_binding diff --git a/src/ideas/testgen.py b/src/ideas/testgen.py new file 
mode 100644 index 0000000..e40acf8 --- /dev/null +++ b/src/ideas/testgen.py @@ -0,0 +1,73 @@ +# +# Copyright (C) 2026 Intel Corporation +# +# SPDX-License-Identifier: Apache-2.0 +# + +import sys +import json +import logging +from pathlib import Path +from dataclasses import dataclass + +import hydra +from omegaconf import MISSING +from hydra.core.config_store import ConfigStore +from hydra.core.hydra_config import HydraConfig + +from ideas.tools import run_subprocess + + +logger = logging.getLogger("ideas.testgen") + + +@dataclass +class TestgenConfig: + artifact: Path = MISSING + test_vector: Path = MISSING + + +cs = ConfigStore.instance() +cs.store(name="testgen", node=TestgenConfig) + + +def _main(cfg: TestgenConfig) -> None: + output_dir = Path(HydraConfig.get().runtime.output_dir) + logger.info(f"Saving results to {output_dir}") + + # Run the artifact and collect outputs + success, output, error, returncode = run_subprocess([str(cfg.artifact)]) + + # Stop the app on timeout + if returncode == "timeout": + raise RuntimeError( + f"Artifact {cfg.artifact} timed out! This may be due to indefinite waiting for `stdin`!" + ) + + if not success: + logger.warning( + f"Artifact {cfg.artifact} failed execution with return code {returncode}! The test vector will expect an error." 
+ ) + + # Write the .json test_vector + test_vector = { + "stdout": {"pattern": f"{output}"}, + "stderr": {"pattern": f"{error}"}, + "rc": returncode, + } + cfg.test_vector.parent.mkdir(parents=True, exist_ok=True) + with open(cfg.test_vector, "w") as f: + json.dump(test_vector, f, indent=2) + + +@hydra.main(version_base=None, config_name="testgen") +def main(cfg: TestgenConfig) -> None: + try: + _main(cfg) + except Exception as e: + logger.exception(e) + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/src/ideas/tools.py b/src/ideas/tools.py index 7f61122..5370483 100644 --- a/src/ideas/tools.py +++ b/src/ideas/tools.py @@ -7,7 +7,9 @@ import os import json from json import loads as js_loads +from textwrap import dedent as d +import tomlkit import logging import subprocess from functools import cached_property @@ -23,6 +25,83 @@ DEFAULT_TEST_TIMEOUT = 10.0 # seconds +class VCS: + def __init__( + self, + repo_dir: Path, + vcs: Literal["none", "git"] = "git", + ): + self.repo_dir = repo_dir + self.vcs = vcs + + def init(self, force_init: bool = False) -> bool: + if self.vcs == "none": + return True + + ok, out = False, "" + if not force_init: + ok, out = self("rev-parse --abbrev-ref HEAD") + if not ok: + ok, out = self("init --initial-branch=main") + if not ok: + raise ValueError(f"Failed to initialize git in {self.repo_dir}!\n{out}") + return ok + + def add(self, *paths: Path) -> bool: + if self.vcs == "none": + return True + + ok = True + for path in paths: + ok, out = self(f"add {path}") + if not ok: + raise ValueError(f"Failed to add {path}!\n{out}") + return ok + + def commit(self, message: str = "") -> bool: + if self.vcs == "none": + return True + + ok, out = self("commit --allow-empty -F -", input=message) + if not ok: + raise ValueError(f"Failed to commit changes to git!\n{out}") + return ok + + def __call__(self, cmd, *args, **kwargs) -> tuple[bool, str]: + if self.vcs == "none": + return True, "" + + success, output, error, _ = 
run_subprocess( + [self.vcs, "-C", str(self.repo_dir), *cmd.split(" "), *args], **kwargs + ) + return success, output + error + + +class Workspace: + def __init__( + self, + cargo_toml: Path, + vcs: Literal["none", "git"] = "none", + ): + self.cargo_toml = cargo_toml + + workspace_dir = self.cargo_toml.parent + self.vcs = VCS(repo_dir=workspace_dir, vcs=vcs) + + if not self.cargo_toml.exists(): + # Create a new workspace + os.makedirs(workspace_dir, exist_ok=True) + self.cargo_toml.write_text( + d(""" + [workspace] + resolver = "3" + """).strip() + ) + + # Initialize repository if needed + self.vcs.init(force_init=True) + + class Crate: def __init__( self, @@ -30,18 +109,19 @@ def __init__( vcs: Literal["none", "git"] = "none", type: Literal["bin", "lib"] | None = None, ): - self.cargo_toml: Path = cargo_toml - self.vcs = vcs + self.cargo_toml = cargo_toml + + crate_dir = self.cargo_toml.parent + self.vcs = VCS(repo_dir=crate_dir, vcs=vcs) if not self.cargo_toml.exists(): # Create a new crate with specified type, but without VCS - crate_dir = self.cargo_toml.parent if not type: raise ValueError( f"Crate at {crate_dir} does not exist; type must be specified!" 
) os.makedirs(crate_dir, exist_ok=True) - success, output = run_subprocess( + success, output, error, _ = run_subprocess( [ "cargo", "init", @@ -53,26 +133,24 @@ def __init__( ) if not success: raise RuntimeError( - f"Failed to create new crate at {crate_dir} with error:\n\n{output}" + f"Failed to create new crate at {crate_dir} with error:\n\n{output + error}" ) - # Initialize repository - if self.vcs == "git": - ok, out = self.git("rev-parse --abbrev-ref HEAD") - if not ok: - ok, out = self.git("init --initial-branch=main") - if not ok: - raise ValueError( - f"Failed to initialize git in {self.cargo_toml.parent}!\n{out}" - ) + # Add unsafe feature that allow unsafe code + self.cargo_feature(unsafe=[]) + + # Initialize repository if needed + self.vcs.init() @cached_property def metadata(self) -> dict[str, Any]: - success, out = run_subprocess( + success, out, error, _ = run_subprocess( ["cargo", "metadata", "--manifest-path", str(self.cargo_toml)], ) if not success: - raise ValueError(f"Failed to get cargo metadata from {self.cargo_toml}!\n{out}") + raise ValueError( + f"Failed to get cargo metadata from {self.cargo_toml}!\n{out + error}" + ) metadata = json.loads(out) return metadata @@ -91,6 +169,14 @@ def root_package(self) -> dict[str, Any]: root_package = next(filter(lambda p: p["id"] == root, self.metadata["packages"])) return root_package + @property + def workspace_root(self) -> Path: + workspace_root = self.metadata.get("workspace_root", None) + if workspace_root is None: + # Standalone crates without workspace metadata fall back to crate directory. 
+ return self.cargo_toml.parent + return Path(workspace_root) + @property def bin_targets(self) -> list[dict[str, Any]]: return list(filter(lambda t: "bin" in t["kind"], self.root_package["targets"])) @@ -123,6 +209,10 @@ def rust_src_path(self) -> Path: ) return rust_src_path + @property + def c_src_path(self) -> Path: + return self.rust_src_path.with_suffix(".c") + def cargo_add(self, dep: str, section: str | None = None) -> str: cmd = [ "cargo", @@ -134,37 +224,31 @@ def cargo_add(self, dep: str, section: str | None = None) -> str: cmd.append(f"--{section}") cmd.append(dep) - success, output = run_subprocess(cmd) + success, output, error, _ = run_subprocess(cmd) if not success: raise RuntimeError( - f"Failed to add dependency {dep} to {self.cargo_toml} with error:\n\n{output}" + f"Failed to add dependency {dep} to {self.cargo_toml} with error:\n\n{output + error}" ) # Invalidate cached metadata self.invalidate_metadata() return output - def cargo_feature(self, feature: str) -> None: - # Create section if it doesn't exist - if "[features]" not in self.cargo_toml.read_text(): - with self.cargo_toml.open("a") as f: - f.write("\n[features]\n") - - # Add the requested feature - # FIXME: It's user responsibility to ensure features are cross-compatible - self.cargo_toml.write_text( - self.cargo_toml.read_text().replace("[features]\n", f"[features]\n{feature}\n") - ) + def cargo_feature(self, **features: list[str]) -> None: + # Set/overwrite new features + cargo_toml = tomlkit.loads(self.cargo_toml.read_text()) + cargo_features = cargo_toml.get("features", {}) + for name, deps in features.items(): + cargo_features[name] = deps + cargo_toml["features"] = cargo_features + self.cargo_toml.write_text(tomlkit.dumps(cargo_toml)) # Invalidate cached metadata self.invalidate_metadata() - def cargo_build(self, allow_unsafe: bool = False) -> tuple[bool, str]: - env = os.environ.copy() - # Disallow unsafe by default; allow when explicitly requested - if not allow_unsafe: - 
env["RUSTFLAGS"] = (env.get("RUSTFLAGS", "") + " -D unsafe-code").strip() - + def cargo_build( + self, allow_unsafe: bool = False, fix_E0601: bool = True + ) -> tuple[bool, str]: cmd = [ "cargo", "build", @@ -172,44 +256,30 @@ def cargo_build(self, allow_unsafe: bool = False) -> tuple[bool, str]: "--color=never", f"--manifest-path={self.cargo_toml}", ] - builds, output = run_subprocess(cmd, env=env) + if allow_unsafe: + cmd += ["--features=unsafe"] + builds, output, error, _ = run_subprocess(cmd) # Work around E0601 error "No main function was found in a binary crate." - if "error[E0601]" in output: + if fix_E0601 and "error[E0601]" in error: rust_src = self.rust_src_path.read_text() with self.rust_src_path.open("a") as f: - f.write('fn main() {\n println!("Hello, world!");\n}\n') - builds, output = run_subprocess(cmd, env=env) + f.write('\n\nfn main() {\n println!("Hello, world!");\n}\n') + builds, output, error, _ = run_subprocess(cmd) self.rust_src_path.write_text(rust_src) - return builds, output - - def add(self, *paths: Path) -> bool: - if self.vcs != "git": - return True - - ok = True - for path in paths: - ok, out = self.git(f"add {path}") - if not ok: - raise ValueError(f"Failed to add {path}!\n{out}") - return ok - - def commit(self, message: str = "") -> bool: - if self.vcs != "git": - return True - - ok, out = self.git("commit --allow-empty -F -", input=message) - if not ok: - raise ValueError(f"Failed to commit changes to git!\n{out}") - return ok - - def git(self, cmd, *args, **kwargs) -> tuple[bool, str]: - if self.vcs != "git": - return True, "" + return builds, output + error - repo_dir = self.cargo_toml.parent - return run_subprocess(["git", "-C", str(repo_dir), *cmd.split(" "), *args], **kwargs) + def cargo_test(self) -> tuple[bool, str, str, int | Literal["timeout"]]: + cmd = [ + "cargo", + "test", + "--quiet", + "--color=never", + f"--manifest-path={self.cargo_toml}", + "--features=unsafe", + ] + return run_subprocess(cmd) def write(self, path: 
Path, data, **kwargs): if path.is_absolute(): @@ -223,7 +293,7 @@ def run_subprocess( input: str | None = None, timeout: float | None = None, **kwargs, -) -> tuple[bool, str]: +) -> tuple[bool, str, str, int | Literal["timeout"]]: try: result = subprocess.run( cmd, @@ -234,9 +304,16 @@ def run_subprocess( timeout=timeout, **kwargs, ) - return True, result.stdout + return True, result.stdout, result.stderr, result.returncode except subprocess.CalledProcessError as e: - return False, e.stdout + e.stderr + return False, e.stdout, e.stderr, e.returncode + except subprocess.TimeoutExpired as e: + return ( + False, + e.stdout.decode() if e.stdout else "", + e.stderr.decode() if e.stderr else "", + "timeout", + ) def compile_c( @@ -252,7 +329,8 @@ def compile_c( cmd.append(source_file) cmd.extend(["-o", output_file]) - return run_subprocess(cmd) + success, output, error, _ = run_subprocess(cmd) + return success, output + error def check_c( @@ -271,7 +349,8 @@ def check_c( cmd.append("-") cmd.extend(["-o", "/dev/null"]) - return run_subprocess(cmd, input=code) + success, output, error, _ = run_subprocess(cmd, input=code) + return success, output + error def compile_rust( @@ -292,7 +371,8 @@ def compile_rust( cmd.append("-") cmd.extend(["-o", str(output_file)]) - return run_subprocess(cmd, input=code) + success, output, error, _ = run_subprocess(cmd, input=code) + return success, output + error def check_rust( @@ -312,7 +392,8 @@ def check_rust( with TemporaryDirectory() as dirname: cmd.extend(["-", "--out-dir", dirname]) - return run_subprocess(cmd, input=code) + success, output, error, _ = run_subprocess(cmd, input=code) + return success, output + error def run_clippy( @@ -390,7 +471,8 @@ def run_test( stdin = "\n".join(stdin) # Run test and right-strip output of whitespace - return run_subprocess([str(executable), *args], stdin, timeout=timeout) + success, output, error, _ = run_subprocess([str(executable), *args], stdin, timeout=timeout) + return success, output + error 
def check_test( @@ -427,18 +509,3 @@ def run_and_check_tests( for test_case in test_cases: success += 1 if run_and_check_test(executable, test_case, timeout=timeout) else 0 return success - - -def clang_rename_(source: Path, renames: dict[str, str], compile_commands: Path | None = None): - for name, new_name in renames.items(): - logger.info(f"{source}: renaming `{name}` to `{new_name}`") - cmd = ["clang-refactor-21", "local-rename"] - if compile_commands is not None: - cmd.append(f"-p={str(compile_commands.absolute())}") - cmd.append(f"--old-qualified-name={name}") - cmd.append(f"--new-qualified-name={new_name}") - cmd.append("-i") - cmd.append(str(source)) - success, output = run_subprocess(cmd) - if not success: - raise ValueError(f"`{' '.join(cmd)}` failed!\n{output}") diff --git a/src/ideas/translate.py b/src/ideas/translate.py index 14f74cd..4e15681 100644 --- a/src/ideas/translate.py +++ b/src/ideas/translate.py @@ -1,9 +1,10 @@ # -# Copyright (C) 2025 Intel Corporation +# Copyright (C) 2026 Intel Corporation # # SPDX-License-Identifier: Apache-2.0 # +import sys import logging from pathlib import Path from dataclasses import dataclass, field @@ -11,13 +12,12 @@ import dspy import hydra from omegaconf import MISSING -from clang.cindex import TranslationUnit from hydra.core.config_store import ConfigStore from hydra.core.hydra_config import HydraConfig from ideas import adapters, model, ModelConfig, GenerateConfig -from ideas import SymbolTranslator, RecurrentTranslator -from ideas import extract_info_c +from ideas import SnippetTranslator, RecurrentTranslator, WrapperGenerator, SymbolTester +from ideas import create_translation_unit, extract_info_c from ideas.init.consolidate import get_symbols_and_dependencies from .tools import Crate @@ -30,8 +30,13 @@ class TranslateConfig: model: ModelConfig = field(default_factory=ModelConfig) generate: GenerateConfig = field(default_factory=GenerateConfig) + cargo_toml: Path = MISSING + translator: str = 
"ChainOfThought" + translator_max_iters: int = 5 + wrapper_max_iters: int = 5 max_iters: int = 5 + readonly_cache: Path | None = None vcs: str = "none" @@ -40,38 +45,64 @@ class TranslateConfig: cs.store(name="translate", node=TranslateConfig) -@hydra.main(version_base=None, config_name="translate") -def main(cfg: TranslateConfig) -> None: +def _main(cfg: TranslateConfig) -> None: output_dir = Path(HydraConfig.get().runtime.output_dir) logger.info(f"Saving results to {output_dir}") - crate = Crate(cargo_toml=output_dir / "Cargo.toml", vcs=cfg.vcs) # type: ignore[reportArgumentType] + crate = Crate(cargo_toml=cfg.cargo_toml.resolve(), vcs=cfg.vcs) # type: ignore[reportArgumentType] + # Get global symbol table + tu = create_translation_unit(cfg.filename) + asts = [extract_info_c(tu)] + symbols, dependencies = get_symbols_and_dependencies( + asts, source_priority=[], external_symbol_names=["c:@F@main"] if crate.is_bin else None + ) + + # Create translation agent model.configure(cfg.model, cfg.generate) dspy.configure(adapter=adapters.ChatAdapter()) translator = getattr(dspy, cfg.translator) - symbol_translator = SymbolTranslator(translator, crate, cfg.max_iters) - agent = RecurrentTranslator(symbol_translator) + snippet_translator = SnippetTranslator( + translator, crate, cfg.translator_max_iters, readonly_cache=cfg.readonly_cache + ) + symbol_wrapper = WrapperGenerator( + crate, cfg.wrapper_max_iters, readonly_cache=cfg.readonly_cache + ) + symbol_tester = SymbolTester(crate, list(symbols.values())) + agent = RecurrentTranslator( + crate, snippet_translator, symbol_wrapper, symbol_tester, cfg.max_iters + ) - # Get global symbol table - tu = TranslationUnit.from_source(cfg.filename) - asts = [extract_info_c(tu)] - symbols, dependencies = get_symbols_and_dependencies(asts) + # Run translation agent pred = agent(symbols, dependencies) translation: str = pred.translation translated: bool = pred.success + # FIXME: Only keep wrappers for symbols we need to export + # Write 
translation to disk - crate.rust_src_path.parent.mkdir(exist_ok=True, parents=True) crate.rust_src_path.write_text(translation) + crate.vcs.add(crate.rust_src_path) # Commit translation - crate.add(crate.rust_src_path) + crate.vcs.add(crate.c_src_path) if (output_subdir := HydraConfig.get().output_subdir) is not None: - crate.add(output_dir / output_subdir) - name = f"`{crate.root_package['name']}`" - crate.commit( - f"Translated {name} to Rust" if translated else f"Failed to translate {name} to Rust" - ) + crate.vcs.add(output_dir / output_subdir) + msg = f"Translated `{crate.root_package['name']}` to Rust!" + if not translated: + msg = f"Failed to translate `{crate.root_package['name']}` to Rust!" + logger.error(msg) + else: + logger.info(msg) + crate.vcs.commit(msg) + + +@hydra.main(version_base=None, config_name="translate") +def main(cfg: TranslateConfig) -> None: + try: + _main(cfg) + except Exception as e: + logger.exception(e) + sys.exit(1) if __name__ == "__main__": diff --git a/src/ideas/translate_recurrent.py b/src/ideas/translate_recurrent.py index 356eebe..07b3185 100644 --- a/src/ideas/translate_recurrent.py +++ b/src/ideas/translate_recurrent.py @@ -1,80 +1,277 @@ # -# Copyright (C) 2025 Intel Corporation +# Copyright (C) 2026 Intel Corporation # # SPDX-License-Identifier: Apache-2.0 # import logging -from graphlib import TopologicalSorter -from collections import OrderedDict, defaultdict, deque +from pathlib import Path +from difflib import unified_diff +from collections.abc import Iterable import dspy +import networkx as nx from .ast import Symbol +from .tools import Crate logger = logging.getLogger("ideas.translate_recurrent") class RecurrentTranslator(dspy.Module): - def __init__(self, symbol_translator: dspy.Module): + def __init__( + self, + crate: Crate, + symbol_translator: dspy.Module, + symbol_wrapper: dspy.Module, + symbol_tester: dspy.Module | None = None, + max_iters: int = 1, + ): super().__init__() + self.crate = crate 
self.translate_symbol = symbol_translator + self.wrap_symbol = symbol_wrapper + self.test_symbol = symbol_tester + self.max_iters = max_iters def forward( - self, symbols: dict[str, Symbol], dependencies: dict[str, list[str]] + self, + symbols: dict[str, Symbol], + dependencies: dict[tuple[str, ...], Iterable[tuple[str, ...]]], ) -> dspy.Prediction: - references = transpose_graph(dependencies) - sorted_symbol_names = list(TopologicalSorter(dependencies).static_order()) + G = nx.from_dict_of_lists(dependencies, create_using=nx.DiGraph) + assert isinstance(G, nx.DiGraph) + + # FIXME: This is from SnippetTranslator + self.crate.rust_src_path.write_text("use std::sync::{Mutex, MutexGuard};\n\n") # Translate symbols in topological order - translations: dict[str, str] = OrderedDict() - for symbol_name in sorted_symbol_names: - # Ignore tag definitions and function declarations - if symbol_name not in symbols: - logger.warning(f"Skipping symbol `{symbol_name}` ...") + snippets: dict[str, tuple[str, ...]] = {} + translations: dict[tuple[str, ...], str] = {} + sorted_symbol_names = list(reversed(list(nx.topological_sort(G)))) + symbol_names_with_variable = list( + filter( + lambda symbol_names: any(symbols[n].is_variable for n in symbol_names), + sorted_symbol_names, + ) + ) + symbols_count = len(sorted_symbol_names) + for i, symbol_names in enumerate(sorted_symbol_names, start=1): + logger.info( + f"Translating symbol group `{' '.join(symbol_names)}` ({i}/{symbols_count}) ..." + ) + # Gather code for each symbol and check if we have already translated such a snippet + snippet = "\n".join(symbols[name].code.strip() + "\n" for name in symbol_names) + if snippet in snippets: + logger.info( + f"Skipping translation of `{' '.join(symbol_names)}` because it was already translated by `{' '.join(snippets[snippet])}`..." 
+ ) + translations[symbol_names] = translations[snippets[snippet]] continue + snippets[snippet] = symbol_names - symbol = symbols[symbol_name] - dep_names = [ - name for name in bfs(symbol_name, references, max_depth=1) if name in symbols - ] + # FIXME: We could save context here by only including translations of descendants of the current symbol. + # However, one must prompt the LLM to never generate use statements since those could conflict. + # Use all unique translations as reference code since many symbol names can map to the same translation + reference_code = "\n".join( + dict.fromkeys( + translations[name] for name in sorted_symbol_names if name in translations + ) + ) - # Gather reference and dependent code in order of translations and sorted symbols, respectively - ref_translations = "\n\n".join(translations.values()) - dep_symbols = [symbols[name] for name in sorted_symbol_names if name in dep_names] + # Gather dependent code in topological order + predecessors = list(G.predecessors(symbol_names)) + dependent_code = "\n".join( + symbols[name].code.strip() + "\n" + for names in sorted_symbol_names + if names in predecessors + for name in names + ) - pred = self.translate_symbol(ref_translations, symbol, dep_symbols) + # Use static translation for any symbol that a variable depends on + static_translation = "" + if any( + nx.has_path(G, group_with_variable, symbol_names) + for group_with_variable in symbol_names_with_variable + ): + static_translation = "\n".join( + symbols[name].static_translation.strip() + "\n" + for name in symbol_names + if symbols[name].static_translation != "" + ).strip() + if static_translation: + logger.info(f"Using static translation for `{' '.join(symbol_names)}`") + + # Translate snippet and save it if successful + pred = self.translate_with_retries( + reference_code=reference_code, + symbols=[symbols[name] for name in symbol_names], + dependent_code=dependent_code, + translation=static_translation, + ) if not pred.success: 
break + translations[symbol_names] = pred.translation.code.strip() + "\n" - # Save translation if it builds - translations[symbol_name] = pred.translation.code - - translation = "\n\n".join(translations.values()) - return dspy.Prediction( + # Re-assemble translation in order + translation = "use std::sync::{Mutex, MutexGuard};\n\n" + translation += "\n".join( + dict.fromkeys( + translations[name] for name in sorted_symbol_names if name in translations + ) + ) + if not self.crate.is_bin: + translation += "\npub mod wrapper;\n" + pred = dspy.Prediction( translation=translation, success=len(translations) == len(sorted_symbol_names) ) + return pred + + def translate_with_retries( + self, + reference_code: str, + symbols: list[Symbol], + dependent_code: str, + translation: str = "", + ) -> dspy.Prediction: + prior_translation, feedback = "", "" + pred = dspy.Prediction() + for i in range(max(self.max_iters, 1)): + # Attempt translation and exit early on success + pred = self.translate( + reference_code, + symbols, + dependent_code, + prior_translation=prior_translation, + feedback=feedback, + translation=translation if i == 0 else "", + ) + if pred.success: + break + + # On failure log a diff against prior translation + name = " ".join([f"`{s.name}`" for s in symbols]) + msg = f"Failed to translate symbol(s) {name} ({i + 1}/{self.max_iters})!" + if prior_translation: + diff = "\n".join( + unified_diff( + prior_translation.splitlines(), + pred.translation.code.splitlines(), + lineterm="", + fromfile="prior_translation", + tofile="current_translation", + ) + ) + if "reasoning" in pred: + msg += f"\n# Reason\n{pred.reasoning.strip()}\n" + msg += f"\n# Translation Diff\n{diff.strip()}\n" + logger.error(msg) + + # Create feedback for next iteration + prior_translation = pred.translation.code + feedback = "Carefully compare the Rust translation in `prior_translation` with the C `snippet` and find where any mis-translations happen. 
Then use this knowledge to generate a correct Rust `translation` of the C `snippet`. You should treat the C `snippet` as correct, so if the C `snippet` has a bug, you should replicate that bug in the Rust `translation` too." + return pred + + def _snapshot_wrapper_files(self) -> tuple[dict[Path, str], set[Path]]: + wrapper_paths: dict[Path, str] = {} + wrapper_dir = self.crate.rust_src_path.parent / "wrapper" + wrapper_mod = self.crate.rust_src_path.parent / "wrapper.rs" + + if wrapper_mod.exists(): + wrapper_paths[wrapper_mod] = wrapper_mod.read_text() + + existing_wrapper_files: set[Path] = set() + if wrapper_dir.exists(): + existing_wrapper_files = set( + path for path in wrapper_dir.rglob("*.rs") if path.is_file() + ) + for path in existing_wrapper_files: + wrapper_paths[path] = path.read_text() + + return wrapper_paths, existing_wrapper_files + def _restore_wrapper_files( + self, original_wrapper_src: dict[Path, str], existing_wrapper_files: set[Path] + ) -> None: + wrapper_dir = self.crate.rust_src_path.parent / "wrapper" + wrapper_mod = self.crate.rust_src_path.parent / "wrapper.rs" -def transpose_graph(graph: dict[str, list[str]]) -> dict[str, list[str]]: - transpose: dict[str, list[str]] = defaultdict(list) - for node, neighbors in graph.items(): - for neighbor in neighbors: - transpose[neighbor].append(node) - return dict(transpose) + if wrapper_mod.exists() and wrapper_mod not in original_wrapper_src: + wrapper_mod.unlink() + if wrapper_dir.exists(): + for path in (path for path in wrapper_dir.rglob("*.rs") if path.is_file()): + if path not in existing_wrapper_files: + path.unlink() -def bfs(node: str, graph: dict[str, list[str]], max_depth: int = -1) -> list[str]: - nodes = [node] - queue = deque() - queue.append((node, 0)) - while queue: - curr_node, level = queue.popleft() - for neighbor in graph.get(curr_node, []): - # ignore visited or too deep nodes - if neighbor in nodes or (max_depth >= 0 and level + 1 > max_depth): + for path, src in 
original_wrapper_src.items(): + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(src) + + def translate( + self, + reference_code: str, + symbols: list[Symbol], + dependent_code: str, + prior_translation: str = "", + feedback: str = "", + translation: str = "", + ) -> dspy.Prediction: + orig_rust_src = self.crate.rust_src_path.read_text() + original_wrapper_src, existing_wrapper_files = self._snapshot_wrapper_files() + + # Translate symbols and save it if successful + pred = self.translate_symbol( + name=" ".join(symbol.name for symbol in symbols), + reference_code=reference_code, + snippet="\n".join(symbol.code.strip() + "\n" for symbol in symbols), + dependent_code=dependent_code, + prior_translation=prior_translation, + feedback=feedback, + translation=translation, + ) + if not pred.success: + return pred + + # Write translation to crate + translation = pred.translation.code.strip() + "\n" + with self.crate.rust_src_path.open("a") as f: + f.write(translation + "\n") + + # Generate wrapper, that may modify the translation, for each symbol + unsafe_translation = pred.translation.code + wrappers: list[dspy.Prediction] = [] + for symbol in symbols: + # We can only hybrid build-test functions and variables + if not (symbol.is_function and symbol.is_definition) and not symbol.is_variable: continue - nodes.append(neighbor) - queue.append((neighbor, level + 1)) - # ignore initial node - return nodes[1:] + + # Wrap function or annotate variable + wrapper = self.wrap_symbol(symbol, reference_code, unsafe_translation) + unsafe_translation = wrapper.translation + + # Only functions needs to be cached since an LLM does not operate on variables + if symbol.is_function and symbol.is_definition: + wrappers.append(wrapper) + + # If wrapping failed exit early + if not wrapper.success: + pred.success = False + break + + # Try testing symbol and exit early if it fails + if self.test_symbol and not self.test_symbol(symbol): + pred.success = False + break + + if 
pred.success: + # Write successful translation and wrappers to cache + self.translate_symbol.write_cache(pred) + for wrapper in wrappers: + self.wrap_symbol.write_cache(wrapper) + else: + # Restore Rust source and wrapper files to original state since translation failed + self.crate.rust_src_path.write_text(orig_rust_src) + self._restore_wrapper_files(original_wrapper_src, existing_wrapper_files) + + return pred diff --git a/src/ideas/translate_snippet.py b/src/ideas/translate_snippet.py new file mode 100644 index 0000000..b29f013 --- /dev/null +++ b/src/ideas/translate_snippet.py @@ -0,0 +1,254 @@ +# +# Copyright (C) 2026 Intel Corporation +# +# SPDX-License-Identifier: Apache-2.0 +# + +import logging +import sqlite3 +from pathlib import Path + +import dspy + +from .tools import Crate +from .adapters import Code + + +logger = logging.getLogger("ideas.translate_snippet") + +CodeC = Code["c"] +CodeRust = Code["rust"] + + +class SnippetTranslatorSignature(dspy.Signature): + """ + Generate an idiomatic, memory-safe Rust translation of the snippet. + The reference_code contains Rust code that should be used by the translation. + The snippet contains a single C definition to translate to idiomatic, memory-safe Rust. + The dependent_code contains C code that uses the C snippet. + Reason about the dependent_code to understand any special memory management or complex ownership requirements a safe and idiomatic translation may need to take into account. + Ensure the translation of the snippet does not use any unsafe constructs! + Do not refactor the reference_code in the translation! + Do not translate dependent_code to Rust in the translation! + Do not define any implementations (`impl`) in the translation! + Always assume all C integer arithmetic operations on the underlying value are intended to have wrapping semantics, and thus any translation should use Rust's wrapping arithmetic functions like `wrapping_add`, `wrapping_shr`, etc.. 
+ Analyze all bitwise operations carefully, especially rotations. + For all bitwise operations, including those that may appear to swap bits for bytes, implement the behavior exactly as written in the C code, without making assumptions about intent. + For mutable global state, always translate to `std::sync::Mutex`-backed statics, use only the short names `Mutex` and `MutexGuard` (never `::std::sync::Mutex` nor `std::sync::Mutex` in emitted code), and require all accesses to go through `lock()`/`try_lock()` guards instead of `static mut` or other unsafe global mutation patterns. + Use the feedback about the prior_translation, if provided, when generating the Rust translation. + """ + + reference_code: CodeRust = dspy.InputField() + snippet: CodeC = dspy.InputField() + dependent_code: CodeC = dspy.InputField() + prior_translation: CodeRust = dspy.InputField() + feedback: str = dspy.InputField() + translation: CodeRust = dspy.OutputField() + + +class SnippetTranslator(dspy.Module): + def __init__( + self, + translator: type[dspy.Module], + crate: Crate, + max_iters: int = 5, + readonly_cache: Path | None = None, + ): + super().__init__() + self.translate = translator(SnippetTranslatorSignature) + self.crate = crate + self.max_iters = max_iters + self.readonly_cache = readonly_cache + self.cache = _init_cache(crate.workspace_root / "cache.db") + + def forward( + self, + name: str, + reference_code: str, + snippet: str, + dependent_code: str, + prior_translation: str = "", + feedback: str = "", + translation: str = "", + ) -> dspy.Prediction: + logger.info(f"Translating snippet `{name}` ...") + + # If the snippet is empty, use static translation + if not snippet: + translation = f"// Empty snippet `{name}`" + + # Prefer supplied translation, crate cache, then read-only cache. 
+ translation = ( + translation + or _read_cache(self.cache, name, snippet) + or _read_cache(self.readonly_cache, name, snippet) + ) + orig_rust_src = self.crate.rust_src_path.read_text() + pred = dspy.Prediction() + builds = False + dspy_exception = None + for i in range(max(self.max_iters, 1)): + # Use the translation from the prior iteration as feedback for the next iteration + if i > 0: + prior_translation = translation + + # Ensure any translated snippet is safe and uses std::sync::Mutex + rust_src = "#![forbid(unsafe_code)]\n" + rust_src += "use std::sync::{Mutex, MutexGuard};\n\n" + rust_src += (reference_code + "\n") if reference_code else "" + + # Use prior translation as the translation on first iteration only. + # This allows static translations that violate safety, which will be fixed by the LLM! + if i == 0 and translation: + pred = dspy.Prediction(translation=CodeRust(code=translation)) + else: + try: + pred = self.translate( + reference_code=CodeRust(code=rust_src), + snippet=CodeC(code=snippet), + dependent_code=CodeC(code=dependent_code), + prior_translation=CodeRust(code=prior_translation), + feedback=feedback, + ) + dspy_exception = None + except Exception as e: + logger.exception( + f"DSPy exception while translating snippet `{name}` on iteration {i + 1}/{self.max_iters}!" + ) + dspy_exception = e + # Attempt again before any build logic + continue + + translation = pred.translation.code + if translation in reference_code: + translation = f"// duplicate snippet `{name}` detected" + if translation == prior_translation: + logger.warning("Snippet translation loop detected!") + + # Append translation and check if it builds + rust_src += translation.strip() + "\n" + self.crate.rust_src_path.write_text(rust_src) + self.crate.vcs.add(self.crate.rust_src_path) + # FIXME: Checking name for c:@F@main is brittle but we have no better way here. 
+ # The proper way to fix is to yield the translation back to the caller so it can + # build and tell us whether to translation is successful. + builds, feedback = self.crate.cargo_build(fix_E0601="c:@F@main" not in name) + if not builds: + feedback = "Running `cargo build` fails!\n" + feedback + + # Exit early if we build + if builds: + msg = f"Translated snippet `{name}`" + logger.info(msg) + msg += f"\n\n# Reasoning\n{pred.reasoning}" if "reasoning" in pred else "" + self.crate.vcs.commit(msg) + break + + msg = f"Failed to translate snippet `{name}` ({i + 1}/{self.max_iters})" + logger.error(msg) + msg += f"\n\n# Reasoning\n{pred.reasoning}" if "reasoning" in pred else "" + msg += f"\n\n# Feedback\n{feedback}" if feedback else "" + self.crate.vcs.commit(msg) + self.crate.rust_src_path.write_text(orig_rust_src) + # All iterations failed because of DSPy exceptions + if dspy_exception: + raise dspy_exception + pred.name = name + pred.snippet = snippet + pred.reference_code = reference_code + pred.dependent_code = dependent_code + pred.prior_translation = prior_translation + pred.feedback = feedback + pred.translation = CodeRust(code=translation) + pred.success = builds + return pred + + def write_cache(self, pred: dspy.Prediction) -> None: + _write_cache( + self.cache, + pred.name, + pred.snippet, + pred.reference_code, + pred.dependent_code, + pred.prior_translation, + pred.feedback, + pred.translation.code, + pred.success, + ) + + +def _init_cache(cache: Path | None) -> Path | None: + if cache is None: + return None + with sqlite3.connect(cache) as conn: + conn.execute( + """ + CREATE TABLE IF NOT EXISTS snippet_translations ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + snippet TEXT NOT NULL, + reference_code TEXT NOT NULL, + dependent_code TEXT NOT NULL, + prior_translation TEXT NOT NULL, + feedback TEXT NOT NULL, + translation TEXT NOT NULL, + success INTEGER NOT NULL + ) + """ + ) + conn.execute( + "CREATE INDEX IF NOT EXISTS 
idx_snippet_lookup" + " ON snippet_translations (name, snippet)" + ) + return cache + + +def _read_cache(cache: Path | None, name: str, snippet: str) -> str: + translation = "" + if cache is None: + return translation + with sqlite3.connect(cache) as conn: + try: + row = conn.execute( + "SELECT translation FROM snippet_translations WHERE snippet=? AND success=1 ORDER BY id DESC LIMIT 1", + (snippet,), + ).fetchone() + except Exception: + row = None + if row: + logger.info(f"Cache hit for `{name}`") + translation = row[0] + return translation + + +def _write_cache( + cache: Path | None, + name: str, + snippet: str, + reference_code: str, + dependent_code: str, + prior_translation: str, + feedback: str, + translation: str, + success: bool, +): + if cache is None: + return + with sqlite3.connect(cache) as conn: + conn.execute( + """ + INSERT INTO snippet_translations + (name, snippet, reference_code, dependent_code, prior_translation, feedback, translation, success) + VALUES (?, ?, ?, ?, ?, ?, ?, ?) + """, + ( + name, + snippet, + reference_code, + dependent_code, + prior_translation, + feedback, + translation, + int(success), + ), + ) diff --git a/src/ideas/translate_symbol.py b/src/ideas/translate_symbol.py deleted file mode 100644 index cc35da3..0000000 --- a/src/ideas/translate_symbol.py +++ /dev/null @@ -1,128 +0,0 @@ -# -# Copyright (C) 2025 Intel Corporation -# -# SPDX-License-Identifier: Apache-2.0 -# - -import json -import logging - -import dspy - -from .tools import Crate -from .ast import Symbol -from .adapters import Code - - -logger = logging.getLogger("ideas.translate_symbol") - -CodeC = Code["c"] -CodeRust = Code["rust"] - - -class SymbolTranslatorSignature(dspy.Signature): - """ - Generate an idiomatic, memory-safe Rust translation of the snippet. - The reference_code contains Rust code that should be used by the translation. - The snippet contains a single C definition to translate to idiomatic, memory-safe Rust. 
- The dependent_code contains C code that uses the C snippet. - Reason about the dependent_code to understand any special memory management or complex ownership requirements a safe and idiomatic translation may need to take into account. - Ensure the translation of the snippet does not use any unsafe constructs! - Do not refactor the reference_code in the translation! - Do not translate dependent_code to Rust in the translation! - Do not define any implementations (`impl`) in the translation! - Always assume all C integer arithmetic operations on the underlying value are intended to have wrapping semantics, and thus any translation should use Rust's wrapping arithmetic functions like `wrapping_add`, `wrapping_shr`, etc.. - Analyze all bitwise operations carefully, especially rotations. - For all bitwise operations, including those that may appear to swap bits for bytes, implement the behavior exactly as written in the C code, without making assumptions about intent. - Use the `cargo build` feedback about the prior_translation, if provided, when generating the Rust translation. 
- """ - - reference_code: CodeRust = dspy.InputField() - snippet: CodeC = dspy.InputField() - dependent_code: CodeC = dspy.InputField() - prior_translation: CodeRust = dspy.InputField() - feedback: str = dspy.InputField() - translation: CodeRust = dspy.OutputField() - - -class SymbolTranslator(dspy.Module): - def __init__( - self, - translator: type[dspy.Module], - crate: Crate, - max_iters: int = 5, - dump_jsonl: bool = True, - ): - super().__init__() - self.translate = translator(SymbolTranslatorSignature) - self.crate = crate - self.max_iters = max_iters - self.dump_jsonl = dump_jsonl - - # FIXME: Convert reference_code to list[Symbol] - def forward( - self, - reference_code: str, - symbol: Symbol, - dependent_symbols: list[Symbol], - prior_translation: str = "", - feedback: str = "", - ) -> dspy.Prediction: - logger.info(f"Translating symbol `{symbol.name}` ...") - dependent_code = "\n\n".join([s.code for s in dependent_symbols]) - - pred = dspy.Prediction() - for i in range(max(self.max_iters, 1)): - # Predict symbol translation - pred = self.translate( - reference_code=CodeRust(code=reference_code), - snippet=CodeC(code=symbol.code), - dependent_code=CodeC(code=dependent_code), - prior_translation=CodeRust(code=prior_translation), - feedback=feedback, - ) - - # Combine reference Rust code with predicted symbol translation to see if it builds - rust_src = "" - if len(reference_code) > 0: - rust_src += reference_code + "\n\n" - rust_src += pred.translation.code + "\n\n" - self.crate.rust_src_path.write_text(rust_src) - self.crate.add(self.crate.rust_src_path) - # FIXME: Add rustfmt and FeedbackException? 
- builds, feedback = self.crate.cargo_build() - pred["success"] = builds - pred["feedback"] = feedback - - # Write intermediate translation to disk - if self.dump_jsonl: - with self.crate.rust_src_path.with_suffix(".jsonl").open("a") as f: - jsonl = json.dumps( - { - "symbol_name": symbol.name, - "reference_code": reference_code, - "snippet": symbol.code, - "dependent_code": dependent_code, - "prior_translation": prior_translation, - "feedback": pred.feedback, - "translation": pred.translation.code, - "success": pred.success, - } - ) - f.write(jsonl + "\n") - - # Exit early if we build - if pred.success: - self.crate.commit( - f"Translated symbol `{symbol.name}`\n\n# Reasoning\n{pred.reasoning}" - ) - logger.info(f"Translated symbol `{symbol.name}`") - break - self.crate.commit( - f"Failed to translate symbol `{symbol.name}` ({i + 1}/{self.max_iters})!\n\n# Reasoning\n{pred.reasoning}\n\n# Feedback\n{pred.feedback}" - ) - logger.error( - f"Failed to translate symbol `{symbol.name}` ({i + 1}/{self.max_iters})!" 
- ) - prior_translation = pred.translation.code - return pred diff --git a/src/ideas/wrapper.py b/src/ideas/wrapper.py index 5c48e89..3c38124 100644 --- a/src/ideas/wrapper.py +++ b/src/ideas/wrapper.py @@ -1,13 +1,15 @@ # -# Copyright (C) 2025 Intel Corporation +# Copyright (C) 2026 Intel Corporation # # SPDX-License-Identifier: Apache-2.0 # import re +import sys +import sqlite3 import logging from pathlib import Path -from collections import defaultdict, OrderedDict +from collections import OrderedDict from dataclasses import dataclass, field import dspy @@ -18,8 +20,11 @@ from ideas import adapters, model, ModelConfig, GenerateConfig from ideas.tools import Crate, check_rust, run_subprocess +from ideas import create_translation_unit, extract_info_c from ideas.adapters import Code -from ideas.ast_rust import validate_changes +from ideas.init.consolidate import get_symbols_and_dependencies +from ideas.ast_rust import get_nodes, get_root, validate_changes +from ideas.ast import Symbol logger = logging.getLogger("ideas.wrapper") CodeRust = Code["rust"] @@ -27,13 +32,14 @@ @dataclass class WrapperConfig: + filename: Path = MISSING model: ModelConfig = field(default_factory=ModelConfig) generate: GenerateConfig = field(default_factory=GenerateConfig) - symbols: Path = MISSING cargo_toml: Path = MISSING max_iters: int = 5 + readonly_cache: Path | None = None vcs: str = "none" @@ -44,8 +50,8 @@ class WrapperConfig: class Signature(dspy.Signature): """ - Implement a C-compatible FFI wrapper for `crate::{symbol_name}` by replacing the `unimplemented!()` macro in `example_wrapper`. - **Only** modify the body of the function, and **nothing** else. + Output a C-compatible FFI wrapper for `crate::{symbol_name}`. + Use `example_wrapper` as a template for the `wrapper` and replace the `unimplemented!()` part with an implementation. The implementation for `crate::{symbol_name}` is in a crate that was read from "{crate_path}". 
Assume the types in `crate::wrapper::` do not have the same memory layout as those in `crate::`. The wrapper should properly convert between `crate::wrapper::` and `crate::` types by copying the values from the wrapper type to the crate type before calling `crate::{symbol_name}`. @@ -71,19 +77,25 @@ def generate_unimplemented_wrapper(crate: Crate, symbol_name: str) -> str: # unsafe extern "C" { # pub fn helloworld() -> ::std::os::raw::c_int; # } - ok, bindgen_wrapper = run_subprocess( + ok, bindgen_wrapper, error, _ = run_subprocess( [ "bindgen", "--disable-header-comment", "--no-doc-comments", "--no-layout-tests", - str(crate.rust_src_path.with_suffix(".c")), + "--sort-semantically", + str(crate.c_src_path), "--allowlist-function", symbol_name, ] ) if not ok: - raise ValueError(f"bindgen failed!\n{bindgen_wrapper}") + raise ValueError( + f"Bindgen failed to generate wrapper for `{symbol_name}`!\nError:\n{error}" + ) + + if bindgen_wrapper.strip() == "": + raise ValueError(f"Bindgen generated an empty wrapper for `{symbol_name}`!") # #[unsafe(export_name="helloworld")] # pub extern "C" fn helloworld() -> ::std::os::raw::c_int { @@ -96,7 +108,9 @@ def generate_unimplemented_wrapper(crate: Crate, symbol_name: str) -> str: flags=re.DOTALL, ) if unimplemented_wrapper == bindgen_wrapper: - raise ValueError("Failed to convert bindgen to valid wrapper!") + raise ValueError( + f"Failed to convert bindgen output to function for `{symbol_name}`!\nWrapper:\n{unimplemented_wrapper}" + ) unimplemented_wrapper = unimplemented_wrapper.rstrip() # Validate the template @@ -105,9 +119,9 @@ def generate_unimplemented_wrapper(crate: Crate, symbol_name: str) -> str: ) if not success: raise ValueError( - f"Invalid template for the wrapper: {unimplemented_wrapper}\n\nBuild error:\n{output}" + f"Failed to validate wrapper template for `{symbol_name}`!\nWrapper:\n{unimplemented_wrapper}\nError:\n{output}" ) - return unimplemented_wrapper + return unimplemented_wrapper.strip() + "\n" class 
WrapperGenerator(dspy.Module): @@ -115,48 +129,101 @@ def __init__( self, crate: Crate, max_iters: int, + readonly_cache: Path | None = None, ) -> None: super().__init__() self.crate = crate self.max_iters = max_iters + self.readonly_cache = readonly_cache + self.cache = _init_cache(crate.workspace_root / "cache.db") + + # Add sync module to crate + sync_path = crate.rust_src_path.parent / "sync.rs" + sync_path.write_text((Path(__file__).parent / "sync.rs").read_text()) + self.crate.vcs.add(sync_path) - # Setup crate for wrappers by writing empty wrapper.rs and adding `#[cfg(feature = "wrapper")]\npub mod wrapper;` to lib.rs + # Make sure wrapper module exists self.wrapper_path = crate.rust_src_path.parent / "wrapper.rs" - self.wrapper_path.write_text("") - if not re.search( - r"^pub mod wrapper;$", crate.rust_src_path.read_text(), flags=re.MULTILINE - ): - with crate.rust_src_path.open("a+") as f: - f.write('\n#[cfg(feature = "wrapper")]\npub mod wrapper;\n') - # Add the feature to Cargo.toml and make it default - self.crate.cargo_feature("wrapper = []") - # TODO: Make this more robust (currently features are stacked in reverse call order) - self.crate.cargo_feature('default = ["wrapper"]') - - def forward( + self.wrapper_path.touch() + + def forward(self, symbol: Symbol, reference_code: str, translation: str) -> dspy.Prediction: + if symbol.is_function and symbol.is_definition: + return self.wrap_function(symbol, reference_code, translation) + elif symbol.is_variable: + return self.annotate_variable(symbol, reference_code, translation) + else: + logger.info(f"Skipping wrap of symbol `{symbol.name}`") + return dspy.Prediction() + + def annotate_variable( + self, symbol: Symbol, reference_code: str, translation: str + ) -> dspy.Prediction: + logger.info(f"Adding export_name attribute to variable `{symbol.name}` ...") + orig_rust_src = self.crate.rust_src_path.read_text() + assert translation in orig_rust_src, "translation must be on disk!" 
+ rust_src = orig_rust_src + + # Add export_name attribute to symbol translation + new_translation = export_first_unannotated_variable(translation, symbol.spelling) + if new_translation is None: + logger.error(f"Failed to add export_name attribute to variable `{symbol.name}`") + return dspy.Prediction(success=False, translation=translation) + + # Update Rust source with export_name attribute + rust_src = rust_src.replace(translation, new_translation) + self.crate.rust_src_path.write_text(rust_src) + self.crate.vcs.add(self.crate.rust_src_path) + + # Replace Rust Mutex with C ABI-compatible Mutex + RUST_MUTEX = "use std::sync::{Mutex, MutexGuard};" + C_ABI_MUTEX = "mod sync;\nuse crate::sync::{Mutex, MutexGuard};" + if RUST_MUTEX in rust_src: + rust_src = rust_src.replace(RUST_MUTEX, C_ABI_MUTEX) + self.crate.rust_src_path.write_text(rust_src) + self.crate.vcs.add(self.crate.rust_src_path) + + self.crate.vcs.commit(f"Added export_name attribute to variable `{symbol.name}` ...") + + return dspy.Prediction(success=True, translation=new_translation) + + def wrap_function( self, - symbol_name: str, + symbol: Symbol, + reference_code: str, + translation: str, + wrapper: str = "", + prior_wrapper: str = "", ) -> dspy.Prediction: - logger.info(f"Generating wrapper for symbol `{symbol_name}` ...") + # Don't bother wrapping main in binary crates + if symbol.spelling == "main" and self.crate.is_bin: + return dspy.Prediction(success=True, translation=translation) + + logger.info(f"Generating wrapper for function `{symbol.name}` ...") # Write blank wrapper and ensure only that blank wrapper is referenced since we're going to build - symbol_wrapper_path = self.wrapper_path.parent / "wrapper" / f"{symbol_name}.rs" + symbol_wrapper_path = self.wrapper_path.parent / "wrapper" / f"{symbol.spelling}.rs" symbol_wrapper_path.parent.mkdir(exist_ok=True, parents=True) symbol_wrapper_path.write_text("") - self.wrapper_path.write_text(f"pub mod {symbol_name};\n") # Try building the crate 
with an empty wrapper and if it fails then just return the unimplemented wrapper - max_iters = max(1, self.max_iters) if self.crate.cargo_build() == (True, "") else 0 + max_iters = max(1, self.max_iters) if self._build(symbol.spelling) == (True, "") else 0 # Use bindgen to generate unimplemented wrapper and write to disk. Note the unimplemented # wrapper contains unsafe code! - unimplemented_wrapper = generate_unimplemented_wrapper(self.crate, symbol_name) + unimplemented_wrapper = generate_unimplemented_wrapper(self.crate, symbol.spelling) symbol_wrapper_path.write_text(unimplemented_wrapper) + # Prefer supplied wrapper, crate cache, then read-only cache. + wrapper = ( + wrapper + or _read_cache(self.cache, symbol.spelling, unimplemented_wrapper) + or _read_cache(self.readonly_cache, symbol.spelling, unimplemented_wrapper) + ) + # Generate dynamic signature and module for symbol signature = Signature.with_instructions( Signature.instructions.format( - symbol_name=symbol_name, + symbol_name=symbol.spelling, crate_path=self.crate.rust_src_path.relative_to(self.crate.cargo_toml.parent), wrapper_path=symbol_wrapper_path.relative_to(self.crate.cargo_toml.parent), ) @@ -164,17 +231,36 @@ def forward( generate_wrapper = dspy.ChainOfThought(signature) # Try generating wrapper up to max_iter times - code = self.crate.rust_src_path.read_text() - wrapper, success, build_feedback = "", False, "" + msg = "" + success, build_feedback = False, "" + dspy_exception = None scope_feedback: OrderedDict[str, str] = OrderedDict() + pred = dspy.Prediction() for i in range(max_iters): - pred = generate_wrapper( - crate=CodeRust(code=code), - example_wrapper=CodeRust(code=unimplemented_wrapper), - prior_wrapper=CodeRust(code=wrapper), - build_feedback=build_feedback, - scope_feedback="\n\n".join(scope_feedback.values()), - ) + # Use the wrapper from the prior iteration as feedback for the next iteration + if i > 0: + prior_wrapper = wrapper + + try: + if i == 0 and wrapper: + pred = 
dspy.Prediction(wrapper=CodeRust(code=wrapper)) + else: + pred = generate_wrapper( + crate=CodeRust(code=reference_code + "\n" + translation), + example_wrapper=CodeRust(code=unimplemented_wrapper), + prior_wrapper=CodeRust(code=prior_wrapper), + build_feedback=build_feedback, + scope_feedback="\n\n".join(scope_feedback.values()), + ) + dspy_exception = None + except Exception as e: + logger.exception( + f"DSPy exception while generating wrapper for `{symbol.name}` on iteration {i + 1}/{self.max_iters}!" + ) + dspy_exception = e + # Attempt again before any build logic + continue + # Reset scope feedback scope_feedback.clear() @@ -184,7 +270,7 @@ def forward( ) wrapper = unimplemented_wrapper else: - wrapper = pred.wrapper.code + wrapper = pred.wrapper.code.strip() + "\n" # Validate that changes are in scope scope_feedback.update(validate_changes(wrapper, unimplemented_wrapper)) @@ -192,76 +278,254 @@ def forward( # Write wrapper to disk and check if we build with unsafe code since wrappers can use unsafe code symbol_wrapper_path.write_text(wrapper) - self.crate.add(self.crate.rust_src_path, self.wrapper_path, symbol_wrapper_path) - success, build_feedback = self.crate.cargo_build(allow_unsafe=True) - if success and not build_feedback and not scope_feedback: - self.crate.commit( - f"Wrapped symbol `{symbol_name}`\n\n# Reasoning\n{pred.reasoning}" - ) + self.crate.vcs.add(symbol_wrapper_path) + success, build_feedback = self._build(symbol.spelling) + success = success and not build_feedback and not scope_feedback + + if success: + msg = f"Wrapped function `{symbol.name}`" + logger.info(msg) + if "reasoning" in pred: + msg += f"\n\n# Reasoning\n{pred.reasoning}" break - self.crate.commit( - f"Failed to wrap symbol `{symbol_name}` ({i + 1}/{max_iters})!\n\n" - f"# Reasoning\n{pred.reasoning}\n\n" - f"# Build feedback\n{build_feedback}\n\n" - f"# Scope Feedback\n{scope_feedback}" + msg = f"Failed to wrap function `{symbol.name}` ({i + 1}/{max_iters})" + 
logger.error(msg) + msg += f"\n\n# Reasoning\n{pred.reasoning}" if "reasoning" in pred else "" + msg += f"\n\n# Build feedback\n{build_feedback}" + msg += f"\n\n# Scope Feedback\n{scope_feedback}" + self.crate.vcs.commit(msg) + + # Reference symbol wrapper in wrapper module + with self.wrapper_path.open("a") as f: + f.write(f"pub mod {symbol.spelling};\n") + self.crate.vcs.add(self.wrapper_path) + + # Write unimplemented wrapper to disk if generation failed + if not success: + symbol_wrapper_path.write_text(unimplemented_wrapper) + self.crate.vcs.add(symbol_wrapper_path) + msg = f"Wrote unimplemented wrapper for `{symbol.name}`" + logger.warning(msg) + self.crate.vcs.commit(msg) + + # All iterations failed because of DSPy exceptions + if dspy_exception: + raise dspy_exception + + pred.success = success + pred.name = symbol.spelling + pred.translation = translation + pred.wrapper = wrapper + pred.bindgen_template = unimplemented_wrapper + pred.prior_wrapper = prior_wrapper + pred.build_feedback = build_feedback + pred.scope_feedback = "\n\n".join(scope_feedback.values()) + return pred + + def _build(self, symbol_spelling: str) -> tuple[bool, str]: + orig_rust_src = self.crate.rust_src_path.read_text() + orig_wrapper_src = self.wrapper_path.read_text() + + # Reference wrapper module in Rust source + with self.crate.rust_src_path.open("a") as f: + f.write("pub mod wrapper;\n") + self.crate.vcs.add(self.crate.rust_src_path) + + # Reference symbol wrapper module to wrapper module + with self.wrapper_path.open("a") as f: + f.write(f"pub mod {symbol_spelling};\n") + self.crate.vcs.add(self.wrapper_path) + + # Check whether all of the changes compile and commit them + success, feedback = self.crate.cargo_build(allow_unsafe=True) + + # Restore original source + self.crate.rust_src_path.write_text(orig_rust_src) + self.wrapper_path.write_text(orig_wrapper_src) + + return success, feedback + + def write_cache(self, pred: dspy.Prediction) -> None: + required_fields = ( + 
"name", + "bindgen_template", + "prior_wrapper", + "build_feedback", + "scope_feedback", + "wrapper", + "success", + ) + if not all(hasattr(pred, field) for field in required_fields): + return + + _write_cache( + self.cache, + pred.name, + pred.bindgen_template, + pred.prior_wrapper, + pred.build_feedback, + pred.scope_feedback, + pred.wrapper, + pred.success, + ) + + +def export_first_unannotated_variable(rust_src: str, export_name: str) -> str | None: + # Loop through nodes trying to find a static item + attrs = [] + rust_bytes = rust_src.encode() + for node in get_nodes(get_root(rust_bytes)): + # Keep track of attributes + if node.type == "attribute_item": + attrs.append(node) + continue + + # Reset list of attributes when we encounter non-static/non-attribute item + elif node.type != "static_item": + attrs = [] + continue + + # If export name already in attrs, skip this static item + if any(b"export_name" in attr.text for attr in attrs if attr.text is not None): + continue + + # FIXME: Warn if name of variable does not correspond to export_name + + # Insert attribute at location + return ( + rust_bytes[: node.start_byte].decode() + + f'#[unsafe(export_name="{export_name}")]\n' + + rust_bytes[node.start_byte :].decode() + ) + return None + + +def _init_cache(cache: Path | None) -> Path | None: + if cache is None: + return None + with sqlite3.connect(cache) as conn: + conn.execute( + """ + CREATE TABLE IF NOT EXISTS wrapper_translations ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + bindgen_template TEXT NOT NULL, + prior_wrapper TEXT NOT NULL, + build_feedback TEXT NOT NULL, + scope_feedback TEXT NOT NULL, + wrapper TEXT NOT NULL, + success INTEGER NOT NULL ) - else: - logger.warning(f"Wrapper generation failed after {max_iters} feedback iterations!") - return dspy.Prediction( - wrapper_path=self.wrapper_path.relative_to(self.crate.cargo_toml.parent), - wrapper=self.wrapper_path.read_text(), - 
symbol_wrapper_path=symbol_wrapper_path.relative_to(self.crate.cargo_toml.parent), - symbol_wrapper=wrapper if success else unimplemented_wrapper, - success=success, + """ + ) + conn.execute( + "CREATE INDEX IF NOT EXISTS idx_wrapper_lookup" + " ON wrapper_translations (name, bindgen_template)" + ) + return cache + + +def _read_cache(cache: Path | None, name: str, bindgen_template: str) -> str: + wrapper = "" + if cache is None: + return wrapper + with sqlite3.connect(cache) as conn: + try: + row = conn.execute( + "SELECT wrapper FROM wrapper_translations WHERE name=? AND bindgen_template=? AND success=1 ORDER BY id DESC LIMIT 1", + (name, bindgen_template), + ).fetchone() + except Exception: + row = None + if row: + logger.info(f"Cache hit for wrapper `{name}`") + wrapper = row[0] + return wrapper + + +def _write_cache( + cache: Path | None, + name: str, + bindgen_template: str, + prior_wrapper: str, + build_feedback: str, + scope_feedback: str, + wrapper: str, + success: bool, +) -> None: + if cache is None: + return + with sqlite3.connect(cache) as conn: + conn.execute( + """ + INSERT INTO wrapper_translations + (name, bindgen_template, prior_wrapper, build_feedback, scope_feedback, wrapper, success) + VALUES (?, ?, ?, ?, ?, ?, ?) + """, + ( + name, + bindgen_template, + prior_wrapper, + build_feedback, + scope_feedback, + wrapper, + int(success), + ), ) -@hydra.main(version_base=None, config_name="wrapper") -def main(cfg: WrapperConfig) -> None: +def _main(cfg: WrapperConfig) -> None: output_dir = Path(HydraConfig.get().runtime.output_dir) logger.info(f"Saving results to {output_dir}") - # FIXME: Why not use output_dir / Cargo.toml? 
crate = Crate(cargo_toml=cfg.cargo_toml.resolve(), vcs=cfg.vcs) # type: ignore[reportArgumentType] model.configure(cfg.model, cfg.generate) dspy.configure(adapter=adapters.ChatAdapter()) - agent = WrapperGenerator(crate, max_iters=cfg.max_iters) - wrappers: dict[Path, list[str]] = defaultdict(list) - - # Generate wrappers for each symbol - wrapped = True - for symbol_name in cfg.symbols.read_text().splitlines(): - pred = agent(symbol_name) - symbol_wrapped: bool = pred.success - symbol_wrapper: str = pred.symbol_wrapper - symbol_wrapper_path: Path = pred.symbol_wrapper_path - wrapper: str = pred.wrapper - wrapper_path: Path = pred.wrapper_path + agent = WrapperGenerator(crate, max_iters=cfg.max_iters, readonly_cache=cfg.readonly_cache) - # Write unimplemented wrapper to disk if generation failed - if not symbol_wrapped: - crate.write(symbol_wrapper_path, symbol_wrapper) - crate.add(symbol_wrapper_path) - crate.commit(f"Failed to wrap symbol `{symbol_name}`") - wrapped = False - - # Save symbol wrapper declaration in wrapper_path. There should only ever be one - # wrapper_path, but we handle the case where there are many. 
- wrappers[wrapper_path].append(wrapper) - - # Update wrapper module with symbol wrapper declarations `pub mod {symbol_name};`; - for wrapper_path, symbol_wrapper_declarations in wrappers.items(): - crate.write(wrapper_path, "\n".join(symbol_wrapper_declarations)) - crate.add(wrapper_path) - - # Commit wrappers + # Remove forbid unsafe from Rust source + rust_src = re.sub(re.escape("#![forbid(unsafe_code)]"), "", crate.rust_src_path.read_text()) + crate.rust_src_path.write_text(rust_src) + + # Get global symbol table + tu = create_translation_unit(cfg.filename) + asts = [extract_info_c(tu)] + symbols, _ = get_symbols_and_dependencies(asts, source_priority=[]) + + # Generate wrappers for each global function definition + for symbol in symbols.values(): + if symbol.is_global and symbol.is_function and symbol.is_definition: + agent(symbol, "", rust_src) + + # Reference wrapper in Rust source + with crate.rust_src_path.open("a") as f: + f.write("pub mod wrapper;\n") + crate.vcs.add(crate.rust_src_path) + + success, feedback = crate.cargo_build(allow_unsafe=True) + + # Commit unsafe Rust code and wrappers if (output_subdir := HydraConfig.get().output_subdir) is not None: - crate.add(output_dir / output_subdir) + crate.vcs.add(output_dir / output_subdir) name = f"`{crate.root_package['name']}`" - crate.commit( - f"Wrapped all symbols in {name}" if wrapped else f"Failed to wrap all symbols in {name}" - ) + msg = f"Successfully wrapped all symbols in {name}!" + if not success: + msg = f"Failed to wrap all symbols in {name}!" 
+ logger.error(msg) + msg += f"\n\n{feedback}" + else: + logger.info(msg) + crate.vcs.commit(msg) + + +@hydra.main(version_base=None, config_name="wrapper") +def main(cfg: WrapperConfig) -> None: + try: + _main(cfg) + except Exception as e: + logger.exception(e) + sys.exit(-1) if __name__ == "__main__": diff --git a/test/test_convert_json_to_rust.py b/test/test_convert_json_to_rust.py index a6f73f2..b2bc136 100644 --- a/test/test_convert_json_to_rust.py +++ b/test/test_convert_json_to_rust.py @@ -4,13 +4,11 @@ # SPDX-License-Identifier: Apache-2.0 # -import io -import contextlib import pytest import subprocess from pathlib import Path -from ideas import convert_tests, tools +from ideas import convert_tests @pytest.fixture @@ -37,17 +35,11 @@ def json_test_cases(fixtures_dir: Path) -> list[Path]: def test_convert_to_cargo_test( json_test_cases: list[Path], cargo_toml: Path, rust_tests_harness: Path ): - # Create a StringIO object to capture output - captured_output = io.StringIO() - - # Temporarily redirect stdout - with contextlib.redirect_stdout(captured_output): - convert_tests.convert_tests_for_exec(json_test_cases, tools.Crate(cargo_toml)) - - # Write the captured Rust code to a fresh tests/test_cases.rs + # Write tests to tests/test_cases.rs + test_cases = convert_tests.convert_tests_for_exec(json_test_cases) original_harness = rust_tests_harness.read_text() with open(rust_tests_harness, "w") as f: - f.write(captured_output.getvalue()) + f.write(test_cases) # Execute cargo test --test test_cases result = subprocess.run( diff --git a/test/test_rename_statics.py b/test/test_rename_statics.py deleted file mode 100644 index 5d31d9d..0000000 --- a/test/test_rename_statics.py +++ /dev/null @@ -1,84 +0,0 @@ -from pathlib import Path - -from ideas import ast, tools - -C_CODE = """ -#include - -int var0 = 1; -static int {prefix}var1; -static int {prefix}var2 = 10; - -void func0(); -static void {prefix}func1(); -static void {prefix}func1() {{ }} - -static void 
{prefix}func2() {{ - var0 += 1; - {prefix}var1 += 1; - {prefix}var2 += 1; - func0(); - {prefix}func1(); - {prefix}func2(); -}} - -void func0() {{ - static int var1 = 0; - static int var2; - static int var3; - var0 += 1; - var1 += 1; - var2 += 2; - var3 += 1; - func0(); - {prefix}func1(); - {prefix}func2(); -}} -""" - - -def test_get_internally_linked_cursors(): - c_code = C_CODE.format(prefix="") - tu = ast.create_translation_unit(c_code) - assert tu.cursor is not None - statics = [ - node.spelling - for node in ast.get_internally_linked_cursors(tu.cursor, filter_system=True) - ] - assert "var1" in statics - assert "var2" in statics - assert "func1" in statics - assert "func2" in statics - # stdlib.h internally linked declarations should be filtered - assert len(statics) == 4 - - -def test_get_internally_linked_cursors_no_filter(): - c_code = C_CODE.format(prefix="") - tu = ast.create_translation_unit(c_code) - assert tu.cursor is not None - statics = [ - node.spelling - for node in ast.get_internally_linked_cursors(tu.cursor, filter_system=False) - ] - # stdlib.h should include internally linked declarations - assert len(statics) > 4 - - -def test_clang_rename(tmp_path: Path): - prefix = "main_" - expected_c_code = C_CODE.format(prefix=prefix) - - c_code_path = tmp_path / "c_code.c" - c_code_path.write_text(C_CODE.format(prefix="")) - tools.clang_rename_( - c_code_path, - { - "var1": f"{prefix}var1", - "var2": f"{prefix}var2", - "func1": f"{prefix}func1", - "func2": f"{prefix}func2", - }, - ) - actual_c_code = c_code_path.read_text() - assert actual_c_code == expected_c_code diff --git a/uv.lock b/uv.lock index 7167e76..ffc804f 100644 --- a/uv.lock +++ b/uv.lock @@ -1,6 +1,6 @@ version = 1 -revision = 2 -requires-python = ">=3.13.0, <3.14" +revision = 3 +requires-python = "==3.13.*" [manifest] build-constraints = [{ name = "setuptools", specifier = "==81.0.0" }] @@ -16,7 +16,7 @@ wheels = [ [[package]] name = "aiohttp" -version = "3.13.3" +version = "3.13.4" 
source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, @@ -27,25 +27,25 @@ dependencies = [ { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" } +sdist = { url = "https://files.pythonhosted.org/packages/45/4a/064321452809dae953c1ed6e017504e72551a26b6f5708a5a80e4bf556ff/aiohttp-3.13.4.tar.gz", hash = "sha256:d97a6d09c66087890c2ab5d49069e1e570583f7ac0314ecf98294c1b6aaebd38", size = 7859748, upload-time = "2026-03-28T17:19:40.6Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/97/8a/12ca489246ca1faaf5432844adbfce7ff2cc4997733e0af120869345643a/aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c", size = 734190, upload-time = "2026-01-03T17:30:45.832Z" }, - { url = "https://files.pythonhosted.org/packages/32/08/de43984c74ed1fca5c014808963cc83cb00d7bb06af228f132d33862ca76/aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9", size = 491783, upload-time = "2026-01-03T17:30:47.466Z" }, - { url = "https://files.pythonhosted.org/packages/17/f8/8dd2cf6112a5a76f81f81a5130c57ca829d101ad583ce57f889179accdda/aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3", size = 490704, upload-time = "2026-01-03T17:30:49.373Z" }, - { url = "https://files.pythonhosted.org/packages/6d/40/a46b03ca03936f832bc7eaa47cfbb1ad012ba1be4790122ee4f4f8cba074/aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf", size = 1720652, upload-time = "2026-01-03T17:30:50.974Z" }, - { url = "https://files.pythonhosted.org/packages/f7/7e/917fe18e3607af92657e4285498f500dca797ff8c918bd7d90b05abf6c2a/aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6", size = 1692014, upload-time = "2026-01-03T17:30:52.729Z" }, - { url = "https://files.pythonhosted.org/packages/71/b6/cefa4cbc00d315d68973b671cf105b21a609c12b82d52e5d0c9ae61d2a09/aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d", size = 1759777, upload-time = "2026-01-03T17:30:54.537Z" }, - { url = "https://files.pythonhosted.org/packages/fb/e3/e06ee07b45e59e6d81498b591fc589629be1553abb2a82ce33efe2a7b068/aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261", size = 1861276, upload-time = "2026-01-03T17:30:56.512Z" }, - { url = "https://files.pythonhosted.org/packages/7c/24/75d274228acf35ceeb2850b8ce04de9dd7355ff7a0b49d607ee60c29c518/aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0", size = 1743131, upload-time = "2026-01-03T17:30:58.256Z" }, - { url = "https://files.pythonhosted.org/packages/04/98/3d21dde21889b17ca2eea54fdcff21b27b93f45b7bb94ca029c31ab59dc3/aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730", size = 1556863, upload-time = "2026-01-03T17:31:00.445Z" }, - { url = 
"https://files.pythonhosted.org/packages/9e/84/da0c3ab1192eaf64782b03971ab4055b475d0db07b17eff925e8c93b3aa5/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91", size = 1682793, upload-time = "2026-01-03T17:31:03.024Z" }, - { url = "https://files.pythonhosted.org/packages/ff/0f/5802ada182f575afa02cbd0ec5180d7e13a402afb7c2c03a9aa5e5d49060/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3", size = 1716676, upload-time = "2026-01-03T17:31:04.842Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8c/714d53bd8b5a4560667f7bbbb06b20c2382f9c7847d198370ec6526af39c/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4", size = 1733217, upload-time = "2026-01-03T17:31:06.868Z" }, - { url = "https://files.pythonhosted.org/packages/7d/79/e2176f46d2e963facea939f5be2d26368ce543622be6f00a12844d3c991f/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998", size = 1552303, upload-time = "2026-01-03T17:31:08.958Z" }, - { url = "https://files.pythonhosted.org/packages/ab/6a/28ed4dea1759916090587d1fe57087b03e6c784a642b85ef48217b0277ae/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0", size = 1763673, upload-time = "2026-01-03T17:31:10.676Z" }, - { url = "https://files.pythonhosted.org/packages/e8/35/4a3daeb8b9fab49240d21c04d50732313295e4bd813a465d840236dd0ce1/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591", size = 1721120, upload-time = "2026-01-03T17:31:12.575Z" }, - { url = 
"https://files.pythonhosted.org/packages/bc/9f/d643bb3c5fb99547323e635e251c609fbbc660d983144cfebec529e09264/aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf", size = 427383, upload-time = "2026-01-03T17:31:14.382Z" }, - { url = "https://files.pythonhosted.org/packages/4e/f1/ab0395f8a79933577cdd996dd2f9aa6014af9535f65dddcf88204682fe62/aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e", size = 453899, upload-time = "2026-01-03T17:31:15.958Z" }, + { url = "https://files.pythonhosted.org/packages/e3/ac/892f4162df9b115b4758d615f32ec63d00f3084c705ff5526630887b9b42/aiohttp-3.13.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:63dd5e5b1e43b8fb1e91b79b7ceba1feba588b317d1edff385084fcc7a0a4538", size = 745744, upload-time = "2026-03-28T17:16:44.67Z" }, + { url = "https://files.pythonhosted.org/packages/97/a9/c5b87e4443a2f0ea88cb3000c93a8fdad1ee63bffc9ded8d8c8e0d66efc6/aiohttp-3.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:746ac3cc00b5baea424dacddea3ec2c2702f9590de27d837aa67004db1eebc6e", size = 498178, upload-time = "2026-03-28T17:16:46.766Z" }, + { url = "https://files.pythonhosted.org/packages/94/42/07e1b543a61250783650df13da8ddcdc0d0a5538b2bd15cef6e042aefc61/aiohttp-3.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bda8f16ea99d6a6705e5946732e48487a448be874e54a4f73d514660ff7c05d3", size = 498331, upload-time = "2026-03-28T17:16:48.9Z" }, + { url = "https://files.pythonhosted.org/packages/20/d6/492f46bf0328534124772d0cf58570acae5b286ea25006900650f69dae0e/aiohttp-3.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b061e7b5f840391e3f64d0ddf672973e45c4cfff7a0feea425ea24e51530fc2", size = 1744414, upload-time = "2026-03-28T17:16:50.968Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/4d/e02627b2683f68051246215d2d62b2d2f249ff7a285e7a858dc47d6b6a14/aiohttp-3.13.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b252e8d5cd66184b570d0d010de742736e8a4fab22c58299772b0c5a466d4b21", size = 1719226, upload-time = "2026-03-28T17:16:53.173Z" }, + { url = "https://files.pythonhosted.org/packages/7b/6c/5d0a3394dd2b9f9aeba6e1b6065d0439e4b75d41f1fb09a3ec010b43552b/aiohttp-3.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:20af8aad61d1803ff11152a26146d8d81c266aa8c5aa9b4504432abb965c36a0", size = 1782110, upload-time = "2026-03-28T17:16:55.362Z" }, + { url = "https://files.pythonhosted.org/packages/0d/2d/c20791e3437700a7441a7edfb59731150322424f5aadf635602d1d326101/aiohttp-3.13.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:13a5cc924b59859ad2adb1478e31f410a7ed46e92a2a619d6d1dd1a63c1a855e", size = 1884809, upload-time = "2026-03-28T17:16:57.734Z" }, + { url = "https://files.pythonhosted.org/packages/c8/94/d99dbfbd1924a87ef643833932eb2a3d9e5eee87656efea7d78058539eff/aiohttp-3.13.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:534913dfb0a644d537aebb4123e7d466d94e3be5549205e6a31f72368980a81a", size = 1764938, upload-time = "2026-03-28T17:17:00.221Z" }, + { url = "https://files.pythonhosted.org/packages/49/61/3ce326a1538781deb89f6cf5e094e2029cd308ed1e21b2ba2278b08426f6/aiohttp-3.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:320e40192a2dcc1cf4b5576936e9652981ab596bf81eb309535db7e2f5b5672f", size = 1570697, upload-time = "2026-03-28T17:17:02.985Z" }, + { url = "https://files.pythonhosted.org/packages/b6/77/4ab5a546857bb3028fbaf34d6eea180267bdab022ee8b1168b1fcde4bfdd/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:9e587fcfce2bcf06526a43cb705bdee21ac089096f2e271d75de9c339db3100c", size = 1702258, upload-time = "2026-03-28T17:17:05.28Z" }, + { url = "https://files.pythonhosted.org/packages/79/63/d8f29021e39bc5af8e5d5e9da1b07976fb9846487a784e11e4f4eeda4666/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9eb9c2eea7278206b5c6c1441fdd9dc420c278ead3f3b2cc87f9b693698cc500", size = 1740287, upload-time = "2026-03-28T17:17:07.712Z" }, + { url = "https://files.pythonhosted.org/packages/55/3a/cbc6b3b124859a11bc8055d3682c26999b393531ef926754a3445b99dfef/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:29be00c51972b04bf9d5c8f2d7f7314f48f96070ca40a873a53056e652e805f7", size = 1753011, upload-time = "2026-03-28T17:17:10.053Z" }, + { url = "https://files.pythonhosted.org/packages/e0/30/836278675205d58c1368b21520eab9572457cf19afd23759216c04483048/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:90c06228a6c3a7c9f776fe4fc0b7ff647fffd3bed93779a6913c804ae00c1073", size = 1566359, upload-time = "2026-03-28T17:17:12.433Z" }, + { url = "https://files.pythonhosted.org/packages/50/b4/8032cc9b82d17e4277704ba30509eaccb39329dc18d6a35f05e424439e32/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:a533ec132f05fd9a1d959e7f34184cd7d5e8511584848dab85faefbaac573069", size = 1785537, upload-time = "2026-03-28T17:17:14.721Z" }, + { url = "https://files.pythonhosted.org/packages/17/7d/5873e98230bde59f493bf1f7c3e327486a4b5653fa401144704df5d00211/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1c946f10f413836f82ea4cfb90200d2a59578c549f00857e03111cf45ad01ca5", size = 1740752, upload-time = "2026-03-28T17:17:17.387Z" }, + { url = "https://files.pythonhosted.org/packages/7b/f2/13e46e0df051494d7d3c68b7f72d071f48c384c12716fc294f75d5b1a064/aiohttp-3.13.4-cp313-cp313-win32.whl", hash = "sha256:48708e2706106da6967eff5908c78ca3943f005ed6bcb75da2a7e4da94ef8c70", size = 433187, upload-time = "2026-03-28T17:17:19.523Z" 
}, + { url = "https://files.pythonhosted.org/packages/ea/c0/649856ee655a843c8f8664592cfccb73ac80ede6a8c8db33a25d810c12db/aiohttp-3.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:74a2eb058da44fa3a877a49e2095b591d4913308bb424c418b77beb160c55ce3", size = 459778, upload-time = "2026-03-28T17:17:21.964Z" }, ] [[package]] @@ -83,6 +83,25 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] +[[package]] +name = "anthropic" +version = "0.91.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "docstring-parser" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c7/b5/f39ae52ce035490217203581b9bfca8ca853c3a497961d0e5a2f091d0233/anthropic-0.91.0.tar.gz", hash = "sha256:a6afd894d55c26504e3d33909fb3f174d0db7d63369bfe9bb387da3e2806076a", size = 599272, upload-time = "2026-04-07T18:41:17.202Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/1e/7f06da237fde2d7f760a1b61a554c0e780dffe1d264e5aacd0287a7a142b/anthropic-0.91.0-py3-none-any.whl", hash = "sha256:b8672878642774198aa6272f40eb526b9ca11c2a72d4a935d867d445c7371f68", size = 481829, upload-time = "2026-04-07T18:41:15.326Z" }, +] + [[package]] name = "antlr4-python3-runtime" version = "4.9.3" @@ -134,6 +153,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d5/dc/180fe721a2574fb3aad4051adcca196ac2d18adaf75122f5eeb47436cca2/basedpyright-1.29.4-py3-none-any.whl", hash = "sha256:e087513979972f83010639c6c1a1c13dd3b1d24ee45f8ecff747962cc2063d6f", size = 11476859, upload-time = "2025-06-11T22:25:52.01Z" }, ] +[[package]] +name = 
"blinker" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" }, +] + [[package]] name = "cachetools" version = "6.2.4" @@ -152,6 +180,29 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, ] +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, +] + [[package]] name = "cfgv" version = "3.5.0" @@ -237,6 +288,45 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6d/c1/e419ef3723a074172b68aaa89c9f3de486ed4c2399e2dbd8113a4fdcaf9e/colorlog-6.10.1-py3-none-any.whl", hash = "sha256:2d7e8348291948af66122cff006c9f8da6255d224e7cf8e37d8de2df3bad8c9c", size = 11743, upload-time = "2025-10-16T16:14:10.512Z" }, ] +[[package]] +name = "cryptography" +version = "46.0.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { 
name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a4/ba/04b1bd4218cbc58dc90ce967106d51582371b898690f3ae0402876cc4f34/cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759", size = 750542, upload-time = "2026-03-25T23:34:53.396Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/23/9285e15e3bc57325b0a72e592921983a701efc1ee8f91c06c5f0235d86d9/cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8", size = 7176401, upload-time = "2026-03-25T23:33:22.096Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/e61f8f13950ab6195b31913b42d39f0f9afc7d93f76710f299b5ec286ae6/cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30", size = 4275275, upload-time = "2026-03-25T23:33:23.844Z" }, + { url = "https://files.pythonhosted.org/packages/19/69/732a736d12c2631e140be2348b4ad3d226302df63ef64d30dfdb8db7ad1c/cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a", size = 4425320, upload-time = "2026-03-25T23:33:25.703Z" }, + { url = "https://files.pythonhosted.org/packages/d4/12/123be7292674abf76b21ac1fc0e1af50661f0e5b8f0ec8285faac18eb99e/cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175", size = 4278082, upload-time = "2026-03-25T23:33:27.423Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ba/d5e27f8d68c24951b0a484924a84c7cdaed7502bac9f18601cd357f8b1d2/cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463", size = 4926514, upload-time = 
"2026-03-25T23:33:29.206Z" }, + { url = "https://files.pythonhosted.org/packages/34/71/1ea5a7352ae516d5512d17babe7e1b87d9db5150b21f794b1377eac1edc0/cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97", size = 4457766, upload-time = "2026-03-25T23:33:30.834Z" }, + { url = "https://files.pythonhosted.org/packages/01/59/562be1e653accee4fdad92c7a2e88fced26b3fdfce144047519bbebc299e/cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c", size = 3986535, upload-time = "2026-03-25T23:33:33.02Z" }, + { url = "https://files.pythonhosted.org/packages/d6/8b/b1ebfeb788bf4624d36e45ed2662b8bd43a05ff62157093c1539c1288a18/cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507", size = 4277618, upload-time = "2026-03-25T23:33:34.567Z" }, + { url = "https://files.pythonhosted.org/packages/dd/52/a005f8eabdb28df57c20f84c44d397a755782d6ff6d455f05baa2785bd91/cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19", size = 4890802, upload-time = "2026-03-25T23:33:37.034Z" }, + { url = "https://files.pythonhosted.org/packages/ec/4d/8e7d7245c79c617d08724e2efa397737715ca0ec830ecb3c91e547302555/cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738", size = 4457425, upload-time = "2026-03-25T23:33:38.904Z" }, + { url = "https://files.pythonhosted.org/packages/1d/5c/f6c3596a1430cec6f949085f0e1a970638d76f81c3ea56d93d564d04c340/cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c", size = 4405530, upload-time = "2026-03-25T23:33:40.842Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/c9/9f9cea13ee2dbde070424e0c4f621c091a91ffcc504ffea5e74f0e1daeff/cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f", size = 4667896, upload-time = "2026-03-25T23:33:42.781Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b5/1895bc0821226f129bc74d00eccfc6a5969e2028f8617c09790bf89c185e/cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2", size = 3026348, upload-time = "2026-03-25T23:33:45.021Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f8/c9bcbf0d3e6ad288b9d9aa0b1dee04b063d19e8c4f871855a03ab3a297ab/cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124", size = 3483896, upload-time = "2026-03-25T23:33:46.649Z" }, + { url = "https://files.pythonhosted.org/packages/c4/cc/f330e982852403da79008552de9906804568ae9230da8432f7496ce02b71/cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a", size = 7162776, upload-time = "2026-03-25T23:34:13.308Z" }, + { url = "https://files.pythonhosted.org/packages/49/b3/dc27efd8dcc4bff583b3f01d4a3943cd8b5821777a58b3a6a5f054d61b79/cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8", size = 4270529, upload-time = "2026-03-25T23:34:15.019Z" }, + { url = "https://files.pythonhosted.org/packages/e6/05/e8d0e6eb4f0d83365b3cb0e00eb3c484f7348db0266652ccd84632a3d58d/cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77", size = 4414827, upload-time = "2026-03-25T23:34:16.604Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/97/daba0f5d2dc6d855e2dcb70733c812558a7977a55dd4a6722756628c44d1/cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290", size = 4271265, upload-time = "2026-03-25T23:34:18.586Z" }, + { url = "https://files.pythonhosted.org/packages/89/06/fe1fce39a37ac452e58d04b43b0855261dac320a2ebf8f5260dd55b201a9/cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410", size = 4916800, upload-time = "2026-03-25T23:34:20.561Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8a/b14f3101fe9c3592603339eb5d94046c3ce5f7fc76d6512a2d40efd9724e/cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d", size = 4448771, upload-time = "2026-03-25T23:34:22.406Z" }, + { url = "https://files.pythonhosted.org/packages/01/b3/0796998056a66d1973fd52ee89dc1bb3b6581960a91ad4ac705f182d398f/cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70", size = 3978333, upload-time = "2026-03-25T23:34:24.281Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3d/db200af5a4ffd08918cd55c08399dc6c9c50b0bc72c00a3246e099d3a849/cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d", size = 4271069, upload-time = "2026-03-25T23:34:25.895Z" }, + { url = "https://files.pythonhosted.org/packages/d7/18/61acfd5b414309d74ee838be321c636fe71815436f53c9f0334bf19064fa/cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa", size = 4878358, upload-time = "2026-03-25T23:34:27.67Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/65/5bf43286d566f8171917cae23ac6add941654ccf085d739195a4eacf1674/cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58", size = 4448061, upload-time = "2026-03-25T23:34:29.375Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/7e49c0fa7205cf3597e525d156a6bce5b5c9de1fd7e8cb01120e459f205a/cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb", size = 4399103, upload-time = "2026-03-25T23:34:32.036Z" }, + { url = "https://files.pythonhosted.org/packages/44/46/466269e833f1c4718d6cd496ffe20c56c9c8d013486ff66b4f69c302a68d/cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72", size = 4659255, upload-time = "2026-03-25T23:34:33.679Z" }, + { url = "https://files.pythonhosted.org/packages/0a/09/ddc5f630cc32287d2c953fc5d32705e63ec73e37308e5120955316f53827/cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c", size = 3010660, upload-time = "2026-03-25T23:34:35.418Z" }, + { url = "https://files.pythonhosted.org/packages/1b/82/ca4893968aeb2709aacfb57a30dec6fa2ab25b10fa9f064b8882ce33f599/cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f", size = 3471160, upload-time = "2026-03-25T23:34:37.191Z" }, +] + [[package]] name = "diskcache" version = "5.6.3" @@ -264,6 +354,29 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, ] +[[package]] +name = "docker" +version = "7.1.0" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, +] + +[[package]] +name = "docstring-parser" +version = "0.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, +] + [[package]] name = "dspy" version = "3.1.2" @@ -321,6 +434,23 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" }, ] +[[package]] +name = "flask" +version = "3.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "blinker" }, + { name = 
"click" }, + { name = "itsdangerous" }, + { name = "jinja2" }, + { name = "markupsafe" }, + { name = "werkzeug" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/26/00/35d85dcce6c57fdc871f3867d465d780f302a175ea360f62533f12b27e2b/flask-3.1.3.tar.gz", hash = "sha256:0ef0e52b8a9cd932855379197dd8f94047b359ca0a78695144304cb45f87c9eb", size = 759004, upload-time = "2026-02-19T05:00:57.678Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/9c/34f6962f9b9e9c71f6e5ed806e0d0ff03c9d1b0b2340088a0cf4bce09b18/flask-3.1.3-py3-none-any.whl", hash = "sha256:f4bcbefc124291925f1a26446da31a5178f9483862233b23c0c96a20701f670c", size = 103424, upload-time = "2026-02-19T05:00:56.027Z" }, +] + [[package]] name = "frozenlist" version = "1.8.0" @@ -380,6 +510,45 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7e/b1/33b035ff1aaf22d4e104c5b15ba48fe5050639764457048e967c20d6317a/gepa-0.0.24-py3-none-any.whl", hash = "sha256:6d8b16699e7b24ed01435dea7bbbc89156a88cbb4b877b14d90e7455db2b0032", size = 137539, upload-time = "2026-01-05T16:45:29.244Z" }, ] +[[package]] +name = "google-auth" +version = "2.49.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "pyasn1-modules" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ea/80/6a696a07d3d3b0a92488933532f03dbefa4a24ab80fb231395b9a2a1be77/google_auth-2.49.1.tar.gz", hash = "sha256:16d40da1c3c5a0533f57d268fe72e0ebb0ae1cc3b567024122651c045d879b64", size = 333825, upload-time = "2026-03-12T19:30:58.135Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/eb/c6c2478d8a8d633460be40e2a8a6f8f429171997a35a96f81d3b680dec83/google_auth-2.49.1-py3-none-any.whl", hash = "sha256:195ebe3dca18eddd1b3db5edc5189b76c13e96f29e73043b923ebcf3f1a860f7", size = 240737, upload-time = "2026-03-12T19:30:53.159Z" }, +] + +[package.optional-dependencies] +requests = [ + { name = "requests" }, +] + +[[package]] +name = "google-genai" +version 
= "1.70.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "google-auth", extra = ["requests"] }, + { name = "httpx" }, + { name = "pydantic" }, + { name = "requests" }, + { name = "sniffio" }, + { name = "tenacity" }, + { name = "typing-extensions" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/dd/28e4682904b183acbfad3fe6409f13a42f69bb8eab6e882d3bcbea1dde01/google_genai-1.70.0.tar.gz", hash = "sha256:36b67b0fc6f319e08d1f1efd808b790107b1809c8743a05d55dfcf9d9fad7719", size = 519550, upload-time = "2026-04-01T10:52:46.487Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/a3/d4564c8a9beaf6a3cef8d70fa6354318572cebfee65db4f01af0d41f45ba/google_genai-1.70.0-py3-none-any.whl", hash = "sha256:b74c24549d8b4208f4c736fd11857374788e1ffffc725de45d706e35c97fceee", size = 760584, upload-time = "2026-04-01T10:52:44.349Z" }, +] + [[package]] name = "greenlet" version = "3.3.0" @@ -515,6 +684,9 @@ dependencies = [ { name = "clang" }, { name = "dspy" }, { name = "hydra-core" }, + { name = "kiss-agent-framework" }, + { name = "networkx" }, + { name = "tomlkit" }, { name = "tree-sitter" }, { name = "tree-sitter-rust" }, ] @@ -532,6 +704,9 @@ requires-dist = [ { name = "clang", specifier = "==21.1.7" }, { name = "dspy", specifier = "==3.1.2" }, { name = "hydra-core", specifier = "==1.3.2" }, + { name = "kiss-agent-framework", specifier = "==0.2.27" }, + { name = "networkx", specifier = "==3.6.1" }, + { name = "tomlkit", specifier = ">=0.14.0" }, { name = "tree-sitter", specifier = "==0.25.2" }, { name = "tree-sitter-rust", specifier = "==0.24.0" }, ] @@ -540,7 +715,7 @@ requires-dist = [ dev = [ { name = "basedpyright", specifier = "==1.29.4" }, { name = "pre-commit", specifier = "==4.2.0" }, - { name = "pytest", specifier = "==8.4.0" }, + { name = "pytest", specifier = "==9.0.3" }, { name = "ruff", specifier = "==0.11.13" }, ] @@ -583,6 +758,15 
@@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, ] +[[package]] +name = "itsdangerous" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, +] + [[package]] name = "jinja2" version = "3.1.6" @@ -657,6 +841,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, ] +[[package]] +name = "kiss-agent-framework" +version = "0.2.27" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anthropic" }, + { name = "docker" }, + { name = "flask" }, + { name = "google-genai" }, + { name = "numpy" }, + { name = "openai" }, + { name = "playwright" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "pyyaml" }, + { name = "rich" }, + { name = "slack-sdk" }, + { name = "starlette" }, + { name = "uvicorn" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/75/e5/2eb6de46d3e061d376f0deb4dd4b36eb956d251a178b8d78582e594d4f71/kiss_agent_framework-0.2.27.tar.gz", hash = "sha256:bd25b4a83e9c35c703af709c335a916ccd3a612f3cf6474a840ecaabb837dd49", size = 1829934, upload-time = "2026-03-11T19:50:24.245Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/f1/4ab52070ec6cfa6da9728fcdb84399e8ca33057e1d686c777040449df2cb/kiss_agent_framework-0.2.27-py3-none-any.whl", hash = "sha256:8711004527fe1a2e59635b0c0c9abb776779f6abb02fcabb14040da4ef3191f2", size = 452809, upload-time = "2026-03-11T19:50:13.902Z" }, +] + [[package]] name = "litellm" version = "1.80.11" @@ -693,6 +902,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, ] +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + [[package]] name = "markupsafe" version = "3.0.3" @@ -723,6 +944,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = 
"sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, ] +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + [[package]] name = "multidict" version = "6.7.0" @@ -768,6 +998,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, ] +[[package]] +name = "networkx" +version = "3.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/51/63fe664f3908c97be9d2e4f1158eb633317598cfa6e1fc14af5383f17512/networkx-3.6.1.tar.gz", hash = "sha256:26b7c357accc0c8cde558ad486283728b65b6a95d85ee1cd66bafab4c8168509", size = 2517025, upload-time = "2025-12-08T17:02:39.908Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/c9/b2622292ea83fbb4ec318f5b9ab867d0a28ab43c5717bb85b0a5f6b3b0a4/networkx-3.6.1-py3-none-any.whl", hash = "sha256:d47fbf302e7d9cbbb9e2555a0d267983d2aa476bac30e90dfbe5669bd57f3762", size = 2068504, upload-time = "2025-12-08T17:02:38.159Z" }, +] + [[package]] name = "nodeenv" version = "1.10.0" @@ -874,25 +1113,25 @@ wheels = [ [[package]] name 
= "orjson" -version = "3.11.5" +version = "3.11.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/04/b8/333fdb27840f3bf04022d21b654a35f58e15407183aeb16f3b41aa053446/orjson-3.11.5.tar.gz", hash = "sha256:82393ab47b4fe44ffd0a7659fa9cfaacc717eb617c93cde83795f14af5c2e9d5", size = 5972347, upload-time = "2025-12-06T15:55:39.458Z" } +sdist = { url = "https://files.pythonhosted.org/packages/70/a3/4e09c61a5f0c521cba0bb433639610ae037437669f1a4cbc93799e731d78/orjson-3.11.6.tar.gz", hash = "sha256:0a54c72259f35299fd033042367df781c2f66d10252955ca1efb7db309b954cb", size = 6175856, upload-time = "2026-01-29T15:13:07.942Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/10/43/61a77040ce59f1569edf38f0b9faadc90c8cf7e9bec2e0df51d0132c6bb7/orjson-3.11.5-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:3b01799262081a4c47c035dd77c1301d40f568f77cc7ec1bb7db5d63b0a01629", size = 245271, upload-time = "2025-12-06T15:54:40.878Z" }, - { url = "https://files.pythonhosted.org/packages/55/f9/0f79be617388227866d50edd2fd320cb8fb94dc1501184bb1620981a0aba/orjson-3.11.5-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:61de247948108484779f57a9f406e4c84d636fa5a59e411e6352484985e8a7c3", size = 129422, upload-time = "2025-12-06T15:54:42.403Z" }, - { url = "https://files.pythonhosted.org/packages/77/42/f1bf1549b432d4a78bfa95735b79b5dac75b65b5bb815bba86ad406ead0a/orjson-3.11.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:894aea2e63d4f24a7f04a1908307c738d0dce992e9249e744b8f4e8dd9197f39", size = 132060, upload-time = "2025-12-06T15:54:43.531Z" }, - { url = "https://files.pythonhosted.org/packages/25/49/825aa6b929f1a6ed244c78acd7b22c1481fd7e5fda047dc8bf4c1a807eb6/orjson-3.11.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ddc21521598dbe369d83d4d40338e23d4101dad21dae0e79fa20465dbace019f", size = 130391, upload-time = 
"2025-12-06T15:54:45.059Z" }, - { url = "https://files.pythonhosted.org/packages/42/ec/de55391858b49e16e1aa8f0bbbb7e5997b7345d8e984a2dec3746d13065b/orjson-3.11.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cce16ae2f5fb2c53c3eafdd1706cb7b6530a67cc1c17abe8ec747f5cd7c0c51", size = 135964, upload-time = "2025-12-06T15:54:46.576Z" }, - { url = "https://files.pythonhosted.org/packages/1c/40/820bc63121d2d28818556a2d0a09384a9f0262407cf9fa305e091a8048df/orjson-3.11.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e46c762d9f0e1cfb4ccc8515de7f349abbc95b59cb5a2bd68df5973fdef913f8", size = 139817, upload-time = "2025-12-06T15:54:48.084Z" }, - { url = "https://files.pythonhosted.org/packages/09/c7/3a445ca9a84a0d59d26365fd8898ff52bdfcdcb825bcc6519830371d2364/orjson-3.11.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7345c759276b798ccd6d77a87136029e71e66a8bbf2d2755cbdde1d82e78706", size = 137336, upload-time = "2025-12-06T15:54:49.426Z" }, - { url = "https://files.pythonhosted.org/packages/9a/b3/dc0d3771f2e5d1f13368f56b339c6782f955c6a20b50465a91acb79fe961/orjson-3.11.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75bc2e59e6a2ac1dd28901d07115abdebc4563b5b07dd612bf64260a201b1c7f", size = 138993, upload-time = "2025-12-06T15:54:50.939Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a2/65267e959de6abe23444659b6e19c888f242bf7725ff927e2292776f6b89/orjson-3.11.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:54aae9b654554c3b4edd61896b978568c6daa16af96fa4681c9b5babd469f863", size = 141070, upload-time = "2025-12-06T15:54:52.414Z" }, - { url = "https://files.pythonhosted.org/packages/63/c9/da44a321b288727a322c6ab17e1754195708786a04f4f9d2220a5076a649/orjson-3.11.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4bdd8d164a871c4ec773f9de0f6fe8769c2d6727879c37a9666ba4183b7f8228", size = 413505, upload-time = "2025-12-06T15:54:53.67Z" }, - { url = 
"https://files.pythonhosted.org/packages/7f/17/68dc14fa7000eefb3d4d6d7326a190c99bb65e319f02747ef3ebf2452f12/orjson-3.11.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a261fef929bcf98a60713bf5e95ad067cea16ae345d9a35034e73c3990e927d2", size = 151342, upload-time = "2025-12-06T15:54:55.113Z" }, - { url = "https://files.pythonhosted.org/packages/c4/c5/ccee774b67225bed630a57478529fc026eda33d94fe4c0eac8fe58d4aa52/orjson-3.11.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c028a394c766693c5c9909dec76b24f37e6a1b91999e8d0c0d5feecbe93c3e05", size = 141823, upload-time = "2025-12-06T15:54:56.331Z" }, - { url = "https://files.pythonhosted.org/packages/67/80/5d00e4155d0cd7390ae2087130637671da713959bb558db9bac5e6f6b042/orjson-3.11.5-cp313-cp313-win32.whl", hash = "sha256:2cc79aaad1dfabe1bd2d50ee09814a1253164b3da4c00a78c458d82d04b3bdef", size = 135236, upload-time = "2025-12-06T15:54:57.507Z" }, - { url = "https://files.pythonhosted.org/packages/95/fe/792cc06a84808dbdc20ac6eab6811c53091b42f8e51ecebf14b540e9cfe4/orjson-3.11.5-cp313-cp313-win_amd64.whl", hash = "sha256:ff7877d376add4e16b274e35a3f58b7f37b362abf4aa31863dadacdd20e3a583", size = 133167, upload-time = "2025-12-06T15:54:58.71Z" }, - { url = "https://files.pythonhosted.org/packages/46/2c/d158bd8b50e3b1cfdcf406a7e463f6ffe3f0d167b99634717acdaf5e299f/orjson-3.11.5-cp313-cp313-win_arm64.whl", hash = "sha256:59ac72ea775c88b163ba8d21b0177628bd015c5dd060647bbab6e22da3aad287", size = 126712, upload-time = "2025-12-06T15:54:59.892Z" }, + { url = "https://files.pythonhosted.org/packages/ae/45/d9c71c8c321277bc1ceebf599bc55ba826ae538b7c61f287e9a7e71bd589/orjson-3.11.6-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e4ae1670caabb598a88d385798692ce2a1b2f078971b3329cfb85253c6097f5b", size = 249828, upload-time = "2026-01-29T15:12:20.14Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/7e/4afcf4cfa9c2f93846d70eee9c53c3c0123286edcbeb530b7e9bd2aea1b2/orjson-3.11.6-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:2c6b81f47b13dac2caa5d20fbc953c75eb802543abf48403a4703ed3bff225f0", size = 134339, upload-time = "2026-01-29T15:12:22.01Z" }, + { url = "https://files.pythonhosted.org/packages/40/10/6d2b8a064c8d2411d3d0ea6ab43125fae70152aef6bea77bb50fa54d4097/orjson-3.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:647d6d034e463764e86670644bdcaf8e68b076e6e74783383b01085ae9ab334f", size = 137662, upload-time = "2026-01-29T15:12:23.307Z" }, + { url = "https://files.pythonhosted.org/packages/5a/50/5804ea7d586baf83ee88969eefda97a24f9a5bdba0727f73e16305175b26/orjson-3.11.6-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8523b9cc4ef174ae52414f7699e95ee657c16aa18b3c3c285d48d7966cce9081", size = 134626, upload-time = "2026-01-29T15:12:25.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/2e/f0492ed43e376722bb4afd648e06cc1e627fc7ec8ff55f6ee739277813ea/orjson-3.11.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:313dfd7184cde50c733fc0d5c8c0e2f09017b573afd11dc36bd7476b30b4cb17", size = 140873, upload-time = "2026-01-29T15:12:26.369Z" }, + { url = "https://files.pythonhosted.org/packages/10/15/6f874857463421794a303a39ac5494786ad46a4ab46d92bda6705d78c5aa/orjson-3.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:905ee036064ff1e1fd1fb800055ac477cdcb547a78c22c1bc2bbf8d5d1a6fb42", size = 144044, upload-time = "2026-01-29T15:12:28.082Z" }, + { url = "https://files.pythonhosted.org/packages/d2/c7/b7223a3a70f1d0cc2d86953825de45f33877ee1b124a91ca1f79aa6e643f/orjson-3.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce374cb98411356ba906914441fc993f271a7a666d838d8de0e0900dd4a4bc12", size = 142396, upload-time = "2026-01-29T15:12:30.529Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/e3/aa1b6d3ad3cd80f10394134f73ae92a1d11fdbe974c34aa199cc18bb5fcf/orjson-3.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cded072b9f65fcfd188aead45efa5bd528ba552add619b3ad2a81f67400ec450", size = 145600, upload-time = "2026-01-29T15:12:31.848Z" }, + { url = "https://files.pythonhosted.org/packages/f6/cf/e4aac5a46cbd39d7e769ef8650efa851dfce22df1ba97ae2b33efe893b12/orjson-3.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ab85bdbc138e1f73a234db6bb2e4cc1f0fcec8f4bd2bd2430e957a01aadf746", size = 146967, upload-time = "2026-01-29T15:12:33.203Z" }, + { url = "https://files.pythonhosted.org/packages/0b/04/975b86a4bcf6cfeda47aad15956d52fbeda280811206e9967380fa9355c8/orjson-3.11.6-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:351b96b614e3c37a27b8ab048239ebc1e0be76cc17481a430d70a77fb95d3844", size = 421003, upload-time = "2026-01-29T15:12:35.097Z" }, + { url = "https://files.pythonhosted.org/packages/28/d1/0369d0baf40eea5ff2300cebfe209883b2473ab4aa4c4974c8bd5ee42bb2/orjson-3.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f9959c85576beae5cdcaaf39510b15105f1ee8b70d5dacd90152617f57be8c83", size = 155695, upload-time = "2026-01-29T15:12:36.589Z" }, + { url = "https://files.pythonhosted.org/packages/ab/1f/d10c6d6ae26ff1d7c3eea6fd048280ef2e796d4fb260c5424fd021f68ecf/orjson-3.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:75682d62b1b16b61a30716d7a2ec1f4c36195de4a1c61f6665aedd947b93a5d5", size = 147392, upload-time = "2026-01-29T15:12:37.876Z" }, + { url = "https://files.pythonhosted.org/packages/8d/43/7479921c174441a0aa5277c313732e20713c0969ac303be9f03d88d3db5d/orjson-3.11.6-cp313-cp313-win32.whl", hash = "sha256:40dc277999c2ef227dcc13072be879b4cfd325502daeb5c35ed768f706f2bf30", size = 139718, upload-time = "2026-01-29T15:12:39.274Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/bc/9ffe7dfbf8454bc4e75bb8bf3a405ed9e0598df1d3535bb4adcd46be07d0/orjson-3.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:f0f6e9f8ff7905660bc3c8a54cd4a675aa98f7f175cf00a59815e2ff42c0d916", size = 136635, upload-time = "2026-01-29T15:12:40.593Z" }, + { url = "https://files.pythonhosted.org/packages/6f/7e/51fa90b451470447ea5023b20d83331ec741ae28d1e6d8ed547c24e7de14/orjson-3.11.6-cp313-cp313-win_arm64.whl", hash = "sha256:1608999478664de848e5900ce41f25c4ecdfc4beacbc632b6fd55e1a586e5d38", size = 135175, upload-time = "2026-01-29T15:12:41.997Z" }, ] [[package]] @@ -913,6 +1152,25 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, ] +[[package]] +name = "playwright" +version = "1.58.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet" }, + { name = "pyee" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/c9/9c6061d5703267f1baae6a4647bfd1862e386fbfdb97d889f6f6ae9e3f64/playwright-1.58.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:96e3204aac292ee639edbfdef6298b4be2ea0a55a16b7068df91adac077cc606", size = 42251098, upload-time = "2026-01-30T15:09:24.028Z" }, + { url = "https://files.pythonhosted.org/packages/e0/40/59d34a756e02f8c670f0fee987d46f7ee53d05447d43cd114ca015cb168c/playwright-1.58.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:70c763694739d28df71ed578b9c8202bb83e8fe8fb9268c04dd13afe36301f71", size = 41039625, upload-time = "2026-01-30T15:09:27.558Z" }, + { url = "https://files.pythonhosted.org/packages/e1/ee/3ce6209c9c74a650aac9028c621f357a34ea5cd4d950700f8e2c4b7fe2c4/playwright-1.58.0-py3-none-macosx_11_0_universal2.whl", hash = 
"sha256:185e0132578733d02802dfddfbbc35f42be23a45ff49ccae5081f25952238117", size = 42251098, upload-time = "2026-01-30T15:09:30.461Z" }, + { url = "https://files.pythonhosted.org/packages/f1/af/009958cbf23fac551a940d34e3206e6c7eed2b8c940d0c3afd1feb0b0589/playwright-1.58.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:c95568ba1eda83812598c1dc9be60b4406dffd60b149bc1536180ad108723d6b", size = 46235268, upload-time = "2026-01-30T15:09:33.787Z" }, + { url = "https://files.pythonhosted.org/packages/d9/a6/0e66ad04b6d3440dae73efb39540c5685c5fc95b17c8b29340b62abbd952/playwright-1.58.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f9999948f1ab541d98812de25e3a8c410776aa516d948807140aff797b4bffa", size = 45964214, upload-time = "2026-01-30T15:09:36.751Z" }, + { url = "https://files.pythonhosted.org/packages/0e/4b/236e60ab9f6d62ed0fd32150d61f1f494cefbf02304c0061e78ed80c1c32/playwright-1.58.0-py3-none-win32.whl", hash = "sha256:1e03be090e75a0fabbdaeab65ce17c308c425d879fa48bb1d7986f96bfad0b99", size = 36815998, upload-time = "2026-01-30T15:09:39.627Z" }, + { url = "https://files.pythonhosted.org/packages/41/f8/5ec599c5e59d2f2f336a05b4f318e733077cd5044f24adb6f86900c3e6a7/playwright-1.58.0-py3-none-win_amd64.whl", hash = "sha256:a2bf639d0ce33b3ba38de777e08697b0d8f3dc07ab6802e4ac53fb65e3907af8", size = 36816005, upload-time = "2026-01-30T15:09:42.449Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c4/cc0229fea55c87d6c9c67fe44a21e2cd28d1d558a5478ed4d617e9fb0c93/playwright-1.58.0-py3-none-win_arm64.whl", hash = "sha256:32ffe5c303901a13a0ecab91d1c3f74baf73b84f4bedbb6b935f5bc11cc98e1b", size = 33085919, upload-time = "2026-01-30T15:09:45.71Z" }, +] + [[package]] name = "pluggy" version = "1.6.0" @@ -977,6 +1235,36 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = 
"sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] +[[package]] +name = "pyasn1" +version = "0.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/5f/6583902b6f79b399c9c40674ac384fd9cd77805f9e6205075f828ef11fb2/pyasn1-0.6.3.tar.gz", hash = "sha256:697a8ecd6d98891189184ca1fa05d1bb00e2f84b5977c481452050549c8a72cf", size = 148685, upload-time = "2026-03-17T01:06:53.382Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/a0/7d793dce3fa811fe047d6ae2431c672364b462850c6235ae306c0efd025f/pyasn1-0.6.3-py3-none-any.whl", hash = "sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde", size = 83997, upload-time = "2026-03-17T01:06:52.036Z" }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" }, +] + +[[package]] +name = "pycparser" +version = "3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +wheels = [ + { url 
= "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, +] + [[package]] name = "pydantic" version = "2.12.5" @@ -1017,18 +1305,44 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, ] +[[package]] +name = "pydantic-settings" +version = "2.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/6d/fffca34caecc4a3f97bda81b2098da5e8ab7efc9a66e819074a11955d87e/pydantic_settings-2.13.1.tar.gz", hash = "sha256:b4c11847b15237fb0171e1462bf540e294affb9b86db4d9aa5c01730bdbe4025", size = 223826, upload-time = "2026-02-19T13:45:08.055Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/4b/ccc026168948fec4f7555b9164c724cf4125eac006e176541483d2c959be/pydantic_settings-2.13.1-py3-none-any.whl", hash = "sha256:d56fd801823dbeae7f0975e1f8c8e25c258eb75d278ea7abb5d9cebb01b56237", size = 58929, upload-time = "2026-02-19T13:45:06.034Z" }, +] + +[[package]] +name = "pyee" +version = "13.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/04/e7c1fe4dc78a6fdbfd6c337b1c3732ff543b8a397683ab38378447baa331/pyee-13.0.1.tar.gz", hash = "sha256:0b931f7c14535667ed4c7e0d531716368715e860b988770fc7eb8578d1f67fc8", size = 31655, upload-time = "2026-02-14T21:12:28.044Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a0/c4/b4d4827c93ef43c01f599ef31453ccc1c132b353284fc6c87d535c233129/pyee-13.0.1-py3-none-any.whl", hash = "sha256:af2f8fede4171ef667dfded53f96e2ed0d6e6bd7ee3bb46437f77e3b57689228", size = 15659, upload-time = "2026-02-14T21:12:26.263Z" }, +] + [[package]] name = "pygments" -version = "2.19.2" +version = "2.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, + { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, ] [[package]] name = "pytest" -version = "8.4.0" +version = "9.0.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -1037,9 +1351,9 @@ dependencies = [ { name = "pluggy" }, { name = "pygments" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/fb/aa/405082ce2749be5398045152251ac69c0f3578c7077efc53431303af97ce/pytest-8.4.0.tar.gz", hash = "sha256:14d920b48472ea0dbf68e45b96cd1ffda4705f33307dcc86c676c1b5104838a6", size = 1515232, upload-time = "2025-06-02T17:36:30.03Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/0d/549bd94f1a0a402dc8cf64563a117c0f3765662e2e668477624baeec44d5/pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c", size = 1572165, upload-time = "2026-04-07T17:16:18.027Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/de/afa024cbe022b1b318a3d224125aa24939e99b4ff6f22e0ba639a2eaee47/pytest-8.4.0-py3-none-any.whl", hash = "sha256:f40f825768ad76c0977cbacdf1fd37c6f7a468e460ea6a0636078f8972d4517e", size = 363797, upload-time = "2025-06-02T17:36:27.859Z" }, + { url = "https://files.pythonhosted.org/packages/d4/24/a372aaf5c9b7208e7112038812994107bc65a84cd00e0354a88c2c77a617/pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9", size = 375249, upload-time = "2026-04-07T17:16:16.13Z" }, ] [[package]] @@ -1051,6 +1365,16 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, ] +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, +] + [[package]] name = "pyyaml" version = "6.0.3" @@ -1120,7 +1444,7 @@ wheels = [ [[package]] name = "requests" -version = "2.32.5" +version = "2.33.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -1128,9 +1452,22 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +sdist = { url = "https://files.pythonhosted.org/packages/34/64/8860370b167a9721e8956ae116825caff829224fbca0ca6e7bf8ddef8430/requests-2.33.0.tar.gz", hash = "sha256:c7ebc5e8b0f21837386ad0e1c8fe8b829fa5f544d8df3b2253bff14ef29d7652", size = 134232, upload-time = "2026-03-25T15:10:41.586Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, + { url = "https://files.pythonhosted.org/packages/56/5d/c814546c2333ceea4ba42262d8c4d55763003e767fa169adc693bd524478/requests-2.33.0-py3-none-any.whl", hash = 
"sha256:3324635456fa185245e24865e810cecec7b4caf933d7eb133dcde67d48cee69b", size = 65017, upload-time = "2026-03-25T15:10:40.382Z" }, +] + +[[package]] +name = "rich" +version = "14.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, ] [[package]] @@ -1204,6 +1541,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, ] +[[package]] +name = "slack-sdk" +version = "3.41.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/22/35/fc009118a13187dd9731657c60138e5a7c2dea88681a7f04dc406af5da7d/slack_sdk-3.41.0.tar.gz", hash = "sha256:eb61eb12a65bebeca9cb5d36b3f799e836ed2be21b456d15df2627cfe34076ca", size = 250568, upload-time = "2026-03-12T16:10:11.381Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/df/2e4be347ff98281b505cc0ccf141408cdd25eb5ca9f3830deb361b2472d3/slack_sdk-3.41.0-py2.py3-none-any.whl", hash = "sha256:bb18dcdfff1413ec448e759cf807ec3324090993d8ab9111c74081623b692a89", size = 313885, upload-time = "2026-03-12T16:10:09.811Z" }, +] + [[package]] name = "sniffio" version = 
"1.3.1" @@ -1234,6 +1580,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bf/e1/3ccb13c643399d22289c6a9786c1a91e3dcbb68bce4beb44926ac2c557bf/sqlalchemy-2.0.45-py3-none-any.whl", hash = "sha256:5225a288e4c8cc2308dbdd874edad6e7d0fd38eac1e9e5f23503425c8eee20d0", size = 1936672, upload-time = "2025-12-09T21:54:52.608Z" }, ] +[[package]] +name = "starlette" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/81/69/17425771797c36cded50b7fe44e850315d039f28b15901ab44839e70b593/starlette-1.0.0.tar.gz", hash = "sha256:6a4beaf1f81bb472fd19ea9b918b50dc3a77a6f2e190a12954b25e6ed5eea149", size = 2655289, upload-time = "2026-03-22T18:29:46.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/c9/584bc9651441b4ba60cc4d557d8a547b5aff901af35bda3a4ee30c819b82/starlette-1.0.0-py3-none-any.whl", hash = "sha256:d3ec55e0bb321692d275455ddfd3df75fff145d009685eb40dc91fc66b03d38b", size = 72651, upload-time = "2026-03-22T18:29:45.111Z" }, +] + [[package]] name = "tenacity" version = "9.1.2" @@ -1295,6 +1653,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/72/f4/0de46cfa12cdcbcd464cc59fde36912af405696f687e53a091fb432f694c/tokenizers-0.22.2-cp39-abi3-win_arm64.whl", hash = "sha256:9ce725d22864a1e965217204946f830c37876eee3b2ba6fc6255e8e903d5fcbc", size = 2612133, upload-time = "2026-01-05T10:45:17.232Z" }, ] +[[package]] +name = "tomlkit" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/af/14b24e41977adb296d6bd1fb59402cf7d60ce364f90c890bd2ec65c43b5a/tomlkit-0.14.0.tar.gz", hash = "sha256:cf00efca415dbd57575befb1f6634c4f42d2d87dbba376128adb42c121b87064", size = 187167, upload-time = "2026-01-13T01:14:53.304Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b5/11/87d6d29fb5d237229d67973a6c9e06e048f01cf4994dee194ab0ea841814/tomlkit-0.14.0-py3-none-any.whl", hash = "sha256:592064ed85b40fa213469f81ac584f67a4f2992509a7c3ea2d632208623a3680", size = 39310, upload-time = "2026-01-13T01:14:51.965Z" }, +] + [[package]] name = "tqdm" version = "4.67.1" @@ -1380,6 +1747,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, ] +[[package]] +name = "uvicorn" +version = "0.44.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/da/6eee1ff8b6cbeed47eeb5229749168e81eb4b7b999a1a15a7176e51410c9/uvicorn-0.44.0.tar.gz", hash = "sha256:6c942071b68f07e178264b9152f1f16dfac5da85880c4ce06366a96d70d4f31e", size = 86947, upload-time = "2026-04-06T09:23:22.826Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/23/a5bbd9600dd607411fa644c06ff4951bec3a4d82c4b852374024359c19c0/uvicorn-0.44.0-py3-none-any.whl", hash = "sha256:ce937c99a2cc70279556967274414c087888e8cec9f9c94644dfca11bd3ced89", size = 69425, upload-time = "2026-04-06T09:23:21.524Z" }, +] + [[package]] name = "virtualenv" version = "20.36.1" @@ -1394,6 +1774,36 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, upload-time = "2026-01-09T18:20:59.425Z" }, ] +[[package]] +name = "websockets" +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, +] + +[[package]] +name = "werkzeug" +version = "3.1.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dd/b2/381be8cfdee792dd117872481b6e378f85c957dd7c5bca38897b08f765fd/werkzeug-3.1.8.tar.gz", hash = "sha256:9bad61a4268dac112f1c5cd4630a56ede601b6ed420300677a869083d70a4c44", size = 875852, upload-time = "2026-04-02T18:49:14.268Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/93/8c/2e650f2afeb7ee576912636c23ddb621c91ac6a98e66dc8d29c3c69446e1/werkzeug-3.1.8-py3-none-any.whl", hash = "sha256:63a77fb8892bf28ebc3178683445222aa500e48ebad5ec77b0ad80f8726b1f50", size = 226459, upload-time = "2026-04-02T18:49:12.72Z" }, +] + [[package]] name = "xxhash" version = "3.6.0"