From a92eb5d229ef3e7b48f0f71a460ae77bbbb244db Mon Sep 17 00:00:00 2001 From: Dylan Shade <63427984+dpshade@users.noreply.github.com> Date: Thu, 18 Sep 2025 13:18:16 -0400 Subject: [PATCH 01/17] docs: Add comprehensive literate documentation system for HyperBEAM - Add mdbook-based documentation framework with 158+ Erlang module docs - Configure mdbook with custom port 3471 and disabled chapter numbering - Set build directory to 'dist' to avoid confusion with source structure - Add gitignore entry to exclude generated documentation files - Include custom CSS and JavaScript for enhanced documentation experience - Organize modules by functional categories: Arweave Foundation, Device Framework, Codec Modules, Core Services, and HyperBEAM Core --- .gitignore | 3 + docs/book/README.md | 125 + docs/book/book.toml | 56 + docs/book/custom.css | 36 + docs/book/custom.js | 231 ++ docs/book/src/SUMMARY.md | 158 ++ docs/book/src/ar_bundles.erl.md | 2070 +++++++++++++++++ docs/book/src/ar_deep_hash.erl.md | 62 + docs/book/src/ar_rate_limiter.erl.md | 187 ++ docs/book/src/ar_timestamp.erl.md | 97 + docs/book/src/ar_tx.erl.md | 259 +++ docs/book/src/ar_wallet.erl.md | 456 ++++ docs/book/src/dev_apply.erl.md | 350 +++ docs/book/src/dev_arweave.erl.md | 355 +++ docs/book/src/dev_arweave_block_cache.erl.md | 99 + docs/book/src/dev_auth_hook.erl.md | 431 ++++ docs/book/src/dev_cache.erl.md | 310 +++ docs/book/src/dev_cacheviz.erl.md | 115 + docs/book/src/dev_codec_ans104.erl.md | 511 ++++ docs/book/src/dev_codec_ans104_from.erl.md | 355 +++ docs/book/src/dev_codec_ans104_to.erl.md | 240 ++ docs/book/src/dev_codec_cookie.erl.md | 570 +++++ docs/book/src/dev_codec_cookie_auth.erl.md | 329 +++ .../src/dev_codec_cookie_test_vectors.erl.md | 904 +++++++ docs/book/src/dev_codec_flat.erl.md | 266 +++ docs/book/src/dev_codec_http_auth.erl.md | 210 ++ docs/book/src/dev_codec_httpsig.erl.md | 447 ++++ docs/book/src/dev_codec_httpsig_conv.erl.md | 564 +++++ 
docs/book/src/dev_codec_httpsig_keyid.erl.md | 195 ++ docs/book/src/dev_codec_httpsig_proxy.erl.md | 54 + .../book/src/dev_codec_httpsig_siginfo.erl.md | 387 +++ docs/book/src/dev_codec_json.erl.md | 170 ++ docs/book/src/dev_codec_structured.erl.md | 500 ++++ docs/book/src/dev_copycat.erl.md | 42 + docs/book/src/dev_copycat_arweave.erl.md | 112 + docs/book/src/dev_copycat_graphql.erl.md | 219 ++ docs/book/src/dev_cron.erl.md | 400 ++++ docs/book/src/dev_cu.erl.md | 46 + docs/book/src/dev_dedup.erl.md | 93 + docs/book/src/dev_delegated_compute.erl.md | 242 ++ docs/book/src/dev_faff.erl.md | 62 + docs/book/src/dev_genesis_wasm.erl.md | 756 ++++++ docs/book/src/dev_green_zone.erl.md | 342 +++ docs/book/src/dev_hook.erl.md | 272 +++ docs/book/src/dev_hyperbuddy.erl.md | 291 +++ docs/book/src/dev_json_iface.erl.md | 685 ++++++ docs/book/src/dev_local_name.erl.md | 258 ++ docs/book/src/dev_lookup.erl.md | 91 + docs/book/src/dev_lua.erl.md | 977 ++++++++ docs/book/src/dev_lua_lib.erl.md | 199 ++ docs/book/src/dev_lua_test.erl.md | 166 ++ docs/book/src/dev_lua_test_ledgers.erl.md | 887 +++++++ docs/book/src/dev_manifest.erl.md | 188 ++ docs/book/src/dev_message.erl.md | 625 +++++ docs/book/src/dev_meta.erl.md | 981 ++++++++ docs/book/src/dev_monitor.erl.md | 75 + docs/book/src/dev_multipass.erl.md | 77 + docs/book/src/dev_name.erl.md | 200 ++ docs/book/src/dev_node_process.erl.md | 204 ++ docs/book/src/dev_p4.erl.md | 308 +++ docs/book/src/dev_patch.erl.md | 288 +++ docs/book/src/dev_poda.erl.md | 298 +++ docs/book/src/dev_process.erl.md | 1221 ++++++++++ docs/book/src/dev_process_cache.erl.md | 204 ++ docs/book/src/dev_process_worker.erl.md | 213 ++ docs/book/src/dev_profile.erl.md | 414 ++++ docs/book/src/dev_push.erl.md | 911 ++++++++ docs/book/src/dev_query.erl.md | 359 +++ docs/book/src/dev_query_arweave.erl.md | 546 +++++ docs/book/src/dev_query_graphql.erl.md | 497 ++++ docs/book/src/dev_query_test_vectors.erl.md | 842 +++++++ docs/book/src/dev_relay.erl.md | 301 
+++ docs/book/src/dev_router.erl.md | 1450 ++++++++++++ docs/book/src/dev_scheduler.erl.md | 1859 +++++++++++++++ docs/book/src/dev_scheduler_cache.erl.md | 645 +++++ docs/book/src/dev_scheduler_formats.erl.md | 309 +++ docs/book/src/dev_scheduler_registry.erl.md | 186 ++ docs/book/src/dev_scheduler_server.erl.md | 238 ++ docs/book/src/dev_secret.erl.md | 845 +++++++ docs/book/src/dev_simple_pay.erl.md | 408 ++++ docs/book/src/dev_snp.erl.md | 693 ++++++ docs/book/src/dev_snp_nif.erl.md | 134 ++ docs/book/src/dev_stack.erl.md | 774 ++++++ docs/book/src/dev_test.erl.md | 333 +++ docs/book/src/dev_volume.erl.md | 576 +++++ docs/book/src/dev_wasi.erl.md | 364 +++ docs/book/src/dev_wasm.erl.md | 458 ++++ docs/book/src/dev_whois.erl.md | 94 + docs/book/src/hb.erl.md | 423 ++++ docs/book/src/hb_ao.erl.md | 1509 ++++++++++++ docs/book/src/hb_ao_test_vectors.erl.md | 892 +++++++ docs/book/src/hb_app.erl.md | 37 + docs/book/src/hb_beamr.erl.md | 443 ++++ docs/book/src/hb_beamr_io.erl.md | 238 ++ docs/book/src/hb_cache.erl.md | 967 ++++++++ docs/book/src/hb_cache_control.erl.md | 554 +++++ docs/book/src/hb_cache_render.erl.md | 511 ++++ docs/book/src/hb_client.erl.md | 244 ++ docs/book/src/hb_crypto.erl.md | 143 ++ docs/book/src/hb_debugger.erl.md | 242 ++ docs/book/src/hb_escape.erl.md | 394 ++++ docs/book/src/hb_event.erl.md | 505 ++++ docs/book/src/hb_examples.erl.md | 227 ++ docs/book/src/hb_features.erl.md | 132 ++ docs/book/src/hb_format.erl.md | 1201 ++++++++++ docs/book/src/hb_gateway_client.erl.md | 371 +++ docs/book/src/hb_http.erl.md | 914 ++++++++ docs/book/src/hb_http_benchmark_tests.erl.md | 8 + docs/book/src/hb_http_client.erl.md | 923 ++++++++ docs/book/src/hb_http_client_sup.erl.md | 34 + docs/book/src/hb_http_multi.erl.md | 393 ++++ docs/book/src/hb_http_server.erl.md | 607 +++++ docs/book/src/hb_json.erl.md | 39 + docs/book/src/hb_keccak.erl.md | 123 + docs/book/src/hb_link.erl.md | 264 +++ docs/book/src/hb_logger.erl.md | 130 ++ 
docs/book/src/hb_maps.erl.md | 398 ++++ docs/book/src/hb_message.erl.md | 1150 +++++++++ docs/book/src/hb_message_test_vectors.erl.md | 1736 ++++++++++++++ docs/book/src/hb_metrics_collector.erl.md | 76 + docs/book/src/hb_name.erl.md | 292 +++ docs/book/src/hb_opts.erl.md | 683 ++++++ docs/book/src/hb_path.erl.md | 695 ++++++ docs/book/src/hb_persistent.erl.md | 585 +++++ docs/book/src/hb_private.erl.md | 279 +++ docs/book/src/hb_process_monitor.erl.md | 106 + docs/book/src/hb_router.erl.md | 44 + docs/book/src/hb_singleton.erl.md | 1104 +++++++++ docs/book/src/hb_store.erl.md | 1123 +++++++++ docs/book/src/hb_store_fs.erl.md | 305 +++ docs/book/src/hb_store_gateway.erl.md | 388 +++ docs/book/src/hb_store_lmdb.erl.md | 1206 ++++++++++ docs/book/src/hb_store_lru.erl.md | 1078 +++++++++ docs/book/src/hb_store_opts.erl.md | 316 +++ docs/book/src/hb_store_remote_node.erl.md | 194 ++ docs/book/src/hb_store_rocksdb.erl.md | 884 +++++++ docs/book/src/hb_structured_fields.erl.md | 1426 ++++++++++++ docs/book/src/hb_sup.erl.md | 89 + docs/book/src/hb_test_utils.erl.md | 351 +++ docs/book/src/hb_tracer.erl.md | 186 ++ docs/book/src/hb_util.erl.md | 1666 +++++++++++++ docs/book/src/hb_volume.erl.md | 1047 +++++++++ docs/book/src/introduction.md | 37 + docs/book/src/rsa_pss.erl.md | 354 +++ 144 files changed, 65626 insertions(+) create mode 100644 docs/book/README.md create mode 100644 docs/book/book.toml create mode 100644 docs/book/custom.css create mode 100644 docs/book/custom.js create mode 100644 docs/book/src/SUMMARY.md create mode 100644 docs/book/src/ar_bundles.erl.md create mode 100644 docs/book/src/ar_deep_hash.erl.md create mode 100644 docs/book/src/ar_rate_limiter.erl.md create mode 100644 docs/book/src/ar_timestamp.erl.md create mode 100644 docs/book/src/ar_tx.erl.md create mode 100644 docs/book/src/ar_wallet.erl.md create mode 100644 docs/book/src/dev_apply.erl.md create mode 100644 docs/book/src/dev_arweave.erl.md create mode 100644 
docs/book/src/dev_arweave_block_cache.erl.md create mode 100644 docs/book/src/dev_auth_hook.erl.md create mode 100644 docs/book/src/dev_cache.erl.md create mode 100644 docs/book/src/dev_cacheviz.erl.md create mode 100644 docs/book/src/dev_codec_ans104.erl.md create mode 100644 docs/book/src/dev_codec_ans104_from.erl.md create mode 100644 docs/book/src/dev_codec_ans104_to.erl.md create mode 100644 docs/book/src/dev_codec_cookie.erl.md create mode 100644 docs/book/src/dev_codec_cookie_auth.erl.md create mode 100644 docs/book/src/dev_codec_cookie_test_vectors.erl.md create mode 100644 docs/book/src/dev_codec_flat.erl.md create mode 100644 docs/book/src/dev_codec_http_auth.erl.md create mode 100644 docs/book/src/dev_codec_httpsig.erl.md create mode 100644 docs/book/src/dev_codec_httpsig_conv.erl.md create mode 100644 docs/book/src/dev_codec_httpsig_keyid.erl.md create mode 100644 docs/book/src/dev_codec_httpsig_proxy.erl.md create mode 100644 docs/book/src/dev_codec_httpsig_siginfo.erl.md create mode 100644 docs/book/src/dev_codec_json.erl.md create mode 100644 docs/book/src/dev_codec_structured.erl.md create mode 100644 docs/book/src/dev_copycat.erl.md create mode 100644 docs/book/src/dev_copycat_arweave.erl.md create mode 100644 docs/book/src/dev_copycat_graphql.erl.md create mode 100644 docs/book/src/dev_cron.erl.md create mode 100644 docs/book/src/dev_cu.erl.md create mode 100644 docs/book/src/dev_dedup.erl.md create mode 100644 docs/book/src/dev_delegated_compute.erl.md create mode 100644 docs/book/src/dev_faff.erl.md create mode 100644 docs/book/src/dev_genesis_wasm.erl.md create mode 100644 docs/book/src/dev_green_zone.erl.md create mode 100644 docs/book/src/dev_hook.erl.md create mode 100644 docs/book/src/dev_hyperbuddy.erl.md create mode 100644 docs/book/src/dev_json_iface.erl.md create mode 100644 docs/book/src/dev_local_name.erl.md create mode 100644 docs/book/src/dev_lookup.erl.md create mode 100644 docs/book/src/dev_lua.erl.md create mode 100644 
docs/book/src/dev_lua_lib.erl.md create mode 100644 docs/book/src/dev_lua_test.erl.md create mode 100644 docs/book/src/dev_lua_test_ledgers.erl.md create mode 100644 docs/book/src/dev_manifest.erl.md create mode 100644 docs/book/src/dev_message.erl.md create mode 100644 docs/book/src/dev_meta.erl.md create mode 100644 docs/book/src/dev_monitor.erl.md create mode 100644 docs/book/src/dev_multipass.erl.md create mode 100644 docs/book/src/dev_name.erl.md create mode 100644 docs/book/src/dev_node_process.erl.md create mode 100644 docs/book/src/dev_p4.erl.md create mode 100644 docs/book/src/dev_patch.erl.md create mode 100644 docs/book/src/dev_poda.erl.md create mode 100644 docs/book/src/dev_process.erl.md create mode 100644 docs/book/src/dev_process_cache.erl.md create mode 100644 docs/book/src/dev_process_worker.erl.md create mode 100644 docs/book/src/dev_profile.erl.md create mode 100644 docs/book/src/dev_push.erl.md create mode 100644 docs/book/src/dev_query.erl.md create mode 100644 docs/book/src/dev_query_arweave.erl.md create mode 100644 docs/book/src/dev_query_graphql.erl.md create mode 100644 docs/book/src/dev_query_test_vectors.erl.md create mode 100644 docs/book/src/dev_relay.erl.md create mode 100644 docs/book/src/dev_router.erl.md create mode 100644 docs/book/src/dev_scheduler.erl.md create mode 100644 docs/book/src/dev_scheduler_cache.erl.md create mode 100644 docs/book/src/dev_scheduler_formats.erl.md create mode 100644 docs/book/src/dev_scheduler_registry.erl.md create mode 100644 docs/book/src/dev_scheduler_server.erl.md create mode 100644 docs/book/src/dev_secret.erl.md create mode 100644 docs/book/src/dev_simple_pay.erl.md create mode 100644 docs/book/src/dev_snp.erl.md create mode 100644 docs/book/src/dev_snp_nif.erl.md create mode 100644 docs/book/src/dev_stack.erl.md create mode 100644 docs/book/src/dev_test.erl.md create mode 100644 docs/book/src/dev_volume.erl.md create mode 100644 docs/book/src/dev_wasi.erl.md create mode 100644 
docs/book/src/dev_wasm.erl.md create mode 100644 docs/book/src/dev_whois.erl.md create mode 100644 docs/book/src/hb.erl.md create mode 100644 docs/book/src/hb_ao.erl.md create mode 100644 docs/book/src/hb_ao_test_vectors.erl.md create mode 100644 docs/book/src/hb_app.erl.md create mode 100644 docs/book/src/hb_beamr.erl.md create mode 100644 docs/book/src/hb_beamr_io.erl.md create mode 100644 docs/book/src/hb_cache.erl.md create mode 100644 docs/book/src/hb_cache_control.erl.md create mode 100644 docs/book/src/hb_cache_render.erl.md create mode 100644 docs/book/src/hb_client.erl.md create mode 100644 docs/book/src/hb_crypto.erl.md create mode 100644 docs/book/src/hb_debugger.erl.md create mode 100644 docs/book/src/hb_escape.erl.md create mode 100644 docs/book/src/hb_event.erl.md create mode 100644 docs/book/src/hb_examples.erl.md create mode 100644 docs/book/src/hb_features.erl.md create mode 100644 docs/book/src/hb_format.erl.md create mode 100644 docs/book/src/hb_gateway_client.erl.md create mode 100644 docs/book/src/hb_http.erl.md create mode 100644 docs/book/src/hb_http_benchmark_tests.erl.md create mode 100644 docs/book/src/hb_http_client.erl.md create mode 100644 docs/book/src/hb_http_client_sup.erl.md create mode 100644 docs/book/src/hb_http_multi.erl.md create mode 100644 docs/book/src/hb_http_server.erl.md create mode 100644 docs/book/src/hb_json.erl.md create mode 100644 docs/book/src/hb_keccak.erl.md create mode 100644 docs/book/src/hb_link.erl.md create mode 100644 docs/book/src/hb_logger.erl.md create mode 100644 docs/book/src/hb_maps.erl.md create mode 100644 docs/book/src/hb_message.erl.md create mode 100644 docs/book/src/hb_message_test_vectors.erl.md create mode 100644 docs/book/src/hb_metrics_collector.erl.md create mode 100644 docs/book/src/hb_name.erl.md create mode 100644 docs/book/src/hb_opts.erl.md create mode 100644 docs/book/src/hb_path.erl.md create mode 100644 docs/book/src/hb_persistent.erl.md create mode 100644 
docs/book/src/hb_private.erl.md create mode 100644 docs/book/src/hb_process_monitor.erl.md create mode 100644 docs/book/src/hb_router.erl.md create mode 100644 docs/book/src/hb_singleton.erl.md create mode 100644 docs/book/src/hb_store.erl.md create mode 100644 docs/book/src/hb_store_fs.erl.md create mode 100644 docs/book/src/hb_store_gateway.erl.md create mode 100644 docs/book/src/hb_store_lmdb.erl.md create mode 100644 docs/book/src/hb_store_lru.erl.md create mode 100644 docs/book/src/hb_store_opts.erl.md create mode 100644 docs/book/src/hb_store_remote_node.erl.md create mode 100644 docs/book/src/hb_store_rocksdb.erl.md create mode 100644 docs/book/src/hb_structured_fields.erl.md create mode 100644 docs/book/src/hb_sup.erl.md create mode 100644 docs/book/src/hb_test_utils.erl.md create mode 100644 docs/book/src/hb_tracer.erl.md create mode 100644 docs/book/src/hb_util.erl.md create mode 100644 docs/book/src/hb_volume.erl.md create mode 100644 docs/book/src/introduction.md create mode 100644 docs/book/src/rsa_pss.erl.md diff --git a/.gitignore b/.gitignore index 5823721c3..e910e01f0 100644 --- a/.gitignore +++ b/.gitignore @@ -43,6 +43,9 @@ mkdocs-site/ mkdocs-site-id.txt mkdocs-site-manifest.csv +# mdbook generated documentation +docs/book/dist/ + !test/admissible-report-wallet.json !test/admissible-report.json !test/config.json \ No newline at end of file diff --git a/docs/book/README.md b/docs/book/README.md new file mode 100644 index 000000000..7a7e325db --- /dev/null +++ b/docs/book/README.md @@ -0,0 +1,125 @@ +# HyperBEAM Literate Documentation + +This repository contains the mdBook-based documentation for HyperBEAM, generated from Erlang source files using a literate programming approach. + +## Overview + +The documentation combines Erlang source code with comprehensive documentation in a format optimized for both reading and LLM consumption. 
Each `.erl.md` file represents a module from the HyperBEAM codebase with embedded documentation, function signatures, and implementation details. + +## Generation Process + +### 1. Source Documentation Generation + +Documentation is generated from the HyperBEAM repository using the literate Erlang script: + +```bash +# From the HyperBEAM repository +./docs/build-literate-erlang.sh +``` + +This script: +- Extracts module documentation from `%%%` comments +- Converts edoc tags (`@author`, `@copyright`, `@doc`, `@end`) to markdown format +- Processes function documentation and specifications +- Converts quote patterns (`'text'` to `text`) for proper backtick formatting +- Generates individual `.erl.md` files for each module + +### 2. Copy Generated Files + +Copy the generated documentation to this book's source directory: + +```bash +# Copy from HyperBEAM docs/literate-erlang/ to src/ +cp /path/to/HyperBEAM/docs/literate-erlang/*.erl.md src/ +``` + +### 3. Build the mdBook + +Generate the final documentation: + +```bash +mdbook build +``` + +This creates the static HTML documentation in the `book/` directory. 
+ +## Features + +### Enhanced Copy Functionality + +The documentation includes a custom copy button (📋 icon) in the top-right corner that: +- Fetches the original markdown content from the `src/` directory +- Copies the raw markdown to clipboard for LLM use +- Preserves all formatting, code blocks, and structure exactly as written + +### Theme Support + +Supports all mdBook themes with HyperBEAM brand colors: +- **Neon Green**: `#00ff94` +- **Cyan**: `#00d4ff` +- **Yellow**: `#fff700` +- **Magenta**: `#ff006a` + +### Clean Documentation Structure + +Each module page includes: +- GitHub source link pointing to the `edge` branch +- **Author** and **Copyright** information (when available) +- Exported functions list +- Function documentation with signatures +- Implementation code blocks +- Test functions (when present) + +## Configuration + +### book.toml + +Key configuration options: + +```toml +[book] +title = "HyperBEAM Literate Documentation" +src = "src" + +[build] +build-dir = "book" + +[output.html] +additional-css = ["custom.css"] +additional-js = ["custom.js"] +edit-url-template = "https://github.com/permaweb/HyperBEAM/edit/edge/src/{path}" +git-repository-url = "https://github.com/permaweb/HyperBEAM" +``` + +### Custom Styling + +- `custom.css`: HyperBEAM brand colors for all themes +- `custom.js`: Copy functionality and theme detection + +## Development Workflow + +1. **Update source documentation**: Run `./docs/build-literate-erlang.sh` in HyperBEAM repo +2. **Copy to book**: Transfer generated `.erl.md` files to `src/` +3. **Build book**: Run `mdbook build` +4. 
**Serve locally**: Use `mdbook serve` for development + +## Repository Structure + +``` +HB-DevicesBook/ +├── src/ # Markdown source files +│ ├── *.erl.md # Generated module documentation +│ └── SUMMARY.md # Book structure +├── book/ # Generated HTML output +├── custom.css # HyperBEAM theme styling +├── custom.js # Copy functionality +├── book.toml # mdBook configuration +└── README.md # This file +``` + +## Notes + +- The documentation is generated from the HyperBEAM `edge` branch +- All GitHub links point to the source files in the HyperBEAM repository +- The copy functionality fetches original markdown for accurate LLM consumption +- Search is enabled with fuzzy matching and result limiting for performance \ No newline at end of file diff --git a/docs/book/book.toml b/docs/book/book.toml new file mode 100644 index 000000000..225ab784c --- /dev/null +++ b/docs/book/book.toml @@ -0,0 +1,56 @@ +[book] +authors = ["Dylan Shade", "HyperBEAM Team"] +language = "en" +src = "src" +title = "HyperBEAM Literate Documentation" +description = "Literate programming documentation for the HyperBEAM decentralized operating system, combining Erlang source code with comprehensive documentation." 
+ +[build] +build-dir = "dist" + +[rust] +edition = "2021" + +[output.html] +additional-css = ["custom.css"] +additional-js = ["custom.js"] +mathjax-support = false +copy-fonts = true +site-url = "/HyperBEAM-Literate-Documentation/" +cname = "" +edit-url-template = "https://github.com/permaweb/HyperBEAM/edit/edge/src/{path}" +git-repository-url = "https://github.com/permaweb/HyperBEAM" +git-repository-icon = "fa-github" +no-section-label = true + +[output.html.serve] +port = 3471 + +[output.html.live-reload] +enable = true + +[output.html.fold] +enable = true +level = 1 + +[output.html.playground] +editable = false +copyable = true +copy-js = true +line-numbers = false +runnable = false + +[output.html.search] +enable = true +limit-results = 30 +teaser-word-count = 30 +use-boolean-and = true +boost-title = 2 +boost-hierarchy = 1 +boost-paragraph = 1 +expand = true +heading-split-level = 3 + +[preprocessor.links] + +[preprocessor.index] \ No newline at end of file diff --git a/docs/book/custom.css b/docs/book/custom.css new file mode 100644 index 000000000..79420c4d7 --- /dev/null +++ b/docs/book/custom.css @@ -0,0 +1,36 @@ +/* HyperBEAM Brand Extension for mdBook - MINIMAL approach */ +/* Only add brand colors, no layout modifications */ + +:root { + /* HyperBEAM brand palette */ + --hyperbeam-neon: #00ff94; + --hyperbeam-cyan: #00d4ff; + --hyperbeam-yellow: #fff700; + --hyperbeam-magenta: #ff006a; +} + +/* Only override accent colors, keep all layout/structure intact */ +.ayu { + --sidebar-active: var(--hyperbeam-neon); + --links: var(--hyperbeam-cyan); +} + +.coal { + --sidebar-active: var(--hyperbeam-cyan); + --links: var(--hyperbeam-neon); +} + +.light { + --sidebar-active: var(--hyperbeam-cyan); + --links: var(--hyperbeam-magenta); +} + +.navy { + --sidebar-active: var(--hyperbeam-cyan); + --links: var(--hyperbeam-neon); +} + +.rust { + --sidebar-active: var(--hyperbeam-neon); + --links: var(--hyperbeam-cyan); +} \ No newline at end of file diff --git 
a/docs/book/custom.js b/docs/book/custom.js new file mode 100644 index 000000000..a69e63e8a --- /dev/null +++ b/docs/book/custom.js @@ -0,0 +1,231 @@ +// HyperBEAM mdBook Enhancement +// Following mdBook best practices: minimal JavaScript additions + +document.addEventListener('DOMContentLoaded', function() { + // Initialize theme change detection + initThemeDetection(); + + // Add copy buttons to code blocks (disabled - using mdbook's built-in) + // addCopyButtons(); + + // Replace edit icon with page copy functionality + replaceEditWithCopy(); +}); + +function initThemeDetection() { + // Watch for theme changes via class changes on html element + const observer = new MutationObserver(function(mutations) { + mutations.forEach(function(mutation) { + if (mutation.type === 'attributes' && mutation.attributeName === 'class') { + handleThemeChange(); + } + }); + }); + + // Start observing + observer.observe(document.documentElement, { + attributes: true, + attributeFilter: ['class'] + }); + + // Also listen for storage changes (theme switching) + window.addEventListener('storage', function(e) { + if (e.key === 'mdbook-theme') { + handleThemeChange(); + } + }); + + // Initial setup + handleThemeChange(); +} + +function handleThemeChange() { + // Re-apply theme-aware styling to any dynamic elements + const copyButtons = document.querySelectorAll('.copy-code-btn'); + copyButtons.forEach(function(btn) { + // Update button colors to match current theme + btn.style.background = 'var(--theme-popup-bg, #333)'; + btn.style.color = 'var(--fg, #fff)'; + btn.style.borderColor = 'var(--theme-popup-border, #555)'; + }); + + // Log theme change for debugging + const currentTheme = document.documentElement.className || 'default'; + console.log('HyperBEAM: Theme changed to', currentTheme); +} + +function addCopyButtons() { + const codeBlocks = document.querySelectorAll('pre code'); + + codeBlocks.forEach(function(codeBlock) { + const pre = codeBlock.parentElement; + + // Skip if copy 
button already exists + if (pre.querySelector('.copy-code-btn')) return; + + const copyBtn = document.createElement('button'); + copyBtn.className = 'copy-code-btn'; + copyBtn.textContent = '📋'; + copyBtn.setAttribute('aria-label', 'Copy code to clipboard'); + copyBtn.style.cssText = ` + position: absolute; + top: 8px; + right: 8px; + background: var(--theme-popup-bg, #333); + color: var(--fg, #fff); + border: 1px solid var(--theme-popup-border, #555); + padding: 4px 6px; + border-radius: 4px; + font-size: 11px; + cursor: pointer; + opacity: 0; + transition: all 0.2s ease; + z-index: 10; + line-height: 1; + min-width: 24px; + height: 24px; + display: flex; + align-items: center; + justify-content: center; + `; + + pre.style.position = 'relative'; + pre.appendChild(copyBtn); + + // Show/hide on hover + pre.addEventListener('mouseenter', function() { + copyBtn.style.opacity = '0.8'; + }); + + pre.addEventListener('mouseleave', function() { + copyBtn.style.opacity = '0'; + }); + + // Hover effect + copyBtn.addEventListener('mouseenter', function() { + copyBtn.style.opacity = '1'; + copyBtn.style.background = 'var(--hyperbeam-accent-primary, #00ff94)'; + copyBtn.style.color = '#000'; + }); + + copyBtn.addEventListener('mouseleave', function() { + copyBtn.style.background = 'var(--theme-popup-bg, #333)'; + copyBtn.style.color = 'var(--fg, #fff)'; + }); + + // Copy functionality + copyBtn.addEventListener('click', function() { + navigator.clipboard.writeText(codeBlock.textContent).then(function() { + const originalText = copyBtn.textContent; + copyBtn.textContent = '✓'; + copyBtn.style.background = 'var(--hyperbeam-accent-primary, #00ff94)'; + copyBtn.style.color = '#000'; + + setTimeout(function() { + copyBtn.textContent = originalText; + copyBtn.style.background = 'var(--theme-popup-bg, #333)'; + copyBtn.style.color = 'var(--fg, #fff)'; + }, 1500); + }).catch(function() { + copyBtn.textContent = '✗'; + setTimeout(function() { + copyBtn.textContent = '📋'; + }, 1500); + 
}); + }); + }); +} + +function replaceEditWithCopy() { + // Remove the edit button and replace with copy page button + const editButton = document.querySelector('a[title="Suggest an edit"], a[href*="edit"]'); + if (editButton) { + // Create new copy button + const copyPageBtn = document.createElement('button'); + copyPageBtn.innerHTML = ``; + copyPageBtn.title = 'Copy page content for LLM'; + copyPageBtn.setAttribute('aria-label', 'Copy page content for LLM use'); + copyPageBtn.className = editButton.className; + copyPageBtn.style.cssText = ` + background: none; + border: none; + color: inherit; + font-size: inherit; + cursor: pointer; + padding: 8px; + border-radius: 4px; + transition: background-color 0.2s ease; + display: inline-flex; + align-items: center; + justify-content: center; + vertical-align: middle; + `; + + // Add hover effect + copyPageBtn.addEventListener('mouseenter', function() { + copyPageBtn.style.backgroundColor = 'var(--theme-hover, rgba(255,255,255,0.1))'; + }); + + copyPageBtn.addEventListener('mouseleave', function() { + copyPageBtn.style.backgroundColor = 'transparent'; + }); + + // Copy functionality + copyPageBtn.addEventListener('click', function() { + // Get the current page path and construct the markdown file URL + const currentPath = window.location.pathname; + const pathParts = currentPath.split('/'); + const fileName = pathParts[pathParts.length - 1] || pathParts[pathParts.length - 2]; + + // Construct the path to the original markdown file + let markdownPath = ''; + if (fileName && fileName.endsWith('.html')) { + markdownPath = fileName.replace('.html', '.md'); + } else { + // Handle index pages or other cases + markdownPath = 'index.md'; + } + + // Try to fetch the original markdown file + fetch(`src/${markdownPath}`) + .then(response => { + if (response.ok) { + return response.text(); + } + throw new Error('Could not fetch markdown file'); + }) + .then(markdownContent => { + 
navigator.clipboard.writeText(markdownContent).then(function() { + const originalContent = copyPageBtn.innerHTML; + copyPageBtn.innerHTML = ``; + copyPageBtn.style.color = 'var(--hyperbeam-accent-primary, #00ff94)'; + + setTimeout(function() { + copyPageBtn.innerHTML = originalContent; + copyPageBtn.style.color = 'inherit'; + }, 2000); + }).catch(function() { + showCopyError(); + }); + }) + .catch(function() { + showCopyError(); + }); + + function showCopyError() { + const originalContent = copyPageBtn.innerHTML; + copyPageBtn.innerHTML = ``; + copyPageBtn.style.color = '#ff6b6b'; + + setTimeout(function() { + copyPageBtn.innerHTML = originalContent; + copyPageBtn.style.color = 'inherit'; + }, 2000); + } + }); + + // Replace the edit button + editButton.parentNode.replaceChild(copyPageBtn, editButton); + } +} + diff --git a/docs/book/src/SUMMARY.md b/docs/book/src/SUMMARY.md new file mode 100644 index 000000000..93adc7dee --- /dev/null +++ b/docs/book/src/SUMMARY.md @@ -0,0 +1,158 @@ +# Summary + +[Introduction](introduction.md) + +# Arweave Foundation + +- [ar_bundles](ar_bundles.erl.md) +- [ar_deep_hash](ar_deep_hash.erl.md) +- [ar_rate_limiter](ar_rate_limiter.erl.md) +- [ar_timestamp](ar_timestamp.erl.md) +- [ar_tx](ar_tx.erl.md) +- [ar_wallet](ar_wallet.erl.md) + +# Device Framework + +- [dev_apply](dev_apply.erl.md) +- [dev_cache](dev_cache.erl.md) +- [dev_cacheviz](dev_cacheviz.erl.md) +- [dev_copycat](dev_copycat.erl.md) +- [dev_copycat_arweave](dev_copycat_arweave.erl.md) +- [dev_copycat_graphql](dev_copycat_graphql.erl.md) +- [dev_cron](dev_cron.erl.md) +- [dev_cu](dev_cu.erl.md) +- [dev_dedup](dev_dedup.erl.md) +- [dev_faff](dev_faff.erl.md) +- [dev_monitor](dev_monitor.erl.md) +- [dev_multipass](dev_multipass.erl.md) +- [dev_test](dev_test.erl.md) +- [dev_arweave](dev_arweave.erl.md) +- [dev_arweave_block_cache](dev_arweave_block_cache.erl.md) +- [dev_auth_hook](dev_auth_hook.erl.md) +- [dev_secret](dev_secret.erl.md) + +# Codec Modules + +- 
[dev_codec_ans104](dev_codec_ans104.erl.md) +- [dev_codec_ans104_from](dev_codec_ans104_from.erl.md) +- [dev_codec_ans104_to](dev_codec_ans104_to.erl.md) +- [dev_codec_cookie](dev_codec_cookie.erl.md) +- [dev_codec_cookie_auth](dev_codec_cookie_auth.erl.md) +- [dev_codec_cookie_test_vectors](dev_codec_cookie_test_vectors.erl.md) +- [dev_codec_flat](dev_codec_flat.erl.md) +- [dev_codec_http_auth](dev_codec_http_auth.erl.md) +- [dev_codec_httpsig](dev_codec_httpsig.erl.md) +- [dev_codec_httpsig_conv](dev_codec_httpsig_conv.erl.md) +- [dev_codec_httpsig_keyid](dev_codec_httpsig_keyid.erl.md) +- [dev_codec_httpsig_proxy](dev_codec_httpsig_proxy.erl.md) +- [dev_codec_httpsig_siginfo](dev_codec_httpsig_siginfo.erl.md) +- [dev_codec_json](dev_codec_json.erl.md) +- [dev_codec_structured](dev_codec_structured.erl.md) + +# Core Services + +- [dev_delegated_compute](dev_delegated_compute.erl.md) +- [dev_genesis_wasm](dev_genesis_wasm.erl.md) +- [dev_green_zone](dev_green_zone.erl.md) +- [dev_hook](dev_hook.erl.md) +- [dev_hyperbuddy](dev_hyperbuddy.erl.md) +- [dev_json_iface](dev_json_iface.erl.md) +- [dev_local_name](dev_local_name.erl.md) +- [dev_lookup](dev_lookup.erl.md) +- [dev_lua](dev_lua.erl.md) +- [dev_lua_lib](dev_lua_lib.erl.md) +- [dev_lua_test](dev_lua_test.erl.md) +- [dev_lua_test_ledgers](dev_lua_test_ledgers.erl.md) +- [dev_manifest](dev_manifest.erl.md) +- [dev_message](dev_message.erl.md) +- [dev_meta](dev_meta.erl.md) +- [dev_name](dev_name.erl.md) +- [dev_node_process](dev_node_process.erl.md) +- [dev_p4](dev_p4.erl.md) +- [dev_patch](dev_patch.erl.md) +- [dev_poda](dev_poda.erl.md) +- [dev_process](dev_process.erl.md) +- [dev_process_cache](dev_process_cache.erl.md) +- [dev_process_worker](dev_process_worker.erl.md) +- [dev_profile](dev_profile.erl.md) +- [dev_push](dev_push.erl.md) +- [dev_query](dev_query.erl.md) +- [dev_query_arweave](dev_query_arweave.erl.md) +- [dev_query_graphql](dev_query_graphql.erl.md) +- 
[dev_query_test_vectors](dev_query_test_vectors.erl.md) +- [dev_relay](dev_relay.erl.md) +- [dev_router](dev_router.erl.md) +- [dev_scheduler](dev_scheduler.erl.md) +- [dev_scheduler_cache](dev_scheduler_cache.erl.md) +- [dev_scheduler_formats](dev_scheduler_formats.erl.md) +- [dev_scheduler_registry](dev_scheduler_registry.erl.md) +- [dev_scheduler_server](dev_scheduler_server.erl.md) +- [dev_simple_pay](dev_simple_pay.erl.md) +- [dev_snp](dev_snp.erl.md) +- [dev_snp_nif](dev_snp_nif.erl.md) +- [dev_stack](dev_stack.erl.md) +- [dev_volume](dev_volume.erl.md) +- [dev_wasi](dev_wasi.erl.md) +- [dev_wasm](dev_wasm.erl.md) +- [dev_whois](dev_whois.erl.md) + +# HyperBEAM Core + +- [hb](hb.erl.md) +- [hb_ao](hb_ao.erl.md) +- [hb_ao_test_vectors](hb_ao_test_vectors.erl.md) +- [hb_app](hb_app.erl.md) +- [hb_beamr](hb_beamr.erl.md) +- [hb_beamr_io](hb_beamr_io.erl.md) +- [hb_cache](hb_cache.erl.md) +- [hb_cache_control](hb_cache_control.erl.md) +- [hb_cache_render](hb_cache_render.erl.md) +- [hb_client](hb_client.erl.md) +- [hb_crypto](hb_crypto.erl.md) +- [hb_debugger](hb_debugger.erl.md) +- [hb_escape](hb_escape.erl.md) +- [hb_event](hb_event.erl.md) +- [hb_examples](hb_examples.erl.md) +- [hb_features](hb_features.erl.md) +- [hb_format](hb_format.erl.md) +- [hb_gateway_client](hb_gateway_client.erl.md) +- [hb_http](hb_http.erl.md) +- [hb_http_benchmark_tests](hb_http_benchmark_tests.erl.md) +- [hb_http_client](hb_http_client.erl.md) +- [hb_http_client_sup](hb_http_client_sup.erl.md) +- [hb_http_multi](hb_http_multi.erl.md) +- [hb_http_server](hb_http_server.erl.md) +- [hb_json](hb_json.erl.md) +- [hb_keccak](hb_keccak.erl.md) +- [hb_link](hb_link.erl.md) +- [hb_logger](hb_logger.erl.md) +- [hb_maps](hb_maps.erl.md) +- [hb_message](hb_message.erl.md) +- [hb_message_test_vectors](hb_message_test_vectors.erl.md) +- [hb_metrics_collector](hb_metrics_collector.erl.md) +- [hb_name](hb_name.erl.md) +- [hb_opts](hb_opts.erl.md) +- [hb_path](hb_path.erl.md) +- 
[hb_persistent](hb_persistent.erl.md) +- [hb_private](hb_private.erl.md) +- [hb_process_monitor](hb_process_monitor.erl.md) +- [hb_router](hb_router.erl.md) +- [hb_singleton](hb_singleton.erl.md) +- [hb_store](hb_store.erl.md) +- [hb_store_fs](hb_store_fs.erl.md) +- [hb_store_gateway](hb_store_gateway.erl.md) +- [hb_store_lmdb](hb_store_lmdb.erl.md) +- [hb_store_lru](hb_store_lru.erl.md) +- [hb_store_opts](hb_store_opts.erl.md) +- [hb_store_remote_node](hb_store_remote_node.erl.md) +- [hb_store_rocksdb](hb_store_rocksdb.erl.md) +- [hb_structured_fields](hb_structured_fields.erl.md) +- [hb_sup](hb_sup.erl.md) +- [hb_test_utils](hb_test_utils.erl.md) +- [hb_tracer](hb_tracer.erl.md) +- [hb_util](hb_util.erl.md) +- [hb_volume](hb_volume.erl.md) + +# Other Modules + +- [rsa_pss](rsa_pss.erl.md) \ No newline at end of file diff --git a/docs/book/src/ar_bundles.erl.md b/docs/book/src/ar_bundles.erl.md new file mode 100644 index 000000000..4bcdb534f --- /dev/null +++ b/docs/book/src/ar_bundles.erl.md @@ -0,0 +1,2070 @@ +# ar_bundles + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_bundles.erl) + +Module for creating, signing, and verifying Arweave data items and bundles. + +--- + +## Exported Functions + +- `data_item_signature_data/1` +- `decode_tags/1` +- `deserialize/1` +- `deserialize/2` +- `encode_tags/1` +- `find/2` +- `format/1` +- `format/2` +- `format/3` +- `hd/1` +- `id/1` +- `id/2` +- `is_signed/1` +- `manifest_item/1` +- `manifest/1` +- `map/1` +- `member/2` +- `new_item/4` +- `normalize/1` +- `parse_manifest/1` +- `print/1` +- `reset_ids/1` +- `serialize/1` +- `serialize/2` +- `sign_item/2` +- `signer/1` +- `type/1` +- `verify_item/1` + +--- + +### print + +Module for creating, signing, and verifying Arweave data items and bundles. + +```erlang +print(Item) -> + io:format(standard_error, "~s", [lists:flatten(format(Item))]). +``` + +### format + +Module for creating, signing, and verifying Arweave data items and bundles. 
+ +```erlang +format(Item) -> format(Item, 0). +``` + +### format + +Module for creating, signing, and verifying Arweave data items and bundles. + +```erlang +format(Item, Indent) -> format(Item, Indent, #{}). +``` + +### format + +Module for creating, signing, and verifying Arweave data items and bundles. + +```erlang +format(Item, Indent, Opts) when is_list(Item); is_map(Item) -> + format(normalize(Item), Indent, Opts); +``` + +### format + +Module for creating, signing, and verifying Arweave data items and bundles. + +```erlang +format(Item, Indent, Opts) when is_record(Item, tx) -> + MustVerify = hb_opts:get(debug_ids, true, Opts), + Valid = + if MustVerify -> verify_item(Item); + true -> true + end, + UnsignedID = + if MustVerify -> hb_util:encode(id(Item, unsigned)); + true -> <<"[SKIPPED ID]">> + end, + SignedID = + if MustVerify -> + case id(Item, signed) of + not_signed -> <<"[NOT SIGNED]">>; + ID -> hb_util:encode(ID) + end; + true -> <<"[SKIPPED ID]">> + end, + format_line( + "TX ( ~s: ~s ) {", + [ + if + MustVerify andalso Item#tx.signature =/= ?DEFAULT_SIG -> + lists:flatten( + io_lib:format( + "~s (signed) ~s (unsigned)", + [SignedID, UnsignedID] + ) + ); + true -> UnsignedID + end, + if + not MustVerify -> "[SKIPPED VERIFICATION]"; + Valid == true -> "[SIGNED+VALID]"; + true -> "[UNSIGNED/INVALID]" + end + ], + Indent + ) ++ + case MustVerify andalso (not Valid) andalso Item#tx.signature =/= ?DEFAULT_SIG of + true -> + format_line("!!! 
CAUTION: ITEM IS SIGNED BUT INVALID !!!", Indent + 1); + false -> [] + end ++ + case is_signed(Item) of + true -> + format_line("Signer: ~s", [hb_util:encode(signer(Item))], Indent + 1); + false -> [] + end ++ + format_line("Target: ~s", [ + case Item#tx.target of + <<>> -> "[NONE]"; + Target -> hb_util:id(Target) + end + ], Indent + 1) ++ + format_line("Last TX: ~s", [ + case Item#tx.anchor of + ?DEFAULT_LAST_TX -> "[NONE]"; + LastTX -> hb_util:encode(LastTX) + end + ], Indent + 1) ++ + format_line("Tags:", Indent + 1) ++ + lists:map( + fun({Key, Val}) -> format_line("~s -> ~s", [Key, Val], Indent + 2) end, + Item#tx.tags + ) ++ + format_line("Data:", Indent + 1) ++ format_data(Item, Indent + 2) ++ + format_line("}", Indent); +``` + +### format + +Module for creating, signing, and verifying Arweave data items and bundles. + +```erlang +format(Item, Indent, _Opts) -> + % Whatever we have, its not a tx... +``` + +### format_data + +```erlang +format_data(Item, Indent) when is_binary(Item#tx.data) -> + case lists:keyfind(<<"bundle-format">>, 1, Item#tx.tags) of + {_, _} -> + format_data(deserialize(serialize(Item)), Indent); + false -> + format_line( + "Binary: ~p... <~p bytes>", + [format_binary(Item#tx.data), byte_size(Item#tx.data)], + Indent + ) + end; +``` + +### format_data + +```erlang +format_data(Item, Indent) when is_map(Item#tx.data) -> + format_line("Map:", Indent) ++ + lists:map( + fun({Name, MapItem}) -> + format_line("~s ->", [Name], Indent + 1) ++ + format(MapItem, Indent + 2) + end, + maps:to_list(Item#tx.data) + ); +``` + +### format_data + +```erlang +format_data(Item, Indent) when is_list(Item#tx.data) -> + format_line("List:", Indent) ++ + lists:map( + fun(ListItem) -> + format(ListItem, Indent + 1) + end, + Item#tx.data + ). 
+``` + +### format_binary + +```erlang +format_binary(Bin) -> + lists:flatten( + io_lib:format( + "~p", + [ + binary:part( + Bin, + 0, + case byte_size(Bin) of + X when X < ?BIN_PRINT -> X; + _ -> ?BIN_PRINT + end + ) + ] + ) + ). +``` + +### format_line + +```erlang +format_line(Str, Indent) -> format_line(Str, "", Indent). +``` + +### format_line + +```erlang +format_line(RawStr, Fmt, Ind) -> + io_lib:format( + [$\s || _ <- lists:seq(1, Ind * ?INDENT_SPACES)] ++ + lists:flatten(RawStr) ++ "\n", + Fmt + ). +``` + +### signer + +Return the address of the signer of an item, if it is signed. + +```erlang +signer(#tx { owner = ?DEFAULT_OWNER }) -> undefined; +``` + +### signer + +Return the address of the signer of an item, if it is signed. +Check if an item is signed. + +```erlang +signer(Item) -> crypto:hash(sha256, Item#tx.owner). +``` + +### is_signed + +Return the address of the signer of an item, if it is signed. +Check if an item is signed. + +```erlang +is_signed(Item) -> + Item#tx.signature =/= ?DEFAULT_SIG. +``` + +### id + +Return the ID of an item -- either signed or unsigned as specified. + +```erlang +id(Item) -> id(Item, unsigned). +``` + +### id + +Return the ID of an item -- either signed or unsigned as specified. + +```erlang +id(Item, Type) when not is_record(Item, tx) -> + id(normalize(Item), Type); +``` + +### id + +Return the ID of an item -- either signed or unsigned as specified. + +```erlang +id(Item = #tx { unsigned_id = ?DEFAULT_ID }, unsigned) -> + CorrectedItem = reset_ids(Item), + CorrectedItem#tx.unsigned_id; +``` + +### id + +Return the ID of an item -- either signed or unsigned as specified. + +```erlang +id(#tx { unsigned_id = UnsignedID }, unsigned) -> + UnsignedID; +``` + +### id + +Return the ID of an item -- either signed or unsigned as specified. + +```erlang +id(#tx { id = ?DEFAULT_ID }, signed) -> + not_signed; +``` + +### id + +Return the ID of an item -- either signed or unsigned as specified. 
+ +```erlang +id(#tx { id = ID }, signed) -> + ID. +``` + +### hd + +Return the first item in a bundle-map/list. + +```erlang +hd(#tx { data = #{ <<"1">> := Msg } }) -> Msg; +``` + +### hd + +Return the first item in a bundle-map/list. + +```erlang +hd(#tx { data = [First | _] }) -> First; +``` + +### hd + +Return the first item in a bundle-map/list. + +```erlang +hd(TX = #tx { data = Binary }) when is_binary(Binary) -> + ?MODULE:hd((deserialize(serialize(TX), binary))#tx.data); +``` + +### hd + +Return the first item in a bundle-map/list. + +```erlang +hd(#{ <<"1">> := Msg }) -> Msg; +``` + +### hd + +Return the first item in a bundle-map/list. +Convert an item containing a map or list into an Erlang map. + +```erlang +hd(_) -> undefined. +``` + +### map + +Return the first item in a bundle-map/list. +Convert an item containing a map or list into an Erlang map. + +```erlang +map(#tx { data = Map }) when is_map(Map) -> Map; +``` + +### map + +Return the first item in a bundle-map/list. +Convert an item containing a map or list into an Erlang map. + +```erlang +map(#tx { data = Data }) when is_list(Data) -> + maps:from_list( + lists:zipwith( + fun({Index, Item}) -> {integer_to_binary(Index), map(Item)} end, + lists:seq(1, length(Data)), + Data + ) + ); +``` + +### map + +Return the first item in a bundle-map/list. +Convert an item containing a map or list into an Erlang map. + +```erlang +map(Item = #tx { data = Data }) when is_binary(Data) -> + (maybe_unbundle(Item))#tx.data. +``` + +### member + +Check if an item exists in a bundle-map/list. + +```erlang +member(Key, Item) -> + find(Key, Item) =/= not_found. +``` + +### find + +Find an item in a bundle-map/list and return it. + +```erlang +find(Key, Map) when is_map(Map) -> + case maps:get(Key, Map, not_found) of + not_found -> find(Key, maps:values(Map)); + Item -> Item + end; +``` + +### find + +Find an item in a bundle-map/list and return it. 
+ +```erlang +find(_Key, []) -> not_found; +``` + +### find + +Find an item in a bundle-map/list and return it. + +```erlang +find(Key, [Item|Rest]) -> + case find(Key, Item) of + not_found -> find(Key, Rest); + CorrectItem -> CorrectItem + end; +``` + +### find + +Find an item in a bundle-map/list and return it. + +```erlang +find(Key, Item = #tx { id = Key }) -> Item; +``` + +### find + +Find an item in a bundle-map/list and return it. + +```erlang +find(Key, Item = #tx { data = Data }) -> + case id(Item, unsigned) of + Key -> Item; + _ -> + case is_binary(Data) of + false -> find(Key, Data); + true -> not_found + end + end; +``` + +### find + +Find an item in a bundle-map/list and return it. + +```erlang +find(_Key, _) -> + not_found. +``` + +### manifest_item + +Return the manifest item in a bundle-map/list. + +```erlang +manifest_item(#tx { manifest = Manifest }) when is_record(Manifest, tx) -> + Manifest; +``` + +### manifest_item + +Return the manifest item in a bundle-map/list. +Create a new data item. Should only be used for testing. + +```erlang +manifest_item(_Item) -> undefined. +``` + +### new_item + +Return the manifest item in a bundle-map/list. +Create a new data item. Should only be used for testing. + +```erlang +new_item(Target, Anchor, Tags, Data) -> + reset_ids( + #tx{ + format = ans104, + target = Target, + anchor = Anchor, + tags = Tags, + data = Data, + data_size = byte_size(Data) + } + ). +``` + +### sign_item + +Sign a data item. + +```erlang +sign_item(_, undefined) -> throw(wallet_not_found); +``` + +### sign_item + +Sign a data item. + +```erlang +sign_item(RawItem, {PrivKey, {KeyType, Owner}}) -> + Item = (normalize_data(RawItem))#tx{format = ans104, owner = Owner, signature_type = KeyType}, + % Generate the signature from the data item's data segment in 'signed'-ready mode. +``` + +### verify_item + +Verify the validity of a data item. 
+ +```erlang +verify_item(DataItem) -> + ValidID = verify_data_item_id(DataItem), + ValidSignature = verify_data_item_signature(DataItem), + ValidTags = verify_data_item_tags(DataItem), + ValidID andalso ValidSignature andalso ValidTags. +``` + +### type + +```erlang +type(Item) when is_record(Item, tx) -> + case lists:keyfind(<<"bundle-map">>, 1, Item#tx.tags) of + {<<"bundle-map">>, _} -> + case lists:keyfind(<<"map-format">>, 1, Item#tx.tags) of + {<<"map-format">>, <<"list">>} -> list; + _ -> map + end; + _ -> + binary + end; +``` + +### type + +```erlang +type(Data) when erlang:is_map(Data) -> + map; +``` + +### type + +```erlang +type(Data) when erlang:is_list(Data) -> + list; +``` + +### type + +```erlang +type(_) -> + binary. +``` + +### data_item_signature_data + +Generate the data segment to be signed for a data item. + +```erlang +data_item_signature_data(RawItem) -> + data_item_signature_data(RawItem, signed). +``` + +### data_item_signature_data + +```erlang +data_item_signature_data(RawItem, unsigned) -> + data_item_signature_data(RawItem#tx { owner = ?DEFAULT_OWNER }, signed); +``` + +### data_item_signature_data + +```erlang +data_item_signature_data(RawItem, signed) -> + true = enforce_valid_tx(RawItem), + NormItem = normalize_data(RawItem), + ar_deep_hash:hash([ + utf8_encoded("dataitem"), + utf8_encoded("1"), + %% Only SignatureType 1 is supported for now (RSA 4096) + utf8_encoded("1"), + <<(NormItem#tx.owner)/binary>>, + <<(NormItem#tx.target)/binary>>, + <<(NormItem#tx.anchor)/binary>>, + encode_tags(NormItem#tx.tags), + <<(NormItem#tx.data)/binary>> + ]). +``` + +### verify_data_item_id + +Verify the data item's ID matches the signature. + +```erlang +verify_data_item_id(DataItem) -> + ExpectedID = crypto:hash(sha256, DataItem#tx.signature), + DataItem#tx.id == ExpectedID. +``` + +### verify_data_item_signature + +Verify the data item's signature. 
+ +```erlang +verify_data_item_signature(DataItem) -> + SignatureData = data_item_signature_data(DataItem), + %?event({unsigned_id, hb_util:encode(id(DataItem, unsigned)), hb_util:encode(SignatureData)}), + ar_wallet:verify( + {DataItem#tx.signature_type, DataItem#tx.owner}, SignatureData, DataItem#tx.signature + ). +``` + +### verify_data_item_tags + +Verify the validity of the data item's tags. + +```erlang +verify_data_item_tags(DataItem) -> + ValidCount = length(DataItem#tx.tags) =< 128, + ValidTags = lists:all( + fun({Name, Value}) -> + byte_size(Name) =< 1024 andalso byte_size(Value) =< 3072 + end, + DataItem#tx.tags + ), + ValidCount andalso ValidTags. +``` + +### normalize + +Ensure that a data item (potentially containing a map or list) has a + +```erlang +normalize(Item) -> reset_ids(normalize_data(Item)). +``` + +### normalize_data + +Ensure that a data item (potentially containing a map or list) has a + +```erlang +normalize_data(not_found) -> throw(not_found); +``` + +### normalize_data + +Ensure that a data item (potentially containing a map or list) has a + +```erlang +normalize_data(Item = #tx{data = Bin}) when is_binary(Bin) -> + ?event({normalize_data, binary, Item}), + normalize_data_size(Item); +``` + +### normalize_data + +Ensure that a data item (potentially containing a map or list) has a + +```erlang +normalize_data(Bundle) when is_list(Bundle); is_map(Bundle) -> + ?event({normalize_data, bundle, Bundle}), + normalize_data(#tx{ data = Bundle }); +``` + +### normalize_data + +Ensure that a data item (potentially containing a map or list) has a + +```erlang +normalize_data(Item = #tx { data = Data }) when is_list(Data) -> + ?event({normalize_data, list, Item}), + normalize_data( + Item#tx{ + tags = add_list_tags(Item#tx.tags), + data = + maps:from_list( + lists:zipwith( + fun(Index, MapItem) -> + { + integer_to_binary(Index), + update_ids(normalize_data(MapItem)) + } + end, + lists:seq(1, length(Data)), + Data + ) + ) + } + ); +``` + +### 
normalize_data + +Ensure that a data item (potentially containing a map or list) has a + +```erlang +normalize_data(Item = #tx{data = Data}) -> + ?event({normalize_data, map, Item}), + normalize_data_size( + case serialize_bundle_data(Data, Item#tx.manifest) of + {Manifest, Bin} -> + Item#tx{ + data = Bin, + manifest = Manifest, + tags = + add_manifest_tags( + add_bundle_tags(Item#tx.tags), + id(Manifest, unsigned) + ) + }; + DirectBin -> + Item#tx{ + data = DirectBin, + tags = add_bundle_tags(Item#tx.tags) + } + end + ). +``` + +### normalize_data_size + +Reset the data size of a data item. Assumes that the data is already normalized. + +```erlang +normalize_data_size(Item = #tx{data = Bin}) when is_binary(Bin) -> + Item#tx{data_size = byte_size(Bin)}; +``` + +### normalize_data_size + +Reset the data size of a data item. Assumes that the data is already normalized. +Convert a #tx record to its binary representation. + +```erlang +normalize_data_size(Item) -> Item. +``` + +### serialize + +Reset the data size of a data item. Assumes that the data is already normalized. +Convert a #tx record to its binary representation. + +```erlang +serialize(not_found) -> throw(not_found); +``` + +### serialize + +Reset the data size of a data item. Assumes that the data is already normalized. +Convert a #tx record to its binary representation. + +```erlang +serialize(TX) -> serialize(TX, binary). +``` + +### serialize + +Reset the data size of a data item. Assumes that the data is already normalized. +Convert a #tx record to its binary representation. + +```erlang +serialize(TX, binary) when is_binary(TX) -> TX; +``` + +### serialize + +Reset the data size of a data item. Assumes that the data is already normalized. +Convert a #tx record to its binary representation. 
+ +```erlang +serialize(RawTX, binary) -> + true = enforce_valid_tx(RawTX), + TX = normalize(RawTX), + EncodedTags = encode_tags(TX#tx.tags), + << + (encode_signature_type(TX#tx.signature_type))/binary, + (TX#tx.signature)/binary, + (TX#tx.owner)/binary, + (encode_optional_field(TX#tx.target))/binary, + (encode_optional_field(TX#tx.anchor))/binary, + (encode_tags_size(TX#tx.tags, EncodedTags))/binary, + EncodedTags/binary, + (TX#tx.data)/binary + >>; +``` + +### serialize + +Reset the data size of a data item. Assumes that the data is already normalized. +Convert a #tx record to its binary representation. +Take an item and ensure that it is of valid form. Useful for ensuring + +```erlang +serialize(TX, json) -> + true = enforce_valid_tx(TX), + hb_json:encode(hb_message:convert(TX, <<"ans104@1.0">>, #{})). +``` + +### enforce_valid_tx + +Reset the data size of a data item. Assumes that the data is already normalized. +Convert a #tx record to its binary representation. +Take an item and ensure that it is of valid form. Useful for ensuring + +```erlang +enforce_valid_tx(List) when is_list(List) -> + lists:all(fun enforce_valid_tx/1, List); +``` + +### enforce_valid_tx + +Reset the data size of a data item. Assumes that the data is already normalized. +Convert a #tx record to its binary representation. +Take an item and ensure that it is of valid form. Useful for ensuring + +```erlang +enforce_valid_tx(Map) when is_map(Map) -> + lists:all(fun(Item) -> enforce_valid_tx(Item) end, maps:values(Map)); +``` + +### enforce_valid_tx + +Reset the data size of a data item. Assumes that the data is already normalized. +Convert a #tx record to its binary representation. +Take an item and ensure that it is of valid form. 
Useful for ensuring + +```erlang +enforce_valid_tx(TX) -> + ok_or_throw(TX, + check_type(TX, message), + {invalid_tx, TX} + ), + ok_or_throw(TX, + check_size(TX#tx.id, [0, 32]), + {invalid_field, id, TX#tx.id} + ), + ok_or_throw(TX, + check_size(TX#tx.unsigned_id, [0, 32]), + {invalid_field, unsigned_id, TX#tx.unsigned_id} + ), + ok_or_throw(TX, + check_size(TX#tx.anchor, [0, 32]), + {invalid_field, last_tx, TX#tx.anchor} + ), + ok_or_throw(TX, + check_size(TX#tx.owner, [0, byte_size(?DEFAULT_OWNER)]), + {invalid_field, owner, TX#tx.owner} + ), + ok_or_throw(TX, + check_size(TX#tx.target, [0, 32]), + {invalid_field, target, TX#tx.target} + ), + ok_or_throw(TX, + check_size(TX#tx.signature, [0, 65, byte_size(?DEFAULT_SIG)]), + {invalid_field, signature, TX#tx.signature} + ), + ok_or_throw(TX, + check_type(TX#tx.tags, list), + {invalid_field, tags, TX#tx.tags} + ), + lists:foreach( + fun({Name, Value}) -> + ok_or_throw(TX, + check_type(Name, binary), + {invalid_field, tag_name, Name} + ), + ok_or_throw(TX, + check_size(Name, {range, 0, ?MAX_TAG_NAME_SIZE}), + {invalid_field, tag_name, Name} + ), + ok_or_throw(TX, + check_type(Value, binary), + {invalid_field, tag_value, {Name, Value}} + ), + ok_or_throw(TX, + check_size(Value, {range, 0, ?MAX_TAG_VALUE_SIZE}), + {invalid_field, tag_value, {Name, Value}} + ); + (InvalidTagForm) -> + throw({invalid_field, tag, InvalidTagForm}) + end, + TX#tx.tags + ), + ok_or_throw( + TX, + check_type(TX#tx.data, binary) + orelse check_type(TX#tx.data, map) + orelse check_type(TX#tx.data, list), + {invalid_field, data, TX#tx.data} + ), + true. +``` + +### check_size + +Force that a binary is either empty or the given number of bytes. + +```erlang +check_size(Bin, {range, Start, End}) -> + check_type(Bin, binary) + andalso byte_size(Bin) >= Start + andalso byte_size(Bin) =< End; +``` + +### check_size + +Force that a binary is either empty or the given number of bytes. 
+ +```erlang +check_size(Bin, Sizes) -> + check_type(Bin, binary) + andalso lists:member(byte_size(Bin), Sizes). +``` + +### check_type + +Ensure that a value is of the given type. + +```erlang +check_type(Value, binary) when is_binary(Value) -> true; +``` + +### check_type + +Ensure that a value is of the given type. + +```erlang +check_type(Value, _) when is_binary(Value) -> false; +``` + +### check_type + +Ensure that a value is of the given type. + +```erlang +check_type(Value, list) when is_list(Value) -> true; +``` + +### check_type + +Ensure that a value is of the given type. + +```erlang +check_type(Value, _) when is_list(Value) -> false; +``` + +### check_type + +Ensure that a value is of the given type. + +```erlang +check_type(Value, map) when is_map(Value) -> true; +``` + +### check_type + +Ensure that a value is of the given type. + +```erlang +check_type(Value, _) when is_map(Value) -> false; +``` + +### check_type + +Ensure that a value is of the given type. + +```erlang +check_type(Value, message) -> + is_record(Value, tx) or is_map(Value) or is_list(Value); +``` + +### check_type + +Ensure that a value is of the given type. +Throw an error if the given value is not ok. + +```erlang +check_type(_Value, _) -> false. +``` + +### ok_or_throw + +Ensure that a value is of the given type. +Throw an error if the given value is not ok. + +```erlang +ok_or_throw(_, true, _) -> true; +``` + +### ok_or_throw + +Ensure that a value is of the given type. +Throw an error if the given value is not ok. + +```erlang +ok_or_throw(_TX, false, Error) -> + throw(Error). 
+``` + +### update_ids + +Take an item and ensure that both the unsigned and signed IDs are + +```erlang +update_ids(Item = #tx { unsigned_id = ?DEFAULT_ID }) -> + update_ids( + Item#tx { + unsigned_id = + crypto:hash( + sha256, + data_item_signature_data(Item, unsigned) + ) + } + ); +``` + +### update_ids + +Take an item and ensure that both the unsigned and signed IDs are + +```erlang +update_ids(Item = #tx { id = ?DEFAULT_ID, signature = ?DEFAULT_SIG }) -> + Item; +``` + +### update_ids + +Take an item and ensure that both the unsigned and signed IDs are + +```erlang +update_ids(Item = #tx { signature = ?DEFAULT_SIG }) -> + Item#tx { id = ?DEFAULT_ID }; +``` + +### update_ids + +Take an item and ensure that both the unsigned and signed IDs are + +```erlang +update_ids(Item = #tx { signature = Sig }) when Sig =/= ?DEFAULT_SIG -> + Item#tx { id = crypto:hash(sha256, Sig) }; +``` + +### update_ids + +Take an item and ensure that both the unsigned and signed IDs are +Re-calculate both of the IDs for an item. This is a wrapper + +```erlang +update_ids(TX) -> TX. +``` + +### reset_ids + +Take an item and ensure that both the unsigned and signed IDs are +Re-calculate both of the IDs for an item. This is a wrapper + +```erlang +reset_ids(Item) -> + update_ids(Item#tx { unsigned_id = ?DEFAULT_ID, id = ?DEFAULT_ID }). +``` + +### add_bundle_tags + +```erlang +add_bundle_tags(Tags) -> ?BUNDLE_TAGS ++ (Tags -- ?BUNDLE_TAGS). +``` + +### add_list_tags + +```erlang +add_list_tags(Tags) -> + (?BUNDLE_TAGS ++ (Tags -- ?BUNDLE_TAGS)) ++ ?LIST_TAGS. +``` + +### add_manifest_tags + +```erlang +add_manifest_tags(Tags, ManifestID) -> + lists:filter( + fun + ({<<"bundle-map">>, _}) -> false; + (_) -> true + end, + Tags + ) ++ [{<<"bundle-map">>, hb_util:encode(ManifestID)}]. 
+``` + +### finalize_bundle_data + +```erlang +finalize_bundle_data(Processed) -> + Length = <<(length(Processed)):256/integer>>, + Index = <<<<(byte_size(Data)):256/integer, ID/binary>> || {ID, Data} <- Processed>>, + Items = <<<> || {_, Data} <- Processed>>, + <>. +``` + +### to_serialized_pair + +```erlang +to_serialized_pair(Item) when is_binary(Item) -> + % Support bundling of bare binary payloads by wrapping them in a TX that + % is explicitly marked as a binary data item. +``` + +### to_serialized_pair + +```erlang +to_serialized_pair(Item) -> + % TODO: This is a hack to get the ID of the item. We need to do this because we may not + % have the ID in 'item' if it is just a map/list. We need to make this more efficient. +``` + +### serialize_bundle_data + +```erlang +serialize_bundle_data(Map, _Manifest) when is_map(Map) -> + % TODO: Make this compatible with the normal manifest spec. +``` + +### serialize_bundle_data + +```erlang +serialize_bundle_data(List, _Manifest) when is_list(List) -> + finalize_bundle_data(lists:map(fun to_serialized_pair/1, List)); +``` + +### serialize_bundle_data + +```erlang +serialize_bundle_data(Data, _Manifest) -> + throw({cannot_serialize_tx_data, must_be_map_or_list, Data}). +``` + +### new_manifest + +```erlang +new_manifest(Index) -> + TX = normalize(#tx{ + format = ans104, + tags = [ + {<<"data-protocol">>, <<"bundle-map">>}, + {<<"variant">>, <<"0.0.1">>} + ], + data = hb_json:encode(Index) + }), + TX. +``` + +### manifest + +```erlang +manifest(Map) when is_map(Map) -> Map; +``` + +### manifest + +```erlang +manifest(#tx { manifest = undefined }) -> undefined; +``` + +### manifest + +```erlang +manifest(#tx { manifest = ManifestTX }) -> + hb_json:decode(ManifestTX#tx.data). +``` + +### parse_manifest + +```erlang +parse_manifest(Item) when is_record(Item, tx) -> + parse_manifest(Item#tx.data); +``` + +### parse_manifest + +```erlang +parse_manifest(Bin) -> + hb_json:decode(Bin). 
+``` + +### encode_signature_type + +Only RSA 4096 is currently supported. + +```erlang +encode_signature_type({rsa, 65537}) -> + <<1, 0>>; +``` + +### encode_signature_type + +Only RSA 4096 is currently supported. + +```erlang +encode_signature_type(_) -> + unsupported_tx_format. +``` + +### encode_optional_field + +Encode an optional field (target, anchor) with a presence byte. + +```erlang +encode_optional_field(<<>>) -> + <<0>>; +``` + +### encode_optional_field + +Encode an optional field (target, anchor) with a presence byte. + +```erlang +encode_optional_field(Field) -> + <<1:8/integer, Field/binary>>. +``` + +### utf8_encoded + +Encode a UTF-8 string to binary. + +```erlang +utf8_encoded(String) -> + unicode:characters_to_binary(String, utf8). +``` + +### encode_tags_size + +```erlang +encode_tags_size([], <<>>) -> + <<0:64/little-integer, 0:64/little-integer>>; +``` + +### encode_tags_size + +```erlang +encode_tags_size(Tags, EncodedTags) -> + <<(length(Tags)):64/little-integer, (byte_size(EncodedTags)):64/little-integer>>. +``` + +### encode_tags + +Encode tags into a binary format using Apache Avro. + +```erlang +encode_tags([]) -> + <<>>; +``` + +### encode_tags + +Encode tags into a binary format using Apache Avro. + +```erlang +encode_tags(Tags) -> + EncodedBlocks = lists:flatmap( + fun({Name, Value}) -> + Res = [encode_avro_name(Name), encode_avro_value(Value)], + case lists:member(error, Res) of + true -> + throw({cannot_encode_empty_string, Name, Value}); + false -> + Res + end + end, + Tags + ), + TagCount = length(Tags), + ZigZagCount = encode_zigzag(TagCount), + <>. +``` + +### encode_avro_name + +Encode a string for Avro using ZigZag and VInt encoding. + +```erlang +encode_avro_name(<<>>) -> + % Zero length names are treated as a special case, due to the Avro encoder. 
+``` + +### encode_avro_name + +```erlang +encode_avro_name(String) -> + StringBytes = utf8_encoded(String), + Length = byte_size(StringBytes), + <<(encode_zigzag(Length))/binary, StringBytes/binary>>. +``` + +### encode_avro_value + +```erlang +encode_avro_value(<<>>) -> + % Zero length values are treated as a special case, due to the Avro encoder. +``` + +### encode_avro_value + +```erlang +encode_avro_value(Value) when is_binary(Value) -> + % Tag values can be raw binaries + Length = byte_size(Value), + <<(encode_zigzag(Length))/binary, Value/binary>>. +``` + +### encode_zigzag + +Encode an integer using ZigZag encoding. + +```erlang +encode_zigzag(Int) when Int >= 0 -> + encode_vint(Int bsl 1); +``` + +### encode_zigzag + +Encode an integer using ZigZag encoding. + +```erlang +encode_zigzag(Int) -> + encode_vint(Int bsl 1, -1). +``` + +### encode_vint + +Encode a ZigZag integer to VInt binary format. + +```erlang +encode_vint(ZigZag) -> + encode_vint(ZigZag, []). +``` + +### encode_vint + +```erlang +encode_vint(0, Acc) -> + list_to_binary(lists:reverse(Acc)); +``` + +### encode_vint + +```erlang +encode_vint(ZigZag, Acc) -> + VIntByte = ZigZag band 16#7F, + ZigZagShifted = ZigZag bsr 7, + case ZigZagShifted of + 0 -> encode_vint(0, [VIntByte | Acc]); + _ -> encode_vint(ZigZagShifted, [VIntByte bor 16#80 | Acc]) + end. +``` + +### deserialize + +Convert binary data back to a #tx record. + +```erlang +deserialize(not_found) -> throw(not_found); +``` + +### deserialize + +Convert binary data back to a #tx record. + +```erlang +deserialize(Binary) -> deserialize(Binary, binary). +``` + +### deserialize + +Convert binary data back to a #tx record. + +```erlang +deserialize(Item, binary) when is_record(Item, tx) -> + maybe_unbundle(Item); +``` + +### deserialize + +Convert binary data back to a #tx record. 
+ +```erlang +deserialize(Binary, binary) -> + %try + {SignatureType, Signature, Owner, Rest} = decode_signature(Binary), + {Target, Rest2} = decode_optional_field(Rest), + {Anchor, Rest3} = decode_optional_field(Rest2), + {Tags, Data} = decode_tags(Rest3), + maybe_unbundle( + reset_ids(#tx{ + format = ans104, + signature_type = SignatureType, + signature = Signature, + owner = Owner, + target = Target, + anchor = Anchor, + tags = Tags, + data = Data, + data_size = byte_size(Data) + }) + ); +%catch +% _:_:_Stack -> +% {error, invalid_item} +%end; +``` + +### deserialize + +Convert binary data back to a #tx record. + +```erlang +deserialize(Bin, json) -> + try + Map = hb_json:decode(Bin), + hb_message:convert(Map, <<"ans104@1.0">>, #{}) + catch + _:_:_Stack -> + {error, invalid_item} + end. +``` + +### maybe_unbundle + +```erlang +maybe_unbundle(Item) -> + Format = lists:keyfind(<<"bundle-format">>, 1, Item#tx.tags), + Version = lists:keyfind(<<"bundle-version">>, 1, Item#tx.tags), + case {Format, Version} of + {{<<"bundle-format">>, <<"binary">>}, {<<"bundle-version">>, <<"2.0.0">>}} -> + maybe_map_to_list(maybe_unbundle_map(Item)); + _ -> + Item + end. +``` + +### maybe_map_to_list + +```erlang +maybe_map_to_list(Item) -> + case lists:keyfind(<<"map-format">>, 1, Item#tx.tags) of + {<<"map-format">>, <<"List">>} -> + unbundle_list(Item); + _ -> + Item + end. +``` + +### unbundle_list + +```erlang +unbundle_list(Item) -> + Item#tx{ + data = + lists:map( + fun(Index) -> + maps:get(list_to_binary(integer_to_list(Index)), Item#tx.data) + end, + lists:seq(1, maps:size(Item#tx.data)) + ) + }. 
+``` + +### maybe_unbundle_map + +```erlang +maybe_unbundle_map(Bundle) -> + case lists:keyfind(<<"bundle-map">>, 1, Bundle#tx.tags) of + {<<"bundle-map">>, MapTXID} -> + case unbundle(Bundle) of + detached -> Bundle#tx { data = detached }; + Items -> + MapItem = find_single_layer(hb_util:decode(MapTXID), Items), + Map = hb_json:decode(MapItem#tx.data), + Bundle#tx{ + manifest = MapItem, + data = + maps:map( + fun(_K, TXID) -> + find_single_layer(hb_util:decode(TXID), Items) + end, + Map + ) + } + end; + _ -> + unbundle(Bundle) + end. +``` + +### find_single_layer + +An internal helper for finding an item in a single-layer of a bundle. + +```erlang +find_single_layer(UnsignedID, TX) when is_record(TX, tx) -> + find_single_layer(UnsignedID, TX#tx.data); +``` + +### find_single_layer + +An internal helper for finding an item in a single-layer of a bundle. + +```erlang +find_single_layer(UnsignedID, Items) -> + TX = lists:keyfind(UnsignedID, #tx.unsigned_id, Items), + case is_record(TX, tx) of + true -> TX; + false -> + throw({cannot_find_item, hb_util:encode(UnsignedID)}) + end. +``` + +### unbundle + +```erlang +unbundle(Item = #tx{data = <>}) -> + {ItemsBin, Items} = decode_bundle_header(Count, Content), + Item#tx{data = decode_bundle_items(Items, ItemsBin)}; +``` + +### unbundle + +```erlang +unbundle(#tx{data = <<>>}) -> detached. +``` + +### decode_bundle_items + +```erlang +decode_bundle_items([], <<>>) -> + []; +``` + +### decode_bundle_items + +```erlang +decode_bundle_items([{_ID, Size} | RestItems], ItemsBin) -> + [ + deserialize(binary:part(ItemsBin, 0, Size)) + | + decode_bundle_items( + RestItems, + binary:part( + ItemsBin, + Size, + byte_size(ItemsBin) - Size + ) + ) + ]. +``` + +### decode_bundle_header + +```erlang +decode_bundle_header(Count, Bin) -> decode_bundle_header(Count, Bin, []). 
+```
+
+### decode_bundle_header
+
+```erlang
+decode_bundle_header(0, ItemsBin, Header) ->
+ {ItemsBin, lists:reverse(Header)};
+```
+
+### decode_bundle_header
+
+```erlang
+decode_bundle_header(Count, <<Size:256/little-integer, ID:32/binary, Rest/binary>>, Header) ->
+ decode_bundle_header(Count - 1, Rest, [{ID, Size} | Header]).
+```
+
+### decode_signature
+
+Decode the signature from a binary format. Only RSA 4096 is currently supported.
+
+```erlang
+decode_signature(<<1, 0, Signature:512/binary, Owner:512/binary, Rest/binary>>) ->
+ {{rsa, 65537}, Signature, Owner, Rest};
+```
+
+### decode_signature
+
+Decode the signature from a binary format. Only RSA 4096 is currently supported.
+
+```erlang
+decode_signature(Other) ->
+ ?event({error_decoding_signature, Other}),
+ unsupported_tx_format.
+```
+
+### decode_tags
+
+Decode tags from a binary format using Apache Avro.
+
+```erlang
+decode_tags(<<0:64/little-integer, 0:64/little-integer, Rest/binary>>) ->
+ {[], Rest};
+```
+
+### decode_tags
+
+Decode tags from a binary format using Apache Avro.
+
+```erlang
+decode_tags(<<_TagCount:64/little-integer, _TagSize:64/little-integer, Binary/binary>>) ->
+ {Count, BlocksBinary} = decode_zigzag(Binary),
+ {Tags, Rest} = decode_avro_tags(BlocksBinary, Count),
+ %% Pull out the terminating zero
+ {0, Rest2} = decode_zigzag(Rest),
+ {Tags, Rest2}.
+```
+
+### decode_optional_field
+
+```erlang
+decode_optional_field(<<0, Rest/binary>>) ->
+ {<<>>, Rest};
+```
+
+### decode_optional_field
+
+```erlang
+decode_optional_field(<<1:8/integer, Field:32/binary, Rest/binary>>) ->
+ {Field, Rest}.
+```
+
+### decode_avro_tags
+
+Decode Avro blocks (for tags) from binary.
+
+```erlang
+decode_avro_tags(<<>>, _) ->
+ {[], <<>>};
+```
+
+### decode_avro_tags
+
+Decode Avro blocks (for tags) from binary.
+
+```erlang
+decode_avro_tags(Binary, Count) when Count =:= 0 ->
+ {[], Binary};
+```
+
+### decode_avro_tags
+
+Decode Avro blocks (for tags) from binary.
+
+```erlang
+decode_avro_tags(Binary, Count) ->
+ {NameSize, Rest} = decode_zigzag(Binary),
+ decode_avro_name(NameSize, Rest, Count).
+```
+
+### decode_avro_name
+
+```erlang
+decode_avro_name(0, Rest, _) ->
+ {[], Rest};
+```
+
+### decode_avro_name
+
+```erlang
+decode_avro_name(NameSize, Rest, Count) ->
+ <<Name:NameSize/binary, Rest2/binary>> = Rest,
+ {ValueSize, Rest3} = decode_zigzag(Rest2),
+ decode_avro_value(ValueSize, Name, Rest3, Count).
+```
+
+### decode_avro_value
+
+```erlang
+decode_avro_value(0, Name, Rest, Count) ->
+ {DecodedTags, NonAvroRest} = decode_avro_tags(Rest, Count - 1),
+ {[{Name, <<>>} | DecodedTags], NonAvroRest};
+```
+
+### decode_avro_value
+
+```erlang
+decode_avro_value(ValueSize, Name, Rest, Count) ->
+ <<Value:ValueSize/binary, Rest2/binary>> = Rest,
+ {DecodedTags, NonAvroRest} = decode_avro_tags(Rest2, Count - 1),
+ {[{Name, Value} | DecodedTags], NonAvroRest}.
+```
+
+### decode_zigzag
+
+Decode a VInt encoded ZigZag integer from binary.
+
+```erlang
+decode_zigzag(Binary) ->
+ {ZigZag, Rest} = decode_vint(Binary, 0, 0),
+ case ZigZag band 1 of
+ 1 -> {-(ZigZag bsr 1) - 1, Rest};
+ 0 -> {ZigZag bsr 1, Rest}
+ end.
+```
+
+### decode_vint
+
+```erlang
+decode_vint(<<>>, Result, _Shift) ->
+ {Result, <<>>};
+```
+
+### decode_vint
+
+```erlang
+decode_vint(<<Byte:8, Rest/binary>>, Result, Shift) ->
+ VIntPart = Byte band 16#7F,
+ NewResult = Result bor (VIntPart bsl Shift),
+ case Byte band 16#80 of
+ 0 -> {NewResult, Rest};
+ _ -> decode_vint(Rest, NewResult, Shift + 7)
+ end.
+``` + +### ar_bundles_test_ + +```erlang +ar_bundles_test_() -> + [ + {timeout, 30, fun test_no_tags/0}, + {timeout, 30, fun test_with_tags/0}, + {timeout, 30, fun test_with_zero_length_tag/0}, + {timeout, 30, fun test_unsigned_data_item_id/0}, + {timeout, 30, fun test_unsigned_data_item_normalization/0}, + {timeout, 30, fun test_empty_bundle/0}, + {timeout, 30, fun test_bundle_with_one_item/0}, + {timeout, 30, fun test_bundle_with_two_items/0}, + {timeout, 30, fun test_recursive_bundle/0}, + {timeout, 30, fun test_bundle_map/0}, + {timeout, 30, fun test_basic_member_id/0}, + {timeout, 30, fun test_deep_member/0}, + {timeout, 30, fun test_extremely_large_bundle/0}, + {timeout, 30, fun test_serialize_deserialize_deep_signed_bundle/0}, + {timeout, 30, fun test_encode_tags/0} + ]. +``` + +### test_encode_tags + +```erlang +test_encode_tags() -> + BinValue = <<1, 2, 3, 255, 254>>, + TestCases = [ + {simple_string_tags, [{<<"tag1">>, <<"value1">>}]}, + {binary_value_tag, [{<<"binary-tag">>, BinValue}]}, + {mixed_tags, + [ + {<<"string-tag">>, <<"string-value">>}, + {<<"binary-tag">>, BinValue} + ] + }, + {empty_value_tag, [{<<"empty-value-tag">>, <<>>}]}, + {unicode_tag, [{<<"unicode-tag">>, <<"你好世界">>}]} + ], + lists:foreach( + fun({Label, InputTags}) -> + Encoded = encode_tags(InputTags), + Wrapped = + << + (length(InputTags)):64/little, + (byte_size(Encoded)):64/little, + Encoded/binary + >>, + {DecodedTags, <<>>} = decode_tags(Wrapped), + ?assertEqual(InputTags, DecodedTags, Label) + end, + TestCases + ), + % Test case: Empty tags list + EmptyTags = [], + EncodedEmpty = encode_tags(EmptyTags), + ?assertEqual(<<>>, EncodedEmpty), + WrappedEmpty = <<0:64/little, 0:64/little>>, + {[], <<>>} = decode_tags(WrappedEmpty). +``` + +### run_test + +```erlang +run_test() -> + test_with_zero_length_tag(). 
+``` + +### test_no_tags + +```erlang +test_no_tags() -> + {Priv, Pub} = ar_wallet:new(), + {KeyType, Owner} = Pub, + Target = crypto:strong_rand_bytes(32), + Anchor = crypto:strong_rand_bytes(32), + DataItem = new_item(Target, Anchor, [], <<"data">>), + SignedDataItem = sign_item(DataItem, {Priv, Pub}), + ?assertEqual(true, verify_item(SignedDataItem)), + assert_data_item(KeyType, Owner, Target, Anchor, [], <<"data">>, SignedDataItem), + SignedDataItem2 = deserialize(serialize(SignedDataItem)), + ?assertEqual(SignedDataItem, SignedDataItem2), + ?assertEqual(true, verify_item(SignedDataItem2)), + assert_data_item(KeyType, Owner, Target, Anchor, [], <<"data">>, SignedDataItem2). +``` + +### test_with_tags + +```erlang +test_with_tags() -> + {Priv, Pub} = ar_wallet:new(), + {KeyType, Owner} = Pub, + Target = crypto:strong_rand_bytes(32), + Anchor = crypto:strong_rand_bytes(32), + Tags = [{<<"tag1">>, <<"value1">>}, {<<"tag2">>, <<"value2">>}], + DataItem = new_item(Target, Anchor, Tags, <<"taggeddata">>), + SignedDataItem = sign_item(DataItem, {Priv, Pub}), + ?assertEqual(true, verify_item(SignedDataItem)), + assert_data_item(KeyType, Owner, Target, Anchor, Tags, <<"taggeddata">>, SignedDataItem), + SignedDataItem2 = deserialize(serialize(SignedDataItem)), + ?assertEqual(SignedDataItem, SignedDataItem2), + ?assertEqual(true, verify_item(SignedDataItem2)), + assert_data_item(KeyType, Owner, Target, Anchor, Tags, <<"taggeddata">>, SignedDataItem2). +``` + +### test_with_zero_length_tag + +```erlang +test_with_zero_length_tag() -> + Item = normalize(#tx{ + format = ans104, + tags = [ + {<<"normal-tag-1">>, <<"tag1">>}, + {<<"empty-tag">>, <<>>}, + {<<"normal-tag-2">>, <<"tag2">>} + ], + data = <<"Typical data field.">> + }), + Serialized = serialize(Item), + Deserialized = deserialize(Serialized), + ?assertEqual(Item, Deserialized). 
+``` + +### test_unsigned_data_item_id + +```erlang +test_unsigned_data_item_id() -> + Item1 = deserialize( + serialize(reset_ids(#tx{format = ans104, data = <<"data1">>})) + ), + Item2 = deserialize( + serialize(reset_ids(#tx{format = ans104, data = <<"data2">>}))), + ?assertNotEqual(Item1#tx.unsigned_id, Item2#tx.unsigned_id). +``` + +### test_unsigned_data_item_normalization + +```erlang +test_unsigned_data_item_normalization() -> + NewItem = normalize(#tx{ format = ans104, data = <<"Unsigned data">> }), + ReNormItem = deserialize(serialize(NewItem)), + ?assertEqual(NewItem, ReNormItem). +``` + +### assert_data_item + +```erlang +assert_data_item(KeyType, Owner, Target, Anchor, Tags, Data, DataItem) -> + ?assertEqual(KeyType, DataItem#tx.signature_type), + ?assertEqual(Owner, DataItem#tx.owner), + ?assertEqual(Target, DataItem#tx.target), + ?assertEqual(Anchor, DataItem#tx.anchor), + ?assertEqual(Tags, DataItem#tx.tags), + ?assertEqual(Data, DataItem#tx.data), + ?assertEqual(byte_size(Data), DataItem#tx.data_size). +``` + +### test_empty_bundle + +```erlang +test_empty_bundle() -> + Bundle = serialize([]), + BundleItem = deserialize(Bundle), + ?assertEqual(#{}, BundleItem#tx.data). +``` + +### test_bundle_with_one_item + +```erlang +test_bundle_with_one_item() -> + Item = new_item( + crypto:strong_rand_bytes(32), + crypto:strong_rand_bytes(32), + [], + ItemData = crypto:strong_rand_bytes(1000) + ), + ?event({item, Item}), + Bundle = serialize([Item]), + ?event({bundle, Bundle}), + BundleItem = deserialize(Bundle), + ?event({bundle_item, BundleItem}), + ?assertEqual(ItemData, (maps:get(<<"1">>, BundleItem#tx.data))#tx.data). 
+``` + +### test_bundle_with_two_items + +```erlang +test_bundle_with_two_items() -> + Item1 = new_item( + crypto:strong_rand_bytes(32), + crypto:strong_rand_bytes(32), + [], + ItemData1 = crypto:strong_rand_bytes(32) + ), + Item2 = new_item( + crypto:strong_rand_bytes(32), + crypto:strong_rand_bytes(32), + [{<<"tag1">>, <<"value1">>}, {<<"tag2">>, <<"value2">>}], + ItemData2 = crypto:strong_rand_bytes(32) + ), + Bundle = serialize([Item1, Item2]), + BundleItem = deserialize(Bundle), + ?assertEqual(ItemData1, (maps:get(<<"1">>, BundleItem#tx.data))#tx.data), + ?assertEqual(ItemData2, (maps:get(<<"2">>, BundleItem#tx.data))#tx.data). +``` + +### test_recursive_bundle + +```erlang +test_recursive_bundle() -> + W = ar_wallet:new(), + Item1 = sign_item(#tx{ + id = crypto:strong_rand_bytes(32), + anchor = crypto:strong_rand_bytes(32), + data = <<1:256/integer>> + }, W), + Item2 = sign_item(#tx{ + id = crypto:strong_rand_bytes(32), + anchor = crypto:strong_rand_bytes(32), + data = [Item1] + }, W), + Item3 = sign_item(#tx{ + id = crypto:strong_rand_bytes(32), + anchor = crypto:strong_rand_bytes(32), + data = [Item2] + }, W), + Bundle = serialize([Item3]), + BundleItem = deserialize(Bundle), + #{<<"1">> := UnbundledItem3} = BundleItem#tx.data, + #{<<"1">> := UnbundledItem2} = UnbundledItem3#tx.data, + #{<<"1">> := UnbundledItem1} = UnbundledItem2#tx.data, + ?assert(verify_item(UnbundledItem1)), + % TODO: Verify bundled lists... +``` + +### test_bundle_map + +```erlang +test_bundle_map() -> + W = ar_wallet:new(), + Item1 = sign_item(#tx{ + format = ans104, + data = <<"item1_data">> + }, W), + Item2 = sign_item(#tx{ + format = ans104, + anchor = crypto:strong_rand_bytes(32), + data = #{<<"key1">> => Item1} + }, W), + Bundle = serialize(Item2), + BundleItem = deserialize(Bundle), + ?assertEqual(Item1#tx.data, (maps:get(<<"key1">>, BundleItem#tx.data))#tx.data), + ?assert(verify_item(BundleItem)). 
+``` + +### test_extremely_large_bundle + +```erlang +test_extremely_large_bundle() -> + W = ar_wallet:new(), + Data = crypto:strong_rand_bytes(100_000_000), + Norm = normalize(#tx { data = #{ <<"key">> => #tx { data = Data } } }), + Signed = sign_item(Norm, W), + Serialized = serialize(Signed), + Deserialized = deserialize(Serialized), + ?assert(verify_item(Deserialized)). +``` + +### test_basic_member_id + +```erlang +test_basic_member_id() -> + W = ar_wallet:new(), + Item = sign_item( + #tx{ + data = <<"data">> + }, + W + ), + ?assertEqual(true, member(Item#tx.id, Item)), + ?assertEqual(true, member(id(Item, unsigned), Item)), + ?assertEqual(false, member(crypto:strong_rand_bytes(32), Item)). +``` + +### test_deep_member + +```erlang +test_deep_member() -> + W = ar_wallet:new(), + Item = sign_item( + #tx{ + data = + #{<<"key1">> => + sign_item(#tx{ + data = <<"data">> + }, W) + } + }, + W + ), + Item2 = deserialize(serialize(sign_item( + #tx{ + data = #{ <<"key2">> => Item } + }, + W + ))), + ?assertEqual(true, member(<<"key1">>, Item2)), + ?assertEqual(true, member(<<"key2">>, Item2)), + ?assertEqual(true, member(Item#tx.id, Item2)), + ?assertEqual(true, member(Item2#tx.id, Item2)), + ?assertEqual(true, member(id(Item, unsigned), Item2)), + ?assertEqual(true, member(id(Item2, unsigned), Item2)), + ?assertEqual(false, member(crypto:strong_rand_bytes(32), Item2)). +``` + +### test_serialize_deserialize_deep_signed_bundle + +```erlang +test_serialize_deserialize_deep_signed_bundle() -> + W = ar_wallet:new(), + % Test that we can serialize, deserialize, and get the same IDs back. 
+```
+
+---
+
+*Generated from [ar_bundles.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_bundles.erl)*
diff --git a/docs/book/src/ar_deep_hash.erl.md b/docs/book/src/ar_deep_hash.erl.md
new file mode 100644
index 000000000..3cf098728
--- /dev/null
+++ b/docs/book/src/ar_deep_hash.erl.md
@@ -0,0 +1,62 @@
+# ar_deep_hash
+
+[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_deep_hash.erl)
+
+INTERNAL
+
+---
+
+## Exported Functions
+
+- `hash/1`
+
+---
+
+### hash
+
+```erlang
+hash(List) when is_list(List) -> hash_bin_or_list(List).
+%%% INTERNAL
+```
+
+### hash_bin_or_list
+
+```erlang
+hash_bin_or_list(Bin) when is_binary(Bin) ->
+ Tag = <<"blob", (integer_to_binary(byte_size(Bin)))/binary>>,
+ hash_bin(<<(hash_bin(Tag))/binary, (hash_bin(Bin))/binary>>);
+```
+
+### hash_bin_or_list
+
+```erlang
+hash_bin_or_list(List) when is_list(List) ->
+ Tag = <<"list", (integer_to_binary(length(List)))/binary>>,
+ hash_list(List, hash_bin(Tag)).
+```
+
+### hash_list
+
+```erlang
+hash_list([], Acc) ->
+ Acc;
+```
+
+### hash_list
+
+```erlang
+hash_list([Head | List], Acc) ->
+ HashPair = <<Acc/binary, (hash_bin_or_list(Head))/binary>>,
+ NewAcc = hash_bin(HashPair),
+ hash_list(List, NewAcc).
+```
+
+### hash_bin
+
+```erlang
+hash_bin(Bin) when is_binary(Bin) ->
+```
+
+---
+
+*Generated from [ar_deep_hash.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_deep_hash.erl)*
diff --git a/docs/book/src/ar_rate_limiter.erl.md b/docs/book/src/ar_rate_limiter.erl.md
new file mode 100644
index 000000000..70f86b8c9
--- /dev/null
+++ b/docs/book/src/ar_rate_limiter.erl.md
@@ -0,0 +1,187 @@
+# ar_rate_limiter
+
+[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_rate_limiter.erl)
+
+===================================================================
+Public interface.
+=================================================================== + +--- + +## Exported Functions + +- `handle_call/3` +- `handle_cast/2` +- `handle_info/2` +- `init/1` +- `off/0` +- `on/0` +- `start_link/1` +- `terminate/2` +- `throttle/3` + +--- + +### start_link + +```erlang +start_link(Opts) -> + gen_server:start_link({local, ?MODULE}, ?MODULE, Opts, []). +``` + +### throttle + +Hang until it is safe to make another request to the given Peer with the + +```erlang +throttle(Peer, Path, Opts) -> + case lists:member(Peer, hb_opts:get(throttle_exempt_peers, [], Opts)) of + true -> + ok; + false -> + throttle2(Peer, Path, Opts) + end. +``` + +### throttle2 + +```erlang +throttle2(Peer, Path, Opts) -> + Routes = hb_opts:get(throttle_exempt_paths, [], Opts), + IsExempt = + lists:any(fun(Route) -> hb_path:regex_matches(Path, Route) end, Routes), + case IsExempt of + true -> ok; + false -> + Res = catch gen_server:call(?MODULE, {throttle, Peer, Path}, infinity), + case Res of + {'EXIT', {noproc, {gen_server, call, _}}} -> + ok; + {'EXIT', Reason} -> + exit(Reason); + _ -> + ok + end + end. +``` + +### off + +Turn rate limiting off. + +```erlang +off() -> + gen_server:cast(?MODULE, turn_off). +``` + +### on + +Turn rate limiting on. + +```erlang +on() -> + gen_server:cast(?MODULE, turn_on). +``` + +### init + +```erlang +init(Opts) -> + process_flag(trap_exit, true), + {ok, #state{ traces = #{}, off = false, opts = Opts }}. +``` + +### handle_call + +```erlang +handle_call({throttle, _Peer, _Path}, _From, #state{ off = true } = State) -> + {reply, ok, State}; +``` + +### handle_call + +```erlang +handle_call({throttle, Peer, Path}, From, State) -> + gen_server:cast(?MODULE, {throttle, Peer, Path, From}), + {noreply, State}; +``` + +### handle_call + +```erlang +handle_call(Request, _From, State) -> + ?event(warning, {unhandled_call, {module, ?MODULE}, {request, Request}}), + {reply, ok, State}. 
+``` + +### handle_cast + +```erlang +handle_cast({throttle, Peer, Path, From}, State) -> + #state{ traces = Traces, opts = Opts } = State, + {Type, Limit} = hb_opts:get(throttle_rpm_by_path, Path, Opts), + Now = os:system_time(millisecond), + case hb_maps:get({Peer, Type}, Traces, not_found, Opts) of + not_found -> + gen_server:reply(From, ok), + Traces2 = hb_maps:put({Peer, Type}, {1, queue:from_list([Now])}, Traces, Opts), + {noreply, State#state{ traces = Traces2 }}; + {N, Trace} -> + {N2, Trace2} = cut_trace(N, queue:in(Now, Trace), Now, Opts), + %% The macro specifies requests per minute while the throttling window + %% is 30 seconds. +``` + +### handle_cast + +```erlang +handle_cast(turn_off, State) -> + {noreply, State#state{ off = true }}; +``` + +### handle_cast + +```erlang +handle_cast(turn_on, State) -> + {noreply, State#state{ off = false }}; +``` + +### handle_cast + +```erlang +handle_cast(Cast, State) -> + ?event(warning, {unhandled_cast, {module, ?MODULE}, {cast, Cast}}), + {noreply, State}. +``` + +### handle_info + +```erlang +handle_info(Message, State) -> + ?event(warning, {unhandled_info, {module, ?MODULE}, {message, Message}}), + {noreply, State}. +``` + +### terminate + +```erlang +terminate(_Reason, _State) -> + ok. +``` + +### cut_trace + +```erlang +cut_trace(N, Trace, Now, Opts) -> + {{value, Timestamp}, Trace2} = queue:out(Trace), + case Timestamp < Now - hb_opts:get(throttle_period, 30000, Opts) of + true -> + cut_trace(N - 1, Trace2, Now, Opts); + false -> + {N, Trace} + end. 
+``` + +--- + +*Generated from [ar_rate_limiter.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_rate_limiter.erl)* diff --git a/docs/book/src/ar_timestamp.erl.md b/docs/book/src/ar_timestamp.erl.md new file mode 100644 index 000000000..ddf4894c6 --- /dev/null +++ b/docs/book/src/ar_timestamp.erl.md @@ -0,0 +1,97 @@ +# ar_timestamp + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_timestamp.erl) + +A simple Erlang server that caches the current Arweave timestamp and +refreshes it periodically. + +--- + +## Exported Functions + +- `get/0` +- `start/0` + +--- + +### start + +Check if the server is already running, and if not, start it. + +```erlang +start() -> + ?event(starting_ar_timestamp_server), + case whereis(?MODULE) of + undefined -> spawn_server(); + PID -> + case is_process_alive(PID) of + true -> PID; + false -> spawn_server() + end + end. +``` + +### spawn_server + +Spawn a new server and its refresher. + +```erlang +spawn_server() -> + TSServer = + spawn(fun() -> cache(hb_client:arweave_timestamp()) end), + spawn(fun() -> refresher(TSServer) end), + register(?MODULE, TSServer), + TSServer. +``` + +### get + +Get the current timestamp from the server, starting the server if it + +```erlang +get() -> + ?event(getting_ar_timestamp), + PID = start(), + ?event({got_ar_timestamp_pid, PID}), + PID ! {get, self()}, + ?event(waiting_for_ar_timestamp), + receive + {timestamp, Timestamp} -> + ?event({got_ar_timestamp, Timestamp}), + Timestamp + end. +``` + +### cache + +Cache the current timestamp from Arweave. + +```erlang +cache(Current) -> + ?event(cache_waiting), + receive + {get, Pid} -> + ?event({got_get_request, Pid}), + Pid ! {timestamp, Current}, + ?event({sent_timestamp, Current}), + cache(Current); + {refresh, New} -> + ?event({refreshed_ar_timestamp, New}), + cache(New) + end. +``` + +### refresher + +Refresh the timestamp cache periodically. 
+ +```erlang +refresher(TSServer) -> + timer:sleep(?TIMEOUT), + TS = hb_client:arweave_timestamp(), + TSServer ! {refresh, TS}, +``` + +--- + +*Generated from [ar_timestamp.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_timestamp.erl)* diff --git a/docs/book/src/ar_tx.erl.md b/docs/book/src/ar_tx.erl.md new file mode 100644 index 000000000..72b14d7cb --- /dev/null +++ b/docs/book/src/ar_tx.erl.md @@ -0,0 +1,259 @@ +# ar_tx + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_tx.erl) + +The module with utilities for transaction creation, signing, and verification. + +--- + +## Exported Functions + +- `json_struct_to_tx/1` +- `new/4` +- `new/5` +- `sign/2` +- `tx_to_json_struct/1` +- `verify_tx_id/2` +- `verify/1` + +--- + +### new + +The module with utilities for transaction creation, signing, and verification. +Create a new transaction. + +```erlang +new(Dest, Reward, Qty, Last) -> + #tx{ + id = crypto:strong_rand_bytes(32), + anchor = Last, + quantity = Qty, + target = Dest, + data = <<>>, + data_size = 0, + reward = Reward + }. +``` + +### new + +```erlang +new(Dest, Reward, Qty, Last, SigType) -> + #tx{ + id = crypto:strong_rand_bytes(32), + anchor = Last, + quantity = Qty, + target = Dest, + data = <<>>, + data_size = 0, + reward = Reward, + signature_type = SigType + }. +``` + +### sign + +Cryptographically sign (claim ownership of) a transaction. + +```erlang +sign(TX, {PrivKey, {KeyType, Owner}}) -> + NewTX = TX#tx{ owner = Owner, signature_type = KeyType }, + Sig = ar_wallet:sign(PrivKey, signature_data_segment(NewTX)), + ID = crypto:hash(sha256, <>), + NewTX#tx{ id = ID, signature = Sig }. +``` + +### verify + +Verify whether a transaction is valid. + +```erlang +verify(TX) -> + do_verify(TX, verify_signature). +``` + +### verify_tx_id + +Verify the given transaction actually has the given identifier. 
+ +```erlang +verify_tx_id(ExpectedID, #tx{ id = ID } = TX) -> + ExpectedID == ID andalso verify_signature(TX, verify_signature) andalso verify_hash(TX). +``` + +### signature_data_segment + +Generate the data segment to be signed for a given TX. + +```erlang +signature_data_segment(TX) -> + List = [ + << (integer_to_binary(TX#tx.format))/binary >>, + << (TX#tx.owner)/binary >>, + << (TX#tx.target)/binary >>, + << (list_to_binary(integer_to_list(TX#tx.quantity)))/binary >>, + << (list_to_binary(integer_to_list(TX#tx.reward)))/binary >>, + << (TX#tx.anchor)/binary >>, + << (integer_to_binary(TX#tx.data_size))/binary >>, + << (TX#tx.data_root)/binary >> + ], + ar_deep_hash:hash(List). +``` + +### verify_signature + +Verify the transaction's signature. + +```erlang +verify_signature(TX = #tx{ signature_type = SigType }, verify_signature) -> + SignatureDataSegment = signature_data_segment(TX), + ar_wallet:verify({SigType, TX#tx.owner}, SignatureDataSegment, TX#tx.signature). +``` + +### verify_hash + +Verify that the transaction's ID is a hash of its signature. + +```erlang +verify_hash(#tx{ signature = Sig, id = ID }) -> + ID == crypto:hash(sha256, << Sig/binary >>). +``` + +### do_verify + +Verify transaction. + +```erlang +do_verify(TX, VerifySignature) -> + From = ar_wallet:to_address(TX#tx.owner, TX#tx.signature_type), + Checks = [ + {"quantity_negative", TX#tx.quantity >= 0}, + {"same_owner_as_target", (From =/= TX#tx.target)}, + {"tx_id_not_valid", verify_hash(TX)}, + {"tx_signature_not_valid", verify_signature(TX, VerifySignature)}, + {"tx_data_size_negative", TX#tx.data_size >= 0}, + {"tx_data_size_data_root_mismatch", (TX#tx.data_size == 0) == (TX#tx.data_root == <<>>)} + ], + collect_validation_results(TX#tx.id, Checks). 
+``` + +### collect_validation_results + +```erlang +collect_validation_results(_TXID, Checks) -> + KeepFailed = fun + ({_, true}) -> false; + ({ErrorCode, false}) -> {true, ErrorCode} + end, + case lists:filtermap(KeepFailed, Checks) of + [] -> true; + _ -> false + end. +``` + +### json_struct_to_tx + +```erlang +json_struct_to_tx(TXStruct) -> + Tags = + case hb_util:find_value(<<"tags">>, TXStruct) of + undefined -> + []; + Xs -> + Xs + end, + Data = hb_util:decode(hb_util:find_value(<<"data">>, TXStruct)), + Format = + case hb_util:find_value(<<"format">>, TXStruct) of + undefined -> + 1; + N when is_integer(N) -> + N; + N when is_binary(N) -> + binary_to_integer(N) + end, + Denomination = + case hb_util:find_value(<<"denomination">>, TXStruct) of + undefined -> + 0; + EncodedDenomination -> + MaybeDenomination = binary_to_integer(EncodedDenomination), + true = MaybeDenomination > 0, + MaybeDenomination + end, + TXID = hb_util:decode(hb_util:find_value(<<"id">>, TXStruct)), + 32 = byte_size(TXID), + #tx{ + format = Format, + id = TXID, + anchor = hb_util:decode(hb_util:find_value(<<"anchor">>, TXStruct)), + owner = hb_util:decode(hb_util:find_value(<<"owner">>, TXStruct)), + tags = [{hb_util:decode(Name), hb_util:decode(Value)} + %% Only the elements matching this pattern are included in the list. 
+``` + +### tx_to_json_struct + +```erlang +tx_to_json_struct( + #tx{ + id = ID, + format = Format, + anchor = Anchor, + owner = Owner, + tags = Tags, + target = Target, + quantity = Quantity, + data = Data, + reward = Reward, + signature = Sig, + data_size = DataSize, + data_root = DataRoot, + denomination = Denomination + }) -> + Fields = [ + {format, + case Format of + undefined -> + 1; + _ -> + Format + end}, + {id, hb_util:encode(ID)}, + {anchor, hb_util:encode(Anchor)}, + {owner, hb_util:encode(Owner)}, + {tags, + lists:map( + fun({Name, Value}) -> + { + [ + {name, hb_util:encode(Name)}, + {value, hb_util:encode(Value)} + ] + } + end, + Tags + ) + }, + {target, hb_util:encode(Target)}, + {quantity, integer_to_binary(Quantity)}, + {data, hb_util:encode(Data)}, + {data_size, integer_to_binary(DataSize)}, + {data_tree, []}, + {data_root, hb_util:encode(DataRoot)}, + {reward, integer_to_binary(Reward)}, + {signature, hb_util:encode(Sig)} + ], + Fields2 = + case Denomination > 0 of + true -> + Fields ++ [{denomination, integer_to_binary(Denomination)}]; + false -> + Fields + end, +``` + +--- + +*Generated from [ar_tx.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_tx.erl)* diff --git a/docs/book/src/ar_wallet.erl.md b/docs/book/src/ar_wallet.erl.md new file mode 100644 index 000000000..390076def --- /dev/null +++ b/docs/book/src/ar_wallet.erl.md @@ -0,0 +1,456 @@ +# ar_wallet + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_wallet.erl) + +Utilities for manipulating wallets. + +--- + +## Exported Functions + +- `from_json/1` +- `from_json/2` +- `hmac/1` +- `hmac/2` +- `load_key/1` +- `load_key/2` +- `load_keyfile/1` +- `load_keyfile/2` +- `new_keyfile/2` +- `new/0` +- `new/1` +- `sign/2` +- `sign/3` +- `to_address/1` +- `to_address/2` +- `to_json/1` +- `to_pubkey/1` +- `to_pubkey/2` +- `verify/3` +- `verify/4` + +--- + +### new + +Utilities for manipulating wallets. + +```erlang +new() -> + new({rsa, 65537}). 
+``` + +### new + +```erlang +new(KeyType = {KeyAlg, PublicExpnt}) when KeyType =:= {rsa, 65537} -> + {[_, Pub], [_, Pub, Priv|_]} = {[_, Pub], [_, Pub, Priv|_]} + = crypto:generate_key(KeyAlg, {4096, PublicExpnt}), + {{KeyType, Priv, Pub}, {KeyType, Pub}}. +``` + +### sign + +Sign some data with a private key. + +```erlang +sign(Key, Data) -> + sign(Key, Data, sha256). +``` + +### sign + +sign some data, hashed using the provided DigestType. + +```erlang +sign({{rsa, PublicExpnt}, Priv, Pub}, Data, DigestType) when PublicExpnt =:= 65537 -> + rsa_pss:sign( + Data, + DigestType, + #'RSAPrivateKey'{ + publicExponent = PublicExpnt, + modulus = binary:decode_unsigned(Pub), + privateExponent = binary:decode_unsigned(Priv) + } + ); +``` + +### sign + +sign some data, hashed using the provided DigestType. + +```erlang +sign({{KeyType, Priv, Pub}, {KeyType, Pub}}, Data, DigestType) -> + sign({KeyType, Priv, Pub}, Data, DigestType). +``` + +### hmac + +```erlang +hmac(Data) -> + hmac(Data, sha256). +``` + +### hmac + +Verify that a signature is correct. + +```erlang +hmac(Data, DigestType) -> crypto:mac(hmac, DigestType, <<"ar">>, Data). +``` + +### verify + +Verify that a signature is correct. + +```erlang +verify(Key, Data, Sig) -> + verify(Key, Data, Sig, sha256). +``` + +### verify + +```erlang +verify({{rsa, PublicExpnt}, Pub}, Data, Sig, DigestType) when PublicExpnt =:= 65537 -> + rsa_pss:verify( + Data, + DigestType, + Sig, + #'RSAPublicKey'{ + publicExponent = PublicExpnt, + modulus = binary:decode_unsigned(Pub) + } + ). +``` + +### to_pubkey + +Find a public key from a wallet. + +```erlang +to_pubkey(Pubkey) -> + to_pubkey(Pubkey, ?DEFAULT_KEY_TYPE). +``` + +### to_pubkey + +```erlang +to_pubkey(PubKey, {rsa, 65537}) when bit_size(PubKey) == 256 -> + % Small keys are not secure, nobody is using them, the clause + % is for backwards-compatibility. 
+``` + +### to_pubkey + +```erlang +to_pubkey({{_, _, PubKey}, {_, PubKey}}, {rsa, 65537}) -> + PubKey; +``` + +### to_pubkey + +```erlang +to_pubkey(PubKey, {rsa, 65537}) -> + PubKey. +``` + +### to_address + +Generate an address from a public key. + +```erlang +to_address(Pubkey) -> + to_address(Pubkey, ?DEFAULT_KEY_TYPE). +``` + +### to_address + +```erlang +to_address(PubKey, {rsa, 65537}) when bit_size(PubKey) == 256 -> + PubKey; +``` + +### to_address + +```erlang +to_address({{_, _, PubKey}, {_, PubKey}}, _) -> + to_address(PubKey); +``` + +### to_address + +```erlang +to_address(PubKey, {rsa, 65537}) -> + to_rsa_address(PubKey); +``` + +### to_address + +```erlang +to_address(PubKey, {ecdsa, 256}) -> + to_ecdsa_address(PubKey). +``` + +### new_keyfile + +Generate a new wallet public and private key, with a corresponding keyfile. + +```erlang +new_keyfile(KeyType, WalletName) when is_list(WalletName) -> + new_keyfile(KeyType, list_to_binary(WalletName)); +``` + +### new_keyfile + +Generate a new wallet public and private key, with a corresponding keyfile. + +```erlang +new_keyfile(KeyType, WalletName) -> + {Pub, Priv, Key} = + case KeyType of + {?RSA_SIGN_ALG, PublicExpnt} -> + {[Expnt, Pb], [Expnt, Pb, Prv, P1, P2, E1, E2, C]} = + crypto:generate_key(rsa, {?RSA_PRIV_KEY_SZ, PublicExpnt}), + PrivKey = {KeyType, Prv, Pb}, + Ky = to_json(PrivKey), + {Pb, Prv, Ky}; + {?ECDSA_SIGN_ALG, secp256k1} -> + {OrigPub, Prv} = crypto:generate_key(ecdh, secp256k1), + CompressedPub = compress_ecdsa_pubkey(OrigPub), + PrivKey = {KeyType, Prv, CompressedPub}, + Ky = to_json(PrivKey), + {CompressedPub, Prv, Ky}; + {?EDDSA_SIGN_ALG, ed25519} -> + {{_, Prv, Pb}, _} = new(KeyType), + PrivKey = {KeyType, Prv, Pb}, + Ky = to_json(PrivKey), + {Pb, Prv, Ky} + end, + Filename = wallet_filepath(WalletName, Pub, KeyType), + filelib:ensure_dir(Filename), + file:write_file(Filename, Key), + {{KeyType, Priv, Pub}, {KeyType, Pub}}. 
+``` + +### wallet_filepath + +```erlang +wallet_filepath(Wallet) -> + filename:join([?WALLET_DIR, binary_to_list(Wallet)]). +``` + +### wallet_filepath2 + +```erlang +wallet_filepath2(Wallet) -> + filename:join([?WALLET_DIR, binary_to_list(Wallet)]). +``` + +### load_key + +Read the keyfile for the key with the given address from disk. + +```erlang +load_key(Addr) -> + load_key(Addr, #{}). +``` + +### load_key + +Read the keyfile for the key with the given address from disk. + +```erlang +load_key(Addr, Opts) -> + Path = hb_util:encode(Addr), + case filelib:is_file(Path) of + false -> + Path2 = wallet_filepath2(hb_util:encode(Addr)), + case filelib:is_file(Path2) of + false -> + not_found; + true -> + load_keyfile(Path2, Opts) + end; + true -> + load_keyfile(Path, Opts) + end. +``` + +### load_keyfile + +Extract the public and private key from a keyfile. + +```erlang +load_keyfile(File) -> + load_keyfile(File, #{}). +``` + +### load_keyfile + +Extract the public and private key from a keyfile. + +```erlang +load_keyfile(File, Opts) -> + {ok, Body} = file:read_file(File), + from_json(Body, Opts). 
+``` + +### to_json + +Convert a wallet private key to JSON (JWK) format + +```erlang +to_json({PrivKey, _PubKey}) -> + to_json(PrivKey); +``` + +### to_json + +Convert a wallet private key to JSON (JWK) format + +```erlang +to_json({{?RSA_SIGN_ALG, PublicExpnt}, Priv, Pub}) when PublicExpnt =:= 65537 -> + hb_json:encode(#{ + kty => <<"RSA">>, + ext => true, + e => hb_util:encode(<>), + n => hb_util:encode(Pub), + d => hb_util:encode(Priv) + }); +``` + +### to_json + +Convert a wallet private key to JSON (JWK) format + +```erlang +to_json({{?ECDSA_SIGN_ALG, secp256k1}, Priv, CompressedPub}) -> + % For ECDSA, we need to expand the compressed pubkey to get X,Y coordinates + % This is a simplified version - ideally we'd implement pubkey expansion + hb_json:encode(#{ + kty => <<"EC">>, + crv => <<"secp256k1">>, + d => hb_util:encode(Priv) + % TODO: Add x and y coordinates from expanded pubkey + }); +``` + +### to_json + +Convert a wallet private key to JSON (JWK) format + +```erlang +to_json({{?EDDSA_SIGN_ALG, ed25519}, Priv, Pub}) -> + hb_json:encode(#{ + kty => <<"OKP">>, + alg => <<"EdDSA">>, + crv => <<"Ed25519">>, + x => hb_util:encode(Pub), + d => hb_util:encode(Priv) + }). +``` + +### from_json + +Parse a wallet from JSON (JWK) format + +```erlang +from_json(JsonBinary) -> + from_json(JsonBinary, #{}). 
+``` + +### from_json + +Parse a wallet from JSON (JWK) format with options + +```erlang +from_json(JsonBinary, Opts) -> + Key = hb_json:decode(JsonBinary), + {Pub, Priv, KeyType} = + case hb_maps:get(<<"kty">>, Key, undefined, Opts) of + <<"EC">> -> + XEncoded = hb_maps:get(<<"x">>, Key, undefined, Opts), + YEncoded = hb_maps:get(<<"y">>, Key, undefined, Opts), + PrivEncoded = hb_maps:get(<<"d">>, Key, undefined, Opts), + OrigPub = iolist_to_binary([<<4:8>>, hb_util:decode(XEncoded), + hb_util:decode(YEncoded)]), + Pb = compress_ecdsa_pubkey(OrigPub), + Prv = hb_util:decode(PrivEncoded), + KyType = {?ECDSA_SIGN_ALG, secp256k1}, + {Pb, Prv, KyType}; + <<"OKP">> -> + PubEncoded = hb_maps:get(<<"x">>, Key, undefined, Opts), + PrivEncoded = hb_maps:get(<<"d">>, Key, undefined, Opts), + Pb = hb_util:decode(PubEncoded), + Prv = hb_util:decode(PrivEncoded), + KyType = {?EDDSA_SIGN_ALG, ed25519}, + {Pb, Prv, KyType}; + _ -> + PubEncoded = hb_maps:get(<<"n">>, Key, undefined, Opts), + PrivEncoded = hb_maps:get(<<"d">>, Key, undefined, Opts), + Pb = hb_util:decode(PubEncoded), + Prv = hb_util:decode(PrivEncoded), + KyType = {?RSA_SIGN_ALG, 65537}, + {Pb, Prv, KyType} + end, + {{KeyType, Priv, Pub}, {KeyType, Pub}}. +``` + +### to_rsa_address + +```erlang +to_rsa_address(PubKey) -> + hash_address(PubKey). +``` + +### hash_address + +```erlang +hash_address(PubKey) -> + crypto:hash(sha256, PubKey). +``` + +### to_ecdsa_address + +```erlang +to_ecdsa_address(PubKey) -> + hb_keccak:key_to_ethereum_address(PubKey). +``` + +### wallet_filepath + +```erlang +wallet_filepath(WalletName, PubKey, KeyType) -> + wallet_filepath(wallet_name(WalletName, PubKey, KeyType)). +``` + +### wallet_name + +```erlang +wallet_name(wallet_address, PubKey, KeyType) -> + hb_util:encode(to_address(PubKey, KeyType)); +``` + +### wallet_name + +```erlang +wallet_name(WalletName, _, _) -> + WalletName. 
+``` + +### compress_ecdsa_pubkey + +```erlang +compress_ecdsa_pubkey(<<4:8, PubPoint/binary>>) -> + PubPointMid = byte_size(PubPoint) div 2, + <<X:PubPointMid/binary, Y:PubPointMid/unit:8>> = PubPoint, + PubKeyHeader = + case Y rem 2 of + 0 -> <<2:8>>; + 1 -> <<3:8>> + end, +``` + +--- + +*Generated from [ar_wallet.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_wallet.erl)* diff --git a/docs/book/src/dev_apply.erl.md b/docs/book/src/dev_apply.erl.md new file mode 100644 index 000000000..7b838406c --- /dev/null +++ b/docs/book/src/dev_apply.erl.md @@ -0,0 +1,350 @@ +# dev_apply + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_apply.erl) + +A device that executes AO resolutions. It can be passed a key that +refers to a path stored in the base message to execute upon the base or +message referenced by the `source` key. +Alternatively, a `base` and `request` pair can be passed to execute +together via invoking the `pair` key. +When given a message with a `base` and `request` key, the default handler +will invoke `pair` upon it, setting the `path` in the resulting request to +the key that `apply` was invoked with. +Paths found in keys interpreted by this device can contain a `base:` or +`request:` prefix to indicate the message from which the path should be +retrieved. If no such prefix is present, the `Request` message is checked +first, and the `Base` message is checked second. + +--- + +## Exported Functions + +- `default/4` +- `info/1` +- `pair/3` + +--- + +### info + +A device that executes AO resolutions. It can be passed a key that +The device info. Forwards all keys aside `pair`, `keys` and `set` are + +```erlang +info(_) -> + #{ + excludes => [<<"keys">>, <<"set">>, <<"set_path">>, <<"remove">>], + default => fun default/4 + }. +``` + +### default + +The default handler. 
If the `base` and `request` keys are present in + +```erlang +default(Key, Base, Request, Opts) -> + ?event(debug_apply, {req, {key, Key}, {base, Base}, {request, Request}}), + FoundBase = hb_maps:get(<<"base">>, Request, not_found, Opts), + FoundRequest = hb_maps:get(<<"request">>, Request, not_found, Opts), + case {FoundBase, FoundRequest} of + {B, R} when B =/= not_found andalso R =/= not_found -> + pair(Key, Base, Request, Opts); + _ -> + eval(Base, Request#{ <<"apply-path">> => Key }, Opts) + end. +``` + +### eval + +Apply a request. We source the `base` message for the request either + +```erlang +eval(Base, Request, Opts) -> + maybe + ?event({eval, {base, Base}, {request, Request}}), + {ok, ApplyBase} ?= + case find_path(<<"source">>, Base, Request, Opts) of + {ok, SourcePath} -> + find_key(SourcePath, Base, Request, Opts); + {error, path_not_found, _} -> + % If the base is not found, we return the base for this + % request, minus the device (which will, inherently, be + % `apply@1.0' and cause recursion). +``` + +### pair + +Apply the message found at `request` to the message found at `base`. + +```erlang +pair(Base, Request, Opts) -> + pair(<<"undefined">>, Base, Request, Opts). +``` + +### pair + +Apply the message found at `request` to the message found at `base`. 
+ +```erlang +pair(PathToSet, Base, Request, Opts) -> + maybe + {ok, RequestPath} ?= find_path(<<"request">>, Base, Request, Opts), + {ok, BasePath} ?= find_path(<<"base">>, Base, Request, Opts), + ?event({eval_pair, {base_source, BasePath}, {request_source, RequestPath}}), + {ok, RequestSource} ?= find_key(RequestPath, Base, Request, Opts), + {ok, BaseSource} ?= find_key(BasePath, Base, Request, Opts), + PreparedRequest = + case PathToSet of + <<"undefined">> -> RequestSource; + _ -> RequestSource#{ <<"path">> => PathToSet } + end, + ?event({eval_pair, {base, BaseSource}, {request, PreparedRequest}}), + hb_ao:resolve(BaseSource, PreparedRequest, Opts) + else + Error -> error_to_message(Error) + end. +``` + +### find_path + +Resolve the given path on the message as `message@1.0`. + +```erlang +find_path(Path, Base, Request, Opts) -> + Res = + hb_ao:get_first( + [ + {{as, <<"message@1.0">>, Request}, Path}, + {{as, <<"message@1.0">>, Base}, Path} + ], + path_not_found, + Opts + ), + case Res of + path_not_found -> {error, path_not_found, Path}; + Value -> {ok, Value} + end. 
+``` + +### find_key + +Find the value of the source key, supporting `base:` and `request:` + +```erlang +find_key(Path, Base, Request, Opts) -> + BaseAs = {as, <<"message@1.0">>, Base}, + RequestAs = {as, <<"message@1.0">>, Request}, + MaybeResolve = + case hb_path:term_to_path_parts(Path) of + [BinKey|RestKeys] -> + case binary:split(BinKey, <<":">>) of + [<<"base">>, <<"">>] -> + {message, Base}; + [<<"request">>, <<"">>] -> + {message, Request}; + [<<"base">>, Key] -> + {resolve, [{BaseAs, normalize_path([Key|RestKeys])}]}; + [Req, Key] when Req == <<"request">> orelse Req == <<"req">> -> + {resolve, [{RequestAs, normalize_path([Key|RestKeys])}]}; + [_] -> + {resolve, [ + {RequestAs, normalize_path(Path)}, + {BaseAs, normalize_path(Path)} + ]} + end; + _ -> {error, invalid_path, Path} + end, + case MaybeResolve of + Err = {error, _, _} -> Err; + {message, Message} -> {ok, Message}; + {resolve, Sources} -> + ?event( + {resolving_from_sources, + {path, Path}, + {sources, Sources} + } + ), + case hb_ao:get_first(Sources, source_not_found, Opts) of + source_not_found -> {error, source_not_found, Path}; + Source -> {ok, Source} + end + end. +``` + +### normalize_path + +Normalize the path. + +```erlang +normalize_path(Path) -> + case hb_path:to_binary(Path) of + <<"">> -> <<"/">>; + P -> P + end. +``` + +### error_to_message + +Convert an error to a message. + +```erlang +error_to_message({error, invalid_path, ErrPath}) -> + {error, #{ + <<"body">> => + <<"Path `", (normalize_path(ErrPath))/binary, "` is invalid.">> + }}; +``` + +### error_to_message + +Convert an error to a message. + +```erlang +error_to_message({error, source_not_found, ErrPath}) -> + {error, #{ + <<"body">> => + << + "Source path `", + (normalize_path(ErrPath))/binary, + "` to apply not found." + >> + }}; +``` + +### error_to_message + +Convert an error to a message. 
+ +```erlang +error_to_message({error, path_not_found, ErrPath}) -> + {error, #{ + <<"body">> => + << + "Path `", + (normalize_path(ErrPath))/binary, + "` to apply not found." + >> + }}; +``` + +### error_to_message + +Convert an error to a message. + +```erlang +error_to_message(Error) -> + Error. +``` + +### resolve_key_test + +```erlang +resolve_key_test() -> + hb:init(), + Base = #{ + <<"device">> => <<"apply@1.0">>, + <<"body">> => <<"/~meta@1.0/build/node">>, + <<"irrelevant">> => <<"irrelevant">> + }, + Request = #{ + <<"irrelevant2">> => <<"irrelevant2">>, + <<"path">> => <<"body">> + }, + ?assertEqual({ok, <<"HyperBEAM">>}, hb_ao:resolve(Base, Request, #{})). +``` + +### resolve_pair_test + +```erlang +resolve_pair_test() -> + Base = #{ + <<"device">> => <<"apply@1.0">>, + <<"data-container">> => #{ <<"relevant">> => <<"DATA">> }, + <<"base">> => <<"data-container">>, + <<"irrelevant">> => <<"irrelevant">> + }, + Request = #{ + <<"irrelevant2">> => <<"irrelevant2">>, + <<"data-path">> => <<"relevant">>, + <<"request">> => <<"data-path">>, + <<"path">> => <<"pair">> + }, + ?assertEqual({ok, <<"DATA">>}, hb_ao:resolve(Base, Request, #{})). +``` + +### reverse_resolve_pair_test + +```erlang +reverse_resolve_pair_test() -> + ?assertEqual( + {ok, <<"TEST">>}, + hb_ao:resolve( + << + "/~meta@1.0/build", + "/node~apply@1.0&node=TEST&base=request:&request=base:" + >>, + #{} + ) + ). +``` + +### resolve_with_prefix_test + +```erlang +resolve_with_prefix_test() -> + ShortTraceLen = hb_opts:get(short_trace_len), + Node = hb_http_server:start_node(), + ?assertEqual( + {ok, ShortTraceLen}, + hb_http:request( + <<"GET">>, + Node, + <<"/~meta@1.0/info/request:debug-info~apply@1.0">>, + #{ + <<"debug-info">> => <<"short_trace_len">> + }, + #{} + ) + ). 
+``` + +### apply_over_http_test + +```erlang +apply_over_http_test() -> + Node = hb_http_server:start_node(), + Signed = + hb_message:commit( + #{ + <<"device">> => <<"apply@1.0">>, + <<"user-path">> => <<"/user-request/test-key">>, + <<"user-request">> => + #{ + <<"test-key">> => <<"DATA">> + } + }, + #{ priv_wallet => hb:wallet() } + ), + ?assertEqual( + {ok, <<"DATA">>}, + hb_ao:resolve( + Signed#{ <<"path">> => <<"/user-path">> }, + #{ priv_wallet => hb:wallet() } + ) + ), + ?assertEqual( + {ok, <<"DATA">>}, + hb_http:request( + <<"GET">>, + Node, + <<"/user-path">>, + Signed, + #{ priv_wallet => hb:wallet() } + ) + ). +``` + +--- + +*Generated from [dev_apply.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_apply.erl)* diff --git a/docs/book/src/dev_arweave.erl.md b/docs/book/src/dev_arweave.erl.md new file mode 100644 index 000000000..9177e68dc --- /dev/null +++ b/docs/book/src/dev_arweave.erl.md @@ -0,0 +1,355 @@ +# dev_arweave + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_arweave.erl) + +A device that provides access to Arweave network information, relayed +from a designated node. +The node(s) that are used to query data may be configured by altering the +`/arweave` route in the node`s configuration message. + +--- + +## Exported Functions + +- `block/3` +- `current/3` +- `status/3` +- `tx/3` + +--- + +### status + +A device that provides access to Arweave network information, relayed +Proxy the `/info` endpoint from the Arweave node. +Returns the given transaction, if known to the client node(s), as an + +```erlang +status(_Base, _Request, Opts) -> + request(<<"GET">>, <<"/info">>, Opts). +``` + +### tx + +A device that provides access to Arweave network information, relayed +Proxy the `/info` endpoint from the Arweave node. 
+Returns the given transaction, if known to the client node(s), as an + +```erlang +tx(Base, Request, Opts) -> + case hb_maps:get(<<"method">>, Request, <<"GET">>, Opts) of + <<"POST">> -> post_tx(Base, Request, Opts); + <<"GET">> -> get_tx(Base, Request, Opts) + end. +``` + +### post_tx + +Upload a transaction to Arweave, using the node's default bundler (see + +```erlang +post_tx(_Base, Request, Opts) -> + case hb_client:upload(Request, Opts) of + Res = {ok, _} -> + ?event(arweave, {uploaded, Request}), + CacheRes = hb_cache:write(Request, Opts), + ?event(arweave, + {cache_uploaded_message, + {msg, Request}, + {status, + case CacheRes of {ok, _} -> ok; + _ -> failed + end + } + } + ), + Res; + Res -> + Res + end. +``` + +### get_tx + +Get a transaction ID from the Arweave node, as indicated by the `tx` key + +```erlang +get_tx(Base, Request, Opts) -> + case find_txid(Base, Request, Opts) of + not_found -> {error, not_found}; + TXID -> + case request(<<"GET">>, <<"/tx/", TXID/binary>>, Opts) of + {ok, TXHeader} -> + ?event(arweave, {retrieved_tx_header, {tx, TXID}}), + maybe_add_data(TXID, TXHeader, Base, Request, Opts); + Other -> Other + end + end. +``` + +### maybe_add_data + +Handle the optional adding of data to the transaction header, depending + +```erlang +maybe_add_data(TXID, Header, Base, Request, Opts) -> + GetData = + hb_util:atom(hb_ao:get_first( + [ + {Request, <<"data">>}, + {Base, <<"data">>} + ], + true, + Opts + )), + case hb_util:atom(GetData) of + false -> + {ok, Header}; + _ -> + case data(Base, Request, Opts) of + {ok, Data} -> + FullMessage = Header#{ <<"data">> => Data }, + ?event( + arweave, + {retrieved_tx_with_data, + {id, TXID}, + {data_size, byte_size(Data)}, + {message, FullMessage} + } + ), + {ok, FullMessage}; + {error, Reason} -> + ?event(arweave, + {data_retrieval_failed_after_header, + {id, TXID}, + {error, Reason} + } + ), + if GetData =/= always -> {ok, Header}; + true -> {error, Reason} + end + end + end. 
+``` + +### data + +Retrieve the data of a transaction from Arweave. + +```erlang +data(Base, Request, Opts) -> + case find_txid(Base, Request, Opts) of + not_found -> {error, not_found}; + TXID -> + ?event(arweave, {retrieving_tx_data, {tx, TXID}}), + request(<<"GET">>, <<"/raw/", TXID/binary>>, Opts) + end. +``` + +### block + +Retrieve (and cache) block information from Arweave. If the `block` key + +```erlang +block(Base, Request, Opts) -> + Block = + hb_ao:get_first( + [ + {Request, <<"block">>}, + {Base, <<"block">>} + ], + not_found, + Opts + ), + case Block of + <<"current">> -> current(Base, Request, Opts); + not_found -> current(Base, Request, Opts); + ID when ?IS_ID(ID) -> block({id, ID}, Opts); + MaybeHeight -> + try hb_util:int(MaybeHeight) of + Int -> block({height, Int}, Opts) + catch + _:_ -> + { + error, + <<"Invalid block reference `", MaybeHeight/binary, "`">> + } + end + end. +``` + +### block + +```erlang +block({id, ID}, Opts) -> + case hb_cache:read(ID, Opts) of + {ok, Block} -> + ?event(arweave, {retrieved_block_from_cache, {id, ID}}), + {ok, Block}; + not_found -> + request(<<"GET">>, <<"/block/hash/", ID/binary>>, Opts) + end; +``` + +### block + +```erlang +block({height, Height}, Opts) -> + case dev_arweave_block_cache:read(Height, Opts) of + {ok, Block} -> + ?event(arweave, {retrieved_block_from_cache, {height, Height}}), + {ok, Block}; + not_found -> + request( + <<"GET">>, + <<"/block/height/", (hb_util:bin(Height))/binary>>, + Opts + ) + end. +``` + +### current + +Retrieve the current block information from Arweave. +Find the transaction ID to retrieve from Arweave based on the request or + +```erlang +current(_Base, _Request, Opts) -> + request(<<"GET">>, <<"/block/current">>, Opts). +%%% Internal Functions +``` + +### find_txid + +Retrieve the current block information from Arweave. 
+Find the transaction ID to retrieve from Arweave based on the request or + +```erlang +find_txid(Base, Request, Opts) -> + hb_ao:get_first( + [ + {Request, <<"tx">>}, + {Base, <<"tx">>} + ], + not_found, + Opts + ). +``` + +### request + +Make a request to the Arweave node and parse the response into an + +```erlang +request(Method, Path, Opts) -> + ?event(arweave, {arweave_request, {method, Method}, {path, Path}}), + Res = + hb_http:request( + #{ + <<"path">> => <<"/arweave", Path/binary>>, + <<"method">> => Method + }, + Opts + ), + to_message(Path, Res, Opts). +``` + +### to_message + +Transform a response from the Arweave node into an AO-Core message. + +```erlang +to_message(Path = <<"/raw/", _/binary>>, {ok, #{ <<"body">> := Body }}, _Opts) -> + ?event(arweave, + {arweave_raw_response, + {path, Path}, + {data_size, byte_size(Body)} + } + ), + {ok, Body}; +``` + +### to_message + +Transform a response from the Arweave node into an AO-Core message. + +```erlang +to_message(Path = <<"/block/", _/binary>>, {ok, #{ <<"body">> := Body }}, Opts) -> + Block = hb_message:convert(Body, <<"structured@1.0">>, <<"json@1.0">>, Opts), + ?event(arweave, + {arweave_block_response, + {path, Path}, + {block, Block} + } + ), + CacheRes = dev_arweave_block_cache:write(Block, Opts), + ?event(arweave, + {cached_arweave_block, + {path, Path}, + {result, CacheRes} + } + ), + {ok, Block}; +``` + +### to_message + +Transform a response from the Arweave node into an AO-Core message. + +```erlang +to_message(Path, {ok, #{ <<"body">> := Body }}, Opts) -> + % All other responses that are `OK' status are converted from JSON to an + % AO-Core message. 
+``` + +### post_ans104_tx_test + +```erlang +post_ans104_tx_test() -> + ServerOpts = #{ store => [hb_test_utils:test_store()] }, + Server = hb_http_server:start_node(ServerOpts), + ClientOpts = + #{ + store => [hb_test_utils:test_store()], + priv_wallet => hb:wallet() + }, + Msg = + hb_message:commit( + #{ + <<"variant">> => <<"ao.N.1">>, + <<"type">> => <<"Process">>, + <<"data">> => <<"test-data">> + }, + ClientOpts, + #{ <<"commitment-device">> => <<"ans104@1.0">> } + ), + {ok, PostRes} = + hb_http:post( + Server, + Msg#{ + <<"path">> => <<"/~arweave@2.9-pre/tx">>, + <<"codec-device">> => <<"ans104@1.0">> + }, + ClientOpts + ), + ?assertMatch(#{ <<"status">> := 200 }, PostRes), + SignedID = hb_message:id(Msg, signed, ClientOpts), + {ok, GetRes} = + hb_http:get( + Server, <<"/", SignedID/binary>>, + ClientOpts + ), + ?assertMatch( + #{ + <<"status">> := 200, + <<"variant">> := <<"ao.N.1">>, + <<"type">> := <<"Process">>, + <<"data">> := <<"test-data">> + }, + GetRes + ), +``` + +--- + +*Generated from [dev_arweave.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_arweave.erl)* diff --git a/docs/book/src/dev_arweave_block_cache.erl.md b/docs/book/src/dev_arweave_block_cache.erl.md new file mode 100644 index 000000000..4977f2067 --- /dev/null +++ b/docs/book/src/dev_arweave_block_cache.erl.md @@ -0,0 +1,99 @@ +# dev_arweave_block_cache + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_arweave_block_cache.erl) + +A module that performs caching operations for the Arweave device, +focused on ensuring that block metadata is queriable via pseudo-paths. + +--- + +## Exported Functions + +- `heights/1` +- `latest/1` +- `path/2` +- `read/2` +- `write/2` + +--- + +### latest + +A module that performs caching operations for the Arweave device, +The pseudo-path prefix which the Arweave block cache should use. +Get the latest block from the cache. 
+ +```erlang +latest(Opts) -> + case heights(Opts) of + {ok, []} -> + ?event(arweave_cache, no_blocks_in_cache), + not_found; + {ok, Blocks} -> + Latest = lists:max(Blocks), + ?event(arweave_cache, {latest_block_from_cache, {latest, Latest}}), + {ok, Latest} + end. +``` + +### heights + +Get the list of blocks from the cache. + +```erlang +heights(Opts) -> + AllBlocks = + hb_cache:list_numbered( + hb_store:path(hb_opts:get(store, no_viable_store, Opts), [ + ?ARWEAVE_BLOCK_CACHE_PREFIX, + <<"block">>, + <<"height">> + ]), + Opts + ), + ?event(arweave_cache, {listed_blocks, length(AllBlocks)}), + {ok, AllBlocks}. +``` + +### read + +Read a block from the cache. + +```erlang +read(Block, Opts) -> + Res = hb_cache:read(path(Block, Opts), Opts), + ?event(arweave_cache, {read_block, {reference, Block}, {result, Res}}), + Res. +``` + +### path + +Return the path of a block that will be used in the cache. + +```erlang +path(Block, Opts) when is_integer(Block) -> + hb_store:path(hb_opts:get(store, no_viable_store, Opts), [ + ?ARWEAVE_BLOCK_CACHE_PREFIX, + <<"block">>, + <<"height">>, + hb_util:bin(Block) + ]). +``` + +### write + +Write a block to the cache and create pseudo-paths for it. + +```erlang +write(Block, Opts) -> + {ok, Height} = hb_maps:find(<<"height">>, Block, Opts), + {ok, BlockID} = hb_maps:find(<<"indep_hash">>, Block, Opts), + {ok, BlockHash} = hb_maps:find(<<"hash">>, Block, Opts), + {ok, MsgID} = hb_cache:write(Block, Opts), + % Link the independent hash and the dependent hash to the written AO-Core + % message ID. 
+``` + +--- + +*Generated from [dev_arweave_block_cache.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_arweave_block_cache.erl)* diff --git a/docs/book/src/dev_auth_hook.erl.md b/docs/book/src/dev_auth_hook.erl.md new file mode 100644 index 000000000..b83cc5828 --- /dev/null +++ b/docs/book/src/dev_auth_hook.erl.md @@ -0,0 +1,431 @@ +# dev_auth_hook + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_auth_hook.erl) + +A device offering an on-request hook that signs incoming messages with +node-hosted wallets, in accordance with the node operator's configuration. +It is intended for deployment in environments where a node's users have +intrinsic reasons for trusting the node outside of the scope of this device. +For example, if executed on a node running in a Trusted Execution Environment +with `~snp@1.0`, or a node they operate or is operated by a trusted +third-party. +This device utilizes the `generator` interface type which other devices may +implement. The generator is used to find/create a secret based on a user's +request, which is then passed to the `~proxy-wallet@1.0` device and matched +with a wallet which is used to sign the request. The `generator` interface +may implement the following keys: +
+    `generate` (optional): A key that generates a secret based on a
+                           user's request. May return either the secret
+                           directly, or a message with a `secret` key. If 
+                           a message is returned, it is assumed to be a
+                           modified version of the user's request and is
+                           used for further processing.
+    `finalize` (optional): A key that takes the message sequence after this
+                           device has processed it and returns it in a
+                           modified form.
+
+At present, the `~cookie-secret@1.0` and `~http-auth@1.0` devices implement +the `generator` interface. For example, the following hook definition will +use the `~cookie-secret@1.0` device to generate and manage wallets for +users, with authentication details stored in cookies: +
+  "on": {
+    "request": {
+      "device": "auth-hook@1.0",
+      "secret-provider": {
+        "device": "cookie-secret@1.0"
+      }
+    }
+  }
+
+`~auth-hook@1.0` expects to receive a `secret-provider` key in the hook +base message. It may optionally also take a `generate-path` and +`finalize-path`, which are used to generate the secret and post-process the +response. If either `X-path` keys are not present, the `generate` and +`finalize` paths are used upon the `secret-provider` message. If the secret +provider's device does not implement these keys, the operations are skipped. +Node operators may also specify a `when` message inside their hook definition +which is used to determine when messages should be signed. The supported keys +are: +
+    `committers`: always | uncommitted | [committer1, or committer2, or ...]
+    `keys`: always | [key1, or key2, or ...]
+
+Both keys are optional and can be combined to form 'and' conditions. For +example, the following hook definition will sign all uncommitted requests +that have the `Authorization` header: +
+  "on": {
+    "request": {
+      "device": "auth-hook@1.0",
+      "when": {
+            "keys": ["authorization"],
+            "committers": "uncommitted"
+        }
+      }
+    }
+
+ +--- + +## Exported Functions + +- `request/3` + +--- + +### request + +A device offering an on-request hook that signs incoming messages with +Process an incoming request through a key provider. The key provider + +```erlang +request(Base, HookReq, Opts) -> + ?event({auth_hook_request, {base, Base}, {hook_req, HookReq}}), + maybe + % Get the key provider from options and short-circuit if none is + % provided. +``` + +### is_relevant + +Check if the request is relevant to the hook base. Node operators may + +```erlang +is_relevant(Base, Request, MessageSequence, Opts) -> + Committers = is_relevant_from_committers(Base, Request, Opts), + Keys = + lists:any( + fun(Msg) -> is_relevant_from_keys(Base, Msg, Opts) end, + [Request | MessageSequence] + ), + ?event({auth_hook_is_relevant, {committers, Committers}, {keys, Keys}}), + if Committers andalso Keys -> true; + true -> {skip, {committers, Committers}, {keys, Keys}} + end. +``` + +### is_relevant_from_committers + +Check if the request is relevant to the hook base based on the committers + +```erlang +is_relevant_from_committers(Base, Request, Opts) -> + Config = + hb_util:deep_get( + [<<"when">>, <<"committers">>], + Base, + <<"uncommitted">>, + Opts + ), + ?event({auth_hook_is_relevant_from_committers, {config, Config}, {base, Base}}), + case Config of + <<"always">> -> true; + <<"uncommitted">> -> hb_message:signers(Request, Opts) == []; + RelevantCommitters -> + lists:any( + fun(Signer) -> + lists:member(Signer, RelevantCommitters) + end, + hb_message:signers(Request, Opts) + ) + end. 
+``` + +### is_relevant_from_keys + +Check if the request is relevant to the hook base based on the presence + +```erlang +is_relevant_from_keys(_Base, ID, _Opts) when is_binary(ID) -> + false; +``` + +### is_relevant_from_keys + +Check if the request is relevant to the hook base based on the presence + +```erlang +is_relevant_from_keys(Base, {as, _, Msg}, Opts) -> + is_relevant_from_keys(Base, Msg, Opts); +``` + +### is_relevant_from_keys + +Check if the request is relevant to the hook base based on the presence + +```erlang +is_relevant_from_keys(Base, {resolve, Msg}, Opts) -> + is_relevant_from_keys(Base, Msg, Opts); +``` + +### is_relevant_from_keys + +Check if the request is relevant to the hook base based on the presence + +```erlang +is_relevant_from_keys(Base, Request, Opts) -> + Config = hb_util:deep_get([<<"when">>, <<"keys">>], Base, <<"always">>, Opts), + ?event( + { + auth_hook_is_relevant_from_keys, + {config, Config}, + {base, Base}, + {request, Request} + } + ), + case Config of + <<"always">> -> true; + RelevantKeys -> + lists:any( + fun(Key) -> + case hb_maps:find(Key, Request, Opts) of + {ok, _} -> true; + error -> false + end + end, + RelevantKeys + ) + end. +``` + +### generate_secret + +Normalize authentication credentials, generating new ones if needed. + +```erlang +generate_secret(Provider, Request, Opts) -> + case call_provider(<<"generate">>, Provider, Request, Opts) of + {error, not_found} -> + ?event({no_generate_handler, Provider}), + {ok, Provider, strip_sensitive(Request, Opts)}; + {error, Err} -> + % Forward the error. The main handler will fail to match this and + % return the error to the user. +``` + +### strip_sensitive + +Strip the `secret` field from a request. +Generate a wallet with the key if the `wallet` field is not present in + +```erlang +strip_sensitive(Request, Opts) -> + hb_maps:without([<<"secret">>], Request, Opts). +``` + +### generate_wallet + +Strip the `secret` field from a request. 
+Generate a wallet with the key if the `wallet` field is not present in + +```erlang +generate_wallet(Provider, Request, Opts) -> + {ok, #{ <<"body">> := WalletID }} = + dev_secret:generate(Provider, Request, Opts), + ?event({generated_wallet, WalletID}), + {ok, Provider, refresh_opts(Opts)}. +``` + +### sign_request + +Sign a request using the configured key provider + +```erlang +sign_request(Provider, Msg, Opts) -> + case hb_maps:get(<<"skip-commit">>, Provider, true, Opts) of + false -> + % Skip signing and return the normalized message. +``` + +### maybe_sign_messages + +Process a sequence of messages, signing those marked for signing + +```erlang +maybe_sign_messages(Provider, SignedReq, Opts) -> + Parsed = hb_singleton:from(SignedReq, Opts), + ?event({auth_hook_parsed_messages, {sequence_length, length(Parsed)}}), + SignKey = hb_opts:get(auth_hook_commit_key, ?DEFAULT_COMMIT_KEY, Opts), + Processed = maybe_sign_messages(Provider, SignKey, Parsed, Opts), + {ok, Processed}. +``` + +### maybe_sign_messages + +```erlang +maybe_sign_messages(_Provider, _Key, [], _Opts) -> []; +``` + +### maybe_sign_messages + +```erlang +maybe_sign_messages(Provider, Key, [Msg | Rest], Opts) when is_map(Msg) -> + case hb_util:atom(hb_maps:get(Key, Msg, false, Opts)) of + true -> + Uncommitted = hb_message:uncommitted(Msg, Opts), + ?event({auth_hook_signing_message, {uncommitted, Msg}}), + case sign_request(Provider, Uncommitted, Opts) of + {ok, Signed} -> + [ + Signed + | + maybe_sign_messages(Provider, Key, Rest, Opts) + ]; + {error, Err} -> + ?event({auth_hook_sign_error, Err}), + [{error, Err}] + end; + _ -> + [Msg | maybe_sign_messages(Provider, Key, Rest, Opts)] + end; +``` + +### maybe_sign_messages + +```erlang +maybe_sign_messages(Provider, Key, [Msg | Rest], Opts) -> + [Msg | maybe_sign_messages(Provider, Key, Rest, Opts)]. 
+``` + +### finalize + +Finalize the response by adding authentication state + +```erlang +finalize(KeyProvider, SignedReq, MessageSequence, Opts) -> + % Add the signed request and message sequence to the response, mirroring the + % structure of a normal `~hook@1.0' on-request hook. +``` + +### refresh_opts + +Refresh the options and log an event if they have changed. + +```erlang +refresh_opts(Opts) -> + NewOpts = hb_http_server:get_opts(Opts), + case NewOpts of + Opts -> ?event(auth_hook_no_opts_change); + _ -> + ?event( + {auth_hook_opts_changed, + {size_diff, + erlang:external_size(NewOpts) - + erlang:external_size(Opts) + } + } + ) + end, + NewOpts. +``` + +### find_provider + +Get the key provider from the base message or the defaults. + +```erlang +find_provider(Base, Opts) -> + case hb_maps:get(<<"secret-provider">>, Base, no_key_provider, Opts) of + no_key_provider -> + case hb_opts:get(hook_secret_provider, no_key_provider, Opts) of + no_key_provider -> {error, no_key_provider}; + SecretProvider -> SecretProvider + end; + SecretProvider when is_binary(SecretProvider) -> + {ok, #{ <<"device">> => SecretProvider }}; + SecretProvider when is_map(SecretProvider) -> + {ok, SecretProvider}; + _ -> + {error, invalid_auth_provider} + end. +``` + +### call_provider + +Find the appropriate handler for a key in the key provider. + +```erlang +call_provider(Key, Provider, Request, Opts) -> + ?event({call_provider, {key, Key}, {provider, Provider}, {req, Request}}), + ExecKey = hb_maps:get(<< Key/binary, "-path">>, Provider, Key, Opts), + ?event({call_provider, {exec_key, ExecKey}}), + case hb_ao:resolve(Provider, Request#{ <<"path">> => ExecKey }, Opts) of + {ok, Msg} when is_map(Msg) -> + % The result is a message. We revert the path to its original value. 
+``` + +### ignored_keys + +Default keys to ignore when signing + +```erlang +ignored_keys(Msg, Opts) -> + hb_maps:get( + <<"ignored-keys">>, + Msg, + hb_opts:get( + hook_auth_ignored_keys, + ?DEFAULT_IGNORED_KEYS, + Opts + ) + ). +``` + +### cookie_test + +```erlang +cookie_test() -> + % Start a node with a secret-provider that uses the cookie device. +``` + +### http_auth_test + +```erlang +http_auth_test() -> + % Start a node with the `~http-auth@1.0' device as the secret-provider. +``` + +### chained_preprocess_test + +```erlang +chained_preprocess_test() -> + % Start a node with the `~http-auth@1.0' device as the secret-provider, with + % a router chained afterwards in the request hook. +``` + +### when_test + +```erlang +when_test() -> + % Start a node with the `~http-auth@1.0' device as the secret-provider. Only + % request commitment with the hook if the `Authorization' header is present. +``` + +### signers_from_commitments_response + +The cookie hook test(s) call `GET /commitments`, which returns the + +```erlang +signers_from_commitments_response(Response, ServerWallet) -> + ServerAddress = ar_wallet:to_address(ServerWallet), + hb_maps:values(hb_maps:filtermap( + fun(Key, Value) when ?IS_ID(Key) -> + Type = hb_maps:get(<<"type">>, Value, not_found, #{}), + Committer = hb_maps:get(<<"committer">>, Value, not_found, #{}), + case {Type, Committer} of + {<<"rsa-pss-sha512">>, ServerAddress} -> false; + {<<"rsa-pss-sha512">>, _} -> {true, Committer}; + _ -> false + end; + (_Key, _Value) -> + false + end, + Response, + #{} +``` + +--- + +*Generated from [dev_auth_hook.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_auth_hook.erl)* diff --git a/docs/book/src/dev_cache.erl.md b/docs/book/src/dev_cache.erl.md new file mode 100644 index 000000000..a3eae0380 --- /dev/null +++ b/docs/book/src/dev_cache.erl.md @@ -0,0 +1,310 @@ +# dev_cache + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cache.erl) + +A device that 
looks up an ID from a local store and returns it, +honoring the `accept` key to return the correct format. The cache also +supports writing messages to the store, if the node message has the +writer's address in its `cache_writers` key. + +--- + +## Exported Functions + +- `link/3` +- `read/3` +- `write/3` + +--- + +### read + +A device that looks up an ID from a local store and returns it, +Read data from the cache. + +```erlang +read(_M1, M2, Opts) -> + Location = hb_ao:get(<<"target">>, M2, Opts), + ?event({read, {key_extracted, Location}}), + ?event(debug_gateway, cache_read), + case hb_cache:read(Location, Opts) of + {ok, Res} -> + ?event({read, {cache_result, ok, Res}}), + case hb_ao:get(<<"accept">>, M2, Opts) of + <<"application/aos-2">> -> + ?event(dev_cache, + {read, + {accept_header, <<"application/aos-2">>} + } + ), + JSONMsg = dev_json_iface:message_to_json_struct(Res, Opts), + ?event(dev_cache, {read, {json_message, JSONMsg}}), + {ok, + #{ + <<"body">> => hb_json:encode(JSONMsg), + <<"content-type">> => <<"application/aos-2">> + } + }; + _ -> + {ok, Res} + end; + not_found -> + % The cache does not have this ID,but it may still be an explicit + % `data/' path. +``` + +### write + +Write data to the cache. 
+ +```erlang +write(_M1, M2, Opts) -> + case is_trusted_writer(M2, Opts) of + true -> + ?event(dev_cache, {write, {trusted_writer, true}}), + Type = hb_ao:get(<<"type">>, M2, <<"single">>, Opts), + ?event(dev_cache, {write, {write_type, Type}}), + case Type of + <<"single">> -> + ?event(dev_cache, {write, {write_single_called}}), + write_single(M2, Opts); + <<"batch">> -> + ?event(dev_cache, {write, {write_batch_called}}), + hb_maps:map( + fun(_, Value) -> + ?event(dev_cache, {write, {batch_item, Value}}), + write_single(Value, Opts) + end, + hb_ao:get(<<"body">>, M2, Opts), + Opts + ); + _ -> + ?event(dev_cache, {write, {invalid_write_type, Type}}), + {error, + #{ + <<"status">> => 400, + <<"body">> => <<"Invalid write type.">> + } + } + end; + false -> + ?event(dev_cache, {write, {trusted_writer, false}}), + {error, + #{ + <<"status">> => 403, + <<"body">> => <<"Not authorized to write to the cache.">> + } + } + end. +``` + +### link + +Link a source to a destination in the cache. + +```erlang +link(_Base, Req, Opts) -> + case is_trusted_writer(Req, Opts) of + true -> + Source = hb_ao:get(<<"source">>, Req, Opts), + Destination = hb_ao:get(<<"destination">>, Req, Opts), + write_single(#{ + <<"operation">> => <<"link">>, + <<"source">> => Source, + <<"destination">> => Destination + }, Opts); + false -> + {error, not_authorized} + end. +``` + +### write_single + +Helper function to write a single data item to the cache. 
+ +```erlang +write_single(Msg, Opts) -> + Body = hb_ao:get(<<"body">>, Msg, Opts), + ?event(dev_cache, {write_single, {body_extracted, Body}}), + Location = hb_ao:get(<<"location">>, Msg, Opts), + ?event(dev_cache, {write_single, {location_extracted, Location}}), + Operation = hb_ao:get(<<"operation">>, Msg, <<"write">>, Opts), + ?event(dev_cache, {write_single, {operation, Operation}}), + case {Operation, Body, Location} of + {<<"write">>, not_found, _} -> + ?event(dev_cache, {write_single, {error, "No body to write"}}), + {error, + #{ + <<"status">> => 400, + <<"body">> => <<"No body to write.">> + } + }; + {<<"write">>, Binary, not_found} when is_binary(Binary) -> + % When asked to write only a binary, we do not calculate any + % alternative IDs. +``` + +### is_trusted_writer + +Verify that the request originates from a trusted writer. + +```erlang +is_trusted_writer(Req, Opts) -> + Signers = hb_message:signers(Req, Opts), + ?event(dev_cache, {is_trusted_writer, {signers, Signers}, {req, Req}}), + CacheWriters = hb_opts:get(cache_writers, [], Opts), + ?event(dev_cache, {is_trusted_writer, {cache_writers, CacheWriters}}), + AnyTrusted = lists:any(fun(Signer) -> lists:member(Signer, CacheWriters) end, Signers), + case AnyTrusted of + true -> + ?event(dev_cache, {is_trusted_writer, {trusted, true}}), + true; + _ -> + ?event(dev_cache, {is_trusted_writer, {trusted, false}}), + false + end. +``` + +### setup_test_env + +Create a test environment with a local store and node. 
+ +```erlang +setup_test_env() -> + Timestamp = integer_to_binary(os:system_time(millisecond)), + StorePrefix = <<"cache-TEST/remote-", Timestamp/binary>>, + ?event(dev_cache, {setup_test_env, {start, StorePrefix}}), + application:ensure_all_started(hb), + ?event(dev_cache, {setup_test_env, {hb_started}}), + LocalStore = + #{ <<"store-module">> => hb_store_fs, <<"name">> => StorePrefix }, + ?event(dev_cache, {setup_test_env, {local_store_configured, LocalStore}}), + hb_store:reset(LocalStore), + ?event(dev_cache, {setup_test_env, {store_reset}}), + Wallet = ar_wallet:new(), + Address = hb_util:human_id(ar_wallet:to_address(Wallet)), + ?event(dev_cache, {setup_test_env, {address, Address}}), + Node = hb_http_server:start_node(#{ + cache_control => [<<"no-cache">>, <<"no-store">>], + store => LocalStore, + cache_writers => [ + Address, + hb_util:human_id(ar_wallet:to_address(hb:wallet())) + ], + store_all_signed => false + }), + ?event(dev_cache, {setup_test_env, {node_started, Node}}), + TestOpts = #{ + cache_control => [<<"no-cache">>, <<"no-store">>], + store_all_signed => false, + store => [ + #{ + <<"store-module">> => hb_store_remote_node, + <<"node">> => Node, + priv_wallet => Wallet + } + ] + }, + {ok, TestOpts, [LocalStore, Wallet, Address, Node]}. +``` + +### write_to_cache + +Write data to the cache via HTTP. 
+ +```erlang +write_to_cache(Node, Data, Wallet) -> + ?event(dev_cache, {write_to_cache, {start, Node}}), + WriteMsg = #{ + <<"path">> => <<"/~cache@1.0/write">>, + <<"method">> => <<"POST">>, + <<"body">> => Data + }, + ?event(dev_cache, {write_to_cache, {message_created, WriteMsg}}), + SignedMsg = hb_message:commit(WriteMsg, Wallet), + ?event(dev_cache, {write_to_cache, {message_signed}}), + WriteResult = hb_http:post(Node, SignedMsg, #{}), + ?event(dev_cache, {write_to_cache, {http_post, WriteResult}}), + {ok, WriteResponse} = WriteResult, + ?event(dev_cache, {write_to_cache, {response_received, WriteResponse}}), + Status = hb_ao:get(<<"status">>, WriteResponse, 0, #{}), + ?assertEqual(200, Status), + Path = hb_ao:get(<<"path">>, WriteResponse, not_found, #{}), + ?assertNotEqual(not_found, Path), + ?event(dev_cache, {write_to_cache, {write_success, Path}}), + {WriteResponse, Path}. +``` + +### read_from_cache + +Read data from the cache via HTTP. + +```erlang +read_from_cache(Node, Path) -> + ?event(dev_cache, {read_from_cache, {start, Node, Path}}), + ReadMsg = #{ + <<"path">> => <<"/~cache@1.0/read">>, + <<"method">> => <<"GET">>, + <<"target">> => Path + }, + ?event(dev_cache, {read_from_cache, {request_created, ReadMsg}}), + ?event({test_read, request, ReadMsg}), + ReadResult = hb_http:get(Node, ReadMsg, #{}), + ?event(dev_cache, {read_from_cache, {http_get, ReadResult}}), + case ReadResult of + ReadResponse when is_binary(ReadResponse) -> + ?event(dev_cache, + {read_from_cache, + {response_binary, ReadResponse} + } + ), + ReadResponse; + {ok, ReadResponse} -> + ?event(dev_cache, {read_from_cache, {response_ok, ReadResponse}}), + ReadResponse; + {error, Reason} -> + ?event(dev_cache, {read_from_cache, {response_error, Reason}}), + {error, Reason} + end. 
+``` + +### cache_write_message_test + +Test that the cache can be written to and read from using the hb_cache + +```erlang +cache_write_message_test() -> + ?event(dev_cache, {cache_api_test, {start}}), + {ok, Opts, _} = setup_test_env(), + TestData = #{ + <<"test_key">> => <<"test_value">> + }, + ?event(dev_cache, {cache_api_test, {opts, Opts}}), + {ok, Path} = hb_cache:write(TestData, Opts), + ?event(dev_cache, {cache_api_test, {data_written, Path}}), + {ok, ReadData} = hb_cache:read(Path, Opts), + ?event(dev_cache, {cache_api_test, {data_read, ReadData}}), + ?assert(hb_message:match(TestData, ReadData, only_present, Opts)), + ?event(dev_cache, {cache_api_test}), + ok. +``` + +### cache_write_binary_test + +Ensure that we can write direct binaries to the cache. + +```erlang +cache_write_binary_test() -> + ?event(dev_cache, {cache_api_test, {start}}), + {ok, Opts, _} = setup_test_env(), + TestData = <<"test_binary">>, + {ok, Path} = hb_cache:write(TestData, Opts), + {ok, ReadData} = hb_cache:read(Path, Opts), + ?event(dev_cache, {cache_api_test, {data_read, ReadData}}), + ?assertEqual(TestData, ReadData), + ?event(dev_cache, {cache_api_test}), +``` + +--- + +*Generated from [dev_cache.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cache.erl)* diff --git a/docs/book/src/dev_cacheviz.erl.md b/docs/book/src/dev_cacheviz.erl.md new file mode 100644 index 000000000..331f2a566 --- /dev/null +++ b/docs/book/src/dev_cacheviz.erl.md @@ -0,0 +1,115 @@ +# dev_cacheviz + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cacheviz.erl) + +A device that generates renders (or renderable dot output) of a node's +cache. 
+ +--- + +## Exported Functions + +- `dot/3` +- `index/3` +- `js/3` +- `json/3` +- `svg/3` + +--- + +### dot + +A device that generates renders (or renderable dot output) of a node's +Output the dot representation of the cache, or a specific path within +Output the SVG representation of the cache, or a specific path within + +```erlang +dot(_, Req, Opts) -> + Target = hb_ao:get(<<"target">>, Req, all, Opts), + Dot = + hb_cache_render:cache_path_to_dot( + Target, + #{ + render_data => + hb_util:atom( + hb_ao:get(<<"render-data">>, Req, false, Opts) + ) + }, + Opts + ), + {ok, #{ <<"content-type">> => <<"text/vnd.graphviz">>, <<"body">> => Dot }}. +``` + +### svg + +A device that generates renders (or renderable dot output) of a node's +Output the dot representation of the cache, or a specific path within +Output the SVG representation of the cache, or a specific path within +Return a JSON representation of the cache graph, suitable for use with + +```erlang +svg(Base, Req, Opts) -> + {ok, #{ <<"body">> := Dot }} = dot(Base, Req, Opts), + ?event(cacheviz, {dot, Dot}), + Svg = hb_cache_render:dot_to_svg(Dot), + {ok, #{ <<"content-type">> => <<"image/svg+xml">>, <<"body">> => Svg }}. 
+``` + +### json + +A device that generates renders (or renderable dot output) of a node's +Output the dot representation of the cache, or a specific path within +Output the SVG representation of the cache, or a specific path within +Return a JSON representation of the cache graph, suitable for use with + +```erlang +json(Base, Req, Opts) -> + ?event({json, {base, Base}, {req, Req}}), + Target = + case hb_ao:get(<<"target">>, Req, Opts) of + not_found -> + case map_size(maps:without([<<"device">>], hb_private:reset(Base))) of + 0 -> + all; + _ -> + ?event({writing_base_for_rendering, Base}), + {ok, Path} = hb_cache:write(Base, Opts), + ?event({wrote_message, Path}), + ID = hb_message:id(Base, all, Opts), + ?event({generated_id, ID}), + ID + end; + <<".">> -> all; + ReqTarget -> ReqTarget + end, + MaxSize = hb_util:int(hb_ao:get(<<"max-size">>, Req, 250, Opts)), + ?event({max_size, MaxSize}), + ?event({generating_json_for, {target, Target}}), + Res = hb_cache_render:get_graph_data(Target, MaxSize, Opts), + ?event({graph_data, Res}), + Res. +``` + +### index + +Return a renderer in HTML form for the JSON format. +Return a JS library that can be used to render the JSON format. + +```erlang +index(Base, _, _Opts) -> + ?event({cacheviz_index, {base, Base}}), + dev_hyperbuddy:return_file(<<"cacheviz@1.0">>, <<"graph.html">>). +``` + +### js + +Return a renderer in HTML form for the JSON format. +Return a JS library that can be used to render the JSON format. 
+ +```erlang +js(_, _, _Opts) -> +``` + +--- + +*Generated from [dev_cacheviz.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cacheviz.erl)* diff --git a/docs/book/src/dev_codec_ans104.erl.md b/docs/book/src/dev_codec_ans104.erl.md new file mode 100644 index 000000000..2eda359ed --- /dev/null +++ b/docs/book/src/dev_codec_ans104.erl.md @@ -0,0 +1,511 @@ +# dev_codec_ans104 + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_ans104.erl) + +Codec for managing transformations from `ar_bundles`-style Arweave TX +records to and from TABMs. + +--- + +## Exported Functions + +- `commit/3` +- `content_type/1` +- `deserialize/3` +- `from/3` +- `serialize/3` +- `to/3` +- `verify/3` + +--- + +### content_type + +Codec for managing transformations from `ar_bundles`-style Arweave TX +Return the content type for the codec. +Serialize a message or TX to a binary. + +```erlang +content_type(_) -> {ok, <<"application/ans104">>}. +``` + +### serialize + +Codec for managing transformations from `ar_bundles`-style Arweave TX +Return the content type for the codec. +Serialize a message or TX to a binary. + +```erlang +serialize(Msg, Req, Opts) when is_map(Msg) -> + serialize(to(Msg, Req, Opts), Req, Opts); +``` + +### serialize + +Codec for managing transformations from `ar_bundles`-style Arweave TX +Return the content type for the codec. +Serialize a message or TX to a binary. + +```erlang +serialize(TX, _Req, _Opts) when is_record(TX, tx) -> + {ok, ar_bundles:serialize(TX)}. +``` + +### deserialize + +Deserialize a binary ans104 message to a TABM. + +```erlang +deserialize(#{ <<"body">> := Binary }, Req, Opts) -> + deserialize(Binary, Req, Opts); +``` + +### deserialize + +Deserialize a binary ans104 message to a TABM. + +```erlang +deserialize(Binary, Req, Opts) when is_binary(Binary) -> + deserialize(ar_bundles:deserialize(Binary), Req, Opts); +``` + +### deserialize + +Deserialize a binary ans104 message to a TABM. 
+ +```erlang +deserialize(TX, Req, Opts) when is_record(TX, tx) -> + from(TX, Req, Opts). +``` + +### commit + +Sign a message using the `priv_wallet` key in the options. Supports both + +```erlang +commit(Msg, Req = #{ <<"type">> := <<"unsigned">> }, Opts) -> + commit(Msg, Req#{ <<"type">> => <<"unsigned-sha256">> }, Opts); +``` + +### commit + +Sign a message using the `priv_wallet` key in the options. Supports both + +```erlang +commit(Msg, Req = #{ <<"type">> := <<"signed">> }, Opts) -> + commit(Msg, Req#{ <<"type">> => <<"rsa-pss-sha256">> }, Opts); +``` + +### commit + +Sign a message using the `priv_wallet` key in the options. Supports both + +```erlang +commit(Msg, Req = #{ <<"type">> := <<"rsa-pss-sha256">> }, Opts) -> + % Convert the given message to an ANS-104 TX record, sign it, and convert + % it back to a structured message. +``` + +### commit + +```erlang +commit(Msg, #{ <<"type">> := <<"unsigned-sha256">> }, Opts) -> + % Remove the commitments from the message, convert it to ANS-104, then back. +``` + +### verify + +Verify an ANS-104 commitment. + +```erlang +verify(Msg, Req, Opts) -> + ?event({verify, {base, Msg}, {req, Req}}), + OnlyWithCommitment = + hb_private:reset( + hb_message:with_commitments( + Req, + Msg, + Opts + ) + ), + ?event({verify, {only_with_commitment, OnlyWithCommitment}}), + {ok, TX} = to(OnlyWithCommitment, Req, Opts), + ?event({verify, {encoded, TX}}), + Res = ar_bundles:verify_item(TX), + {ok, Res}. +``` + +### from + +Convert a #tx record into a message map recursively. + +```erlang +from(Binary, _Req, _Opts) when is_binary(Binary) -> {ok, Binary}; +``` + +### from + +Convert a #tx record into a message map recursively. + +```erlang +from(TX, Req, Opts) when is_record(TX, tx) -> + case lists:keyfind(<<"ao-type">>, 1, TX#tx.tags) of + false -> + do_from(TX, Req, Opts); + {<<"ao-type">>, <<"binary">>} -> + {ok, TX#tx.data} + end. 
+``` + +### do_from + +```erlang +do_from(RawTX, Req, Opts) -> + % Ensure the TX is fully deserialized. +``` + +### to + +Internal helper to translate a message to its #tx record representation, + +```erlang +to(Binary, _Req, _Opts) when is_binary(Binary) -> + % ar_bundles cannot serialize just a simple binary or get an ID for it, so + % we turn it into a TX record with a special tag, tx_to_message will + % identify this tag and extract just the binary. +``` + +### to + +```erlang +to(TX, _Req, _Opts) when is_record(TX, tx) -> {ok, TX}; +``` + +### to + +```erlang +to(RawTABM, Req, Opts) when is_map(RawTABM) -> + % Ensure that the TABM is fully loaded if the `bundle` key is set to true. +``` + +### to + +```erlang +to(Other, _Req, _Opts) -> + throw({invalid_tx, Other}). +``` + +### normal_tags_test + +```erlang +normal_tags_test() -> + Msg = #{ + <<"first-tag">> => <<"first-value">>, + <<"second-tag">> => <<"second-value">> + }, + {ok, Encoded} = to(Msg, #{}, #{}), + ?event({encoded, Encoded}), + {ok, Decoded} = from(Encoded, #{}, #{}), + ?event({decoded, Decoded}), + ?assert(hb_message:match(Msg, Decoded)). +``` + +### from_maintains_tag_name_case_test + +```erlang +from_maintains_tag_name_case_test() -> + TX = #tx { + tags = [ + {<<"Test-Tag">>, <<"test-value">>} + ] + }, + SignedTX = ar_bundles:sign_item(TX, hb:wallet()), + ?event({signed_tx, SignedTX}), + ?assert(ar_bundles:verify_item(SignedTX)), + TABM = hb_util:ok(from(SignedTX, #{}, #{})), + ?event({tabm, TABM}), + ConvertedTX = hb_util:ok(to(TABM, #{}, #{})), + ?event({converted_tx, ConvertedTX}), + ?assert(ar_bundles:verify_item(ConvertedTX)), + ?assertEqual(ConvertedTX, ar_bundles:normalize(SignedTX)). 
+``` + +### restore_tag_name_case_from_cache_test + +```erlang +restore_tag_name_case_from_cache_test() -> + Opts = #{ store => hb_test_utils:test_store() }, + TX = #tx { + tags = [ + {<<"Test-Tag">>, <<"test-value">>}, + {<<"test-tag-2">>, <<"test-value-2">>} + ] + }, + SignedTX = ar_bundles:sign_item(TX, ar_wallet:new()), + SignedMsg = + hb_message:convert( + SignedTX, + <<"structured@1.0">>, + <<"ans104@1.0">>, + Opts + ), + SignedID = hb_message:id(SignedMsg, all), + ?event({signed_msg, SignedMsg}), + OnlyCommitted = hb_message:with_only_committed(SignedMsg, Opts), + ?event({only_committed, OnlyCommitted}), + {ok, ID} = hb_cache:write(SignedMsg, Opts), + ?event({id, ID}), + {ok, ReadMsg} = hb_cache:read(SignedID, Opts), + ?event({restored_msg, ReadMsg}), + {ok, ReadTX} = to(ReadMsg, #{}, Opts), + ?event({restored_tx, ReadTX}), + ?assert(hb_message:match(ReadMsg, SignedMsg)), + ?assert(ar_bundles:verify_item(ReadTX)). +``` + +### unsigned_duplicated_tag_name_test + +```erlang +unsigned_duplicated_tag_name_test() -> + TX = ar_bundles:reset_ids(ar_bundles:normalize(#tx { + tags = [ + {<<"Test-Tag">>, <<"test-value">>}, + {<<"test-tag">>, <<"test-value-2">>} + ] + })), + Msg = hb_message:convert(TX, <<"structured@1.0">>, <<"ans104@1.0">>, #{}), + ?event({msg, Msg}), + TX2 = hb_message:convert(Msg, <<"ans104@1.0">>, <<"structured@1.0">>, #{}), + ?event({tx2, TX2}), + ?assertEqual(TX, TX2). +``` + +### signed_duplicated_tag_name_test + +```erlang +signed_duplicated_tag_name_test() -> + TX = ar_bundles:sign_item(#tx { + tags = [ + {<<"Test-Tag">>, <<"test-value">>}, + {<<"test-tag">>, <<"test-value-2">>} + ] + }, ar_wallet:new()), + Msg = hb_message:convert(TX, <<"structured@1.0">>, <<"ans104@1.0">>, #{}), + ?event({msg, Msg}), + TX2 = hb_message:convert(Msg, <<"ans104@1.0">>, <<"structured@1.0">>, #{}), + ?event({tx2, TX2}), + ?assertEqual(TX, TX2), + ?assert(ar_bundles:verify_item(TX2)). 
+``` + +### simple_to_conversion_test + +```erlang +simple_to_conversion_test() -> + Msg = #{ + <<"first-tag">> => <<"first-value">>, + <<"second-tag">> => <<"second-value">> + }, + {ok, Encoded} = to(Msg, #{}, #{}), + ?event({encoded, Encoded}), + {ok, Decoded} = from(Encoded, #{}, #{}), + ?event({decoded, Decoded}), + ?assert(hb_message:match(Msg, hb_message:uncommitted(Decoded, #{}))). +``` + +### external_item_with_target_field_test + +Ensure that items with an explicitly defined target field lead to: + +```erlang +external_item_with_target_field_test() -> + TX = + ar_bundles:sign_item( + #tx { + target = crypto:strong_rand_bytes(32), + tags = [ + {<<"test-tag">>, <<"test-value">>}, + {<<"test-tag-2">>, <<"test-value-2">>} + ], + data = <<"test-data">> + }, + ar_wallet:new() + ), + EncodedTarget = hb_util:encode(TX#tx.target), + ?event({tx, TX}), + Decoded = hb_message:convert(TX, <<"structured@1.0">>, <<"ans104@1.0">>, #{}), + ?event({decoded, Decoded}), + ?assertEqual(EncodedTarget, hb_maps:get(<<"target">>, Decoded, undefined, #{})), + {ok, OnlyCommitted} = hb_message:with_only_committed(Decoded, #{}), + ?event({only_committed, OnlyCommitted}), + ?assertEqual(EncodedTarget, hb_maps:get(<<"target">>, OnlyCommitted, undefined, #{})), + Encoded = hb_message:convert(OnlyCommitted, <<"ans104@1.0">>, <<"structured@1.0">>, #{}), + ?assertEqual(TX#tx.target, Encoded#tx.target), + ?event({result, {initial, TX}, {result, Encoded}}), + ?assertEqual(TX, Encoded). +``` + +### generate_item_with_target_tag_test + +Ensure that items made inside HyperBEAM use the tags to encode `target` + +```erlang +generate_item_with_target_tag_test() -> + Msg = + #{ + <<"target">> => Target = <<"NON-ID-TARGET">>, + <<"other-key">> => <<"other-value">> + }, + {ok, TX} = to(Msg, #{}, #{}), + ?event({encoded_tx, TX}), + % The encoded TX should have ignored the `target' field, setting a tag instead. 
+``` + +### generate_item_with_target_field_test + +```erlang +generate_item_with_target_field_test() -> + Msg = + hb_message:commit( + #{ + <<"target">> => Target = hb_util:encode(crypto:strong_rand_bytes(32)), + <<"other-key">> => <<"other-value">> + }, + #{ priv_wallet => hb:wallet() }, + <<"ans104@1.0">> + ), + {ok, TX} = to(Msg, #{}, #{}), + ?event({encoded_tx, TX}), + ?assertEqual(Target, hb_util:encode(TX#tx.target)), + Decoded = hb_message:convert(TX, <<"structured@1.0">>, <<"ans104@1.0">>, #{}), + ?event({decoded, Decoded}), + ?assertEqual(Target, hb_maps:get(<<"target">>, Decoded, undefined, #{})), + {ok, OnlyCommitted} = hb_message:with_only_committed(Decoded, #{}), + ?event({only_committed, OnlyCommitted}), + ?assertEqual(Target, hb_maps:get(<<"target">>, OnlyCommitted, undefined, #{})), + Encoded = hb_message:convert(OnlyCommitted, <<"ans104@1.0">>, <<"structured@1.0">>, #{}), + ?event({result, {initial, TX}, {result, Encoded}}), + ?assertEqual(TX, Encoded). +``` + +### type_tag_test + +```erlang +type_tag_test() -> + TX = + ar_bundles:sign_item( + #tx { + tags = [{<<"type">>, <<"test-value">>}] + }, + ar_wallet:new() + ), + ?event({tx, TX}), + Structured = hb_message:convert(TX, <<"structured@1.0">>, <<"ans104@1.0">>, #{}), + ?event({structured, Structured}), + TX2 = hb_message:convert(Structured, <<"ans104@1.0">>, <<"structured@1.0">>, #{}), + ?event({after_conversion, TX2}), + ?assertEqual(TX, TX2). +``` + +### ao_data_key_test + +```erlang +ao_data_key_test() -> + Msg = + hb_message:commit( + #{ + <<"other-key">> => <<"Normal value">>, + <<"body">> => <<"Body value">> + }, + #{ priv_wallet => hb:wallet() }, + <<"ans104@1.0">> + ), + ?event({msg, Msg}), + Enc = hb_message:convert(Msg, <<"ans104@1.0">>, #{}), + ?event({enc, Enc}), + ?assertEqual(<<"Body value">>, Enc#tx.data), + Dec = hb_message:convert(Enc, <<"structured@1.0">>, <<"ans104@1.0">>, #{}), + ?event({dec, Dec}), + ?assert(hb_message:verify(Dec, all, #{})). 
+``` + +### simple_signed_to_httpsig_test + +```erlang +simple_signed_to_httpsig_test() -> + Structured = + hb_message:commit( + #{ <<"test-tag">> => <<"test-value">> }, + #{ priv_wallet => ar_wallet:new() }, + #{ + <<"commitment-device">> => <<"ans104@1.0">> + } + ), + ?event(debug_test, {msg, Structured}), + HTTPSig = + hb_message:convert( + Structured, + <<"httpsig@1.0">>, + <<"structured@1.0">>, + #{} + ), + ?event(debug_test, {httpsig, HTTPSig}), + Structured2 = + hb_message:convert( + HTTPSig, + <<"structured@1.0">>, + <<"httpsig@1.0">>, + #{} + ), + ?event(debug_test, {decoded, Structured2}), + Match = hb_message:match(Structured, Structured2, #{}), + ?assert(Match), + ?assert(hb_message:verify(Structured2, all, #{})), + HTTPSig2 = hb_message:convert(Structured2, <<"httpsig@1.0">>, <<"structured@1.0">>, #{}), + ?event(debug_test, {httpsig2, HTTPSig2}), + ?assert(hb_message:verify(HTTPSig2, all, #{})), + ?assert(hb_message:match(HTTPSig, HTTPSig2)). +``` + +### unsorted_tag_map_test + +```erlang +unsorted_tag_map_test() -> + TX = + ar_bundles:sign_item( + #tx{ + format = ans104, + tags = [ + {<<"z">>, <<"position-1">>}, + {<<"a">>, <<"position-2">>} + ], + data = <<"data">> + }, + ar_wallet:new() + ), + ?assert(ar_bundles:verify_item(TX)), + ?event(debug_test, {tx, TX}), + {ok, TABM} = dev_codec_ans104:from(TX, #{}, #{}), + ?event(debug_test, {tabm, TABM}), + {ok, Decoded} = dev_codec_ans104:to(TABM, #{}, #{}), + ?event(debug_test, {decoded, Decoded}), + ?assert(ar_bundles:verify_item(Decoded)). 
+``` + +### field_and_tag_ordering_test + +```erlang +field_and_tag_ordering_test() -> + UnsignedTABM = #{ + <<"a">> => <<"value1">>, + <<"z">> => <<"value2">>, + <<"target">> => <<"NON-ID-TARGET">> + }, + Wallet = hb:wallet(), + SignedTABM = hb_message:commit( + UnsignedTABM, #{priv_wallet => Wallet}, <<"ans104@1.0">>), +``` + +--- + +*Generated from [dev_codec_ans104.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_ans104.erl)* diff --git a/docs/book/src/dev_codec_ans104_from.erl.md b/docs/book/src/dev_codec_ans104_from.erl.md new file mode 100644 index 000000000..d9b42e17e --- /dev/null +++ b/docs/book/src/dev_codec_ans104_from.erl.md @@ -0,0 +1,355 @@ +# dev_codec_ans104_from + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_ans104_from.erl) + +Library functions for decoding ANS-104-style data items to TABM form. + +--- + +## Exported Functions + +- `base/5` +- `committed/5` +- `data/4` +- `fields/2` +- `tags/2` +- `with_commitments/5` + +--- + +### fields + +Library functions for decoding ANS-104-style data items to TABM form. +Return a TABM message containing the fields of the given decoded + +```erlang +fields(Item, _Opts) -> + case Item#tx.target of + ?DEFAULT_TARGET -> #{}; + Target -> + #{ + <<"target">> => hb_util:encode(Target) + } + end. +``` + +### tags + +Return a TABM of the raw tags of the item, including all metadata + +```erlang +tags(Item, Opts) -> + Tags = hb_ao:normalize_keys( + deduplicating_from_list(Item#tx.tags, Opts), + Opts + ), + ao_types(Tags, Opts). 
+``` + +### ao_types + +Ensure the encoded keys in the `ao-types` field are lowercased and + +```erlang +ao_types(#{ <<"ao-types">> := AoTypes } = Tags, Opts) -> + AOTypes = dev_codec_structured:decode_ao_types(AoTypes, Opts), + % Normalize all keys in the ao-types map and re-encode + NormAOTypes = + maps:fold( + fun(Key, Val, Acc) -> + NormKey = hb_util:to_lower(hb_ao:normalize_key(Key)), + Acc#{ NormKey => Val } + end, + #{}, + AOTypes + ), + EncodedAOTypes = dev_codec_structured:encode_ao_types(NormAOTypes, Opts), + Tags#{ <<"ao-types">> := EncodedAOTypes }; +``` + +### ao_types + +Ensure the encoded keys in the `ao-types` field are lowercased and + +```erlang +ao_types(Tags, _Opts) -> + Tags. +``` + +### data + +Return a TABM of the keys and values found in the data field of the item. + +```erlang +data(Item, Req, Tags, Opts) -> + % If the data field is empty, we return an empty map. If it is a map, we + % return it as such. Otherwise, we return a map with the data key set to + % the raw data value. This handles unbundling nested messages, as well as + % applying the `ao-data-key' tag if given. +``` + +### committed + +Calculate the list of committed keys for an item, based on its + +```erlang +committed(Item, Fields, Tags, Data, Opts) -> + hb_util:unique( + data_keys(Data, Opts) ++ + tag_keys(Item, Opts) ++ + field_keys(Fields, Tags, Data, Opts) + ). +``` + +### field_keys + +Return the list of the keys from the fields TABM. + +```erlang +field_keys(BaseFields, Tags, Data, Opts) -> + HasTarget = + hb_maps:is_key(<<"target">>, BaseFields, Opts) orelse + hb_maps:is_key(<<"target">>, Tags, Opts) orelse + hb_maps:is_key(<<"target">>, Data, Opts), + case HasTarget of + true -> [<<"target">>]; + false -> [] + end. +``` + +### data_keys + +Return the list of the keys from the data TABM. + +```erlang +data_keys(Data, Opts) -> + hb_util:to_sorted_keys(Data, Opts). +``` + +### tag_keys + +Return the list of the keys from the tags TABM. 
Filter all metadata + +```erlang +tag_keys(Item, _Opts) -> + MetaTags = [ + <<"bundle-format">>, + <<"bundle-version">>, + <<"bundle-map">>, + <<"ao-data-key">> + ], + lists:filtermap( + fun({Tag, _}) -> + case lists:member(Tag, MetaTags) of + true -> false; + false -> {true, hb_util:to_lower(hb_ao:normalize_key(Tag))} + end + end, + Item#tx.tags + ). +``` + +### base + +Return the complete message for an item, less its commitments. The + +```erlang +base(CommittedKeys, Fields, Tags, Data, Opts) -> + hb_maps:from_list( + lists:map( + fun(Key) -> + case hb_maps:find(Key, Data, Opts) of + error -> + case hb_maps:find(Key, Fields, Opts) of + error -> + case hb_maps:find(Key, Tags, Opts) of + error -> throw({missing_key, Key}); + {ok, Value} -> {Key, Value} + end; + {ok, Value} -> {Key, Value} + end; + {ok, Value} -> {Key, Value} + end + end, + CommittedKeys + ) + ). +``` + +### with_commitments + +Return a message with the appropriate commitments added to it. + +```erlang +with_commitments(Item, Tags, Base, CommittedKeys, Opts) -> + case Item#tx.signature of + ?DEFAULT_SIG -> + case normal_tags(Item#tx.tags) of + true -> Base; + false -> + with_unsigned_commitment(Item, Tags, Base, CommittedKeys, Opts) + end; + _ -> with_signed_commitment(Item, Tags, Base, CommittedKeys, Opts) + end. 
+``` + +### with_unsigned_commitment + +Returns a commitments message for an item, containing an unsigned + +```erlang +with_unsigned_commitment(Item, Tags, UncommittedMessage, CommittedKeys, Opts) -> + ID = hb_util:human_id(Item#tx.unsigned_id), + UncommittedMessage#{ + <<"commitments">> => #{ + ID => + filter_unset( + #{ + <<"commitment-device">> => <<"ans104@1.0">>, + <<"committed">> => CommittedKeys, + <<"type">> => <<"unsigned-sha256">>, + <<"bundle">> => bundle_commitment_key(Tags, Opts), + <<"original-tags">> => original_tags(Item, Opts), + <<"field-target">> => + case Item#tx.target of + ?DEFAULT_TARGET -> unset; + Target -> hb_util:encode(Target) + end, + <<"field-anchor">> => + case Item#tx.anchor of + ?DEFAULT_LAST_TX -> unset; + LastTX -> LastTX + end + }, + Opts + ) + } + }. +``` + +### with_signed_commitment + +Returns a commitments message for an item, containing a signed + +```erlang +with_signed_commitment(Item, Tags, UncommittedMessage, CommittedKeys, Opts) -> + Address = hb_util:human_id(ar_wallet:to_address(Item#tx.owner)), + ID = hb_util:human_id(Item#tx.id), + Commitment = + filter_unset( + #{ + <<"commitment-device">> => <<"ans104@1.0">>, + <<"committer">> => Address, + <<"committed">> => CommittedKeys, + <<"signature">> => hb_util:encode(Item#tx.signature), + <<"keyid">> => + <<"publickey:", (hb_util:encode(Item#tx.owner))/binary>>, + <<"type">> => <<"rsa-pss-sha256">>, + <<"bundle">> => bundle_commitment_key(Tags, Opts), + <<"original-tags">> => original_tags(Item, Opts), + <<"field-anchor">> => + case Item#tx.anchor of + ?DEFAULT_LAST_TX -> unset; + LastTX -> LastTX + end, + <<"field-target">> => + case Item#tx.target of + ?DEFAULT_TARGET -> unset; + Target -> hb_util:encode(Target) + end + }, + Opts + ), + UncommittedMessage#{ + <<"commitments">> => #{ + ID => Commitment + } + }. +``` + +### bundle_commitment_key + +Return the bundle key for an item. +Check whether a list of key-value pairs contains only normalized keys. 
+ +```erlang +bundle_commitment_key(Tags, Opts) -> + hb_util:bin(hb_maps:is_key(<<"bundle-format">>, Tags, Opts)). +``` + +### normal_tags + +Return the bundle key for an item. +Check whether a list of key-value pairs contains only normalized keys. + +```erlang +normal_tags(Tags) -> + lists:all( + fun({Key, _}) -> + hb_util:to_lower(hb_ao:normalize_key(Key)) =:= Key + end, + Tags + ). +``` + +### original_tags + +Return the original tags of an item if it is applicable. Otherwise, + +```erlang +original_tags(Item, _Opts) -> + case normal_tags(Item#tx.tags) of + true -> unset; + false -> encoded_tags_to_map(Item#tx.tags) + end. +``` + +### encoded_tags_to_map + +Convert an ANS-104 encoded tag list into a HyperBEAM-compatible map. + +```erlang +encoded_tags_to_map(Tags) -> + hb_util:list_to_numbered_message( + lists:map( + fun({Key, Value}) -> + #{ + <<"name">> => Key, + <<"value">> => Value + } + end, + Tags + ) + ). +``` + +### filter_unset + +Remove all undefined values from a map. + +```erlang +filter_unset(Map, Opts) -> + hb_maps:filter( + fun(_, Value) -> + case Value of + unset -> false; + _ -> true + end + end, + Map, + Opts + ). +``` + +### deduplicating_from_list + +Deduplicate a list of key-value pairs by key, generating a list of + +```erlang +deduplicating_from_list(Tags, Opts) -> + % Aggregate any duplicated tags into an ordered list of values. +``` + +--- + +*Generated from [dev_codec_ans104_from.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_ans104_from.erl)* diff --git a/docs/book/src/dev_codec_ans104_to.erl.md b/docs/book/src/dev_codec_ans104_to.erl.md new file mode 100644 index 000000000..6c045f251 --- /dev/null +++ b/docs/book/src/dev_codec_ans104_to.erl.md @@ -0,0 +1,240 @@ +# dev_codec_ans104_to + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_ans104_to.erl) + +Library functions for encoding messages to the ANS-104 format. 
+
+---
+
+## Exported Functions
+
+- `data/3`
+- `maybe_load/3`
+- `siginfo/2`
+- `tags/4`
+
+---
+
+### maybe_load
+
+Determine if the message should be loaded from the cache and re-converted
+to TABM form with bundling enabled.
+
+```erlang
+maybe_load(RawTABM, Req, Opts) ->
+    case hb_util:atom(hb_ao:get(<<"bundle">>, Req, false, Opts)) of
+        false -> RawTABM;
+        true ->
+            % Convert back to the fully loaded structured@1.0 message, then
+            % convert to TABM with bundling enabled.
+```
+
+### siginfo
+
+Calculate the fields for a message, returning an initial TX record.
+
+```erlang
+siginfo(Message, Opts) ->
+    MaybeCommitment =
+        hb_message:commitment(
+            #{ <<"commitment-device">> => <<"ans104@1.0">> },
+            Message,
+            Opts
+        ),
+    case MaybeCommitment of
+        {ok, _, Commitment} -> commitment_to_tx(Commitment, Opts);
+        not_found ->
+            case hb_maps:find(<<"target">>, Message, Opts) of
+                {ok, EncodedTarget} ->
+                    case hb_util:safe_decode(EncodedTarget) of
+                        {ok, Target} when ?IS_ID(Target) ->
+                            #tx{ target = Target };
+                        _ -> #tx{}
+                    end;
+                error -> #tx{}
+            end;
+        multiple_matches ->
+            throw({multiple_ans104_commitments_unsupported, Message})
+    end.
+```
+
+### commitment_to_tx
+
+Convert a commitment to a base TX record. 
Extracts the owner, signature, + +```erlang +commitment_to_tx(Commitment, Opts) -> + Signature = + hb_util:decode( + maps:get(<<"signature">>, Commitment, hb_util:encode(?DEFAULT_SIG)) + ), + Owner = + case hb_maps:find(<<"keyid">>, Commitment, Opts) of + {ok, KeyID} -> + hb_util:decode( + dev_codec_httpsig_keyid:remove_scheme_prefix(KeyID) + ); + error -> ?DEFAULT_OWNER + end, + Tags = + case hb_maps:find(<<"original-tags">>, Commitment, Opts) of + {ok, OriginalTags} -> original_tags_to_tags(OriginalTags); + error -> [] + end, + LastTX = + case hb_maps:find(<<"field-anchor">>, Commitment, Opts) of + {ok, EncodedLastTX} -> hb_util:decode(EncodedLastTX); + error -> ?DEFAULT_LAST_TX + end, + Target = + case hb_maps:find(<<"field-target">>, Commitment, Opts) of + {ok, EncodedTarget} -> hb_util:decode(EncodedTarget); + error -> ?DEFAULT_TARGET + end, + ?event({commitment_owner, Owner}), + ?event({commitment_signature, Signature}), + ?event({commitment_tags, Tags}), + ?event({commitment_last_tx, LastTX}), + #tx{ + owner = Owner, + signature = Signature, + tags = Tags, + anchor = LastTX, + target = Target + }. +``` + +### data + +Calculate the data field for a message. + +```erlang +data(TABM, Req, Opts) -> + DataKey = inline_key(TABM), + % Translate the keys into a binary map. If a key has a value that is a map, + % we recursively turn its children into messages. +``` + +### data_messages + +Calculate the data value for a message. The rules are: + +```erlang +data_messages(TABM, Opts) when is_map(TABM) -> + UncommittedTABM = + hb_maps:without( + [<<"commitments">>, <<"data">>, <<"target">>], + hb_private:reset(TABM), + Opts + ), + % If there are too many keys in the TABM, throw an error. +``` + +### tags + +Calculate the tags field for a data item. If the TX already has tags + +```erlang +tags(#tx{ tags = ExistingTags }, _, _, _) when ExistingTags =/= [] -> + ExistingTags; +``` + +### tags + +Calculate the tags field for a data item. 
If the TX already has tags + +```erlang +tags(TX, TABM, Data, Opts) -> + DataKey = inline_key(TABM), + MaybeCommitment = + hb_message:commitment( + #{ <<"commitment-device">> => <<"ans104@1.0">> }, + TABM, + Opts + ), + CommittedTagKeys = + case MaybeCommitment of + {ok, _, Commitment} -> + % There is already a commitment, so the tags and order are + % pre-determined. However, if the message has been bundled, + % any `+link`-suffixed keys in the committed list may need to + % be resolved to their base keys (e.g., `output+link` -> `output`). +``` + +### include_target_tag + +Return whether to include the `target` tag in the tags list. + +```erlang +include_target_tag(TX, TABM, Opts) -> + case {TX#tx.target, hb_maps:get(<<"target">>, TABM, undefined, Opts)} of + {?DEFAULT_TARGET, _} -> true; + {FieldTarget, TagTarget} when FieldTarget =/= TagTarget -> false; + _ -> true + end. +``` + +### committed_tag_keys_to_tags + +Apply the `ao-data-key` to the committed keys to generate the list of + +```erlang +committed_tag_keys_to_tags(TX, TABM, DataKey, Committed, Opts) -> + DataKeysToExclude = + case TX#tx.data of + Data when is_map(Data)-> maps:keys(Data); + _ -> [] + end, + case DataKey of + <<"data">> -> []; + _ -> [{<<"ao-data-key">>, DataKey}] + end ++ + lists:map( + fun(Key) -> + case hb_maps:find(Key, TABM, Opts) of + error -> throw({missing_committed_key, Key}); + {ok, Value} -> {Key, Value} + end + end, + hb_util:list_without( + [DataKey | DataKeysToExclude], + Committed + ) + ). +``` + +### inline_key + +Determine if an `ao-data-key` should be added to the message. + +```erlang +inline_key(Msg) -> + InlineKey = maps:get(<<"ao-data-key">>, Msg, undefined), + case { + InlineKey, + maps:get(<<"data">>, Msg, ?DEFAULT_DATA) == ?DEFAULT_DATA, + maps:is_key(<<"body">>, Msg) + andalso not ?IS_LINK(maps:get(<<"body">>, Msg, undefined)) + } of + {Explicit, _, _} when Explicit =/= undefined -> + % ao-data-key already exists, so we honor it. 
+```
+
+### original_tags_to_tags
+
+Convert a HyperBEAM-compatible map into an ANS-104 encoded tag list,
+preserving the tag ordering of the original message.
+
+```erlang
+original_tags_to_tags(TagMap) ->
+    OrderedList = hb_util:message_to_ordered_list(hb_private:reset(TagMap)),
+    ?event({ordered_tagmap, {explicit, OrderedList}, {input, {explicit, TagMap}}}),
+    lists:map(
+        fun(#{ <<"name">> := Key, <<"value">> := Value }) ->
+            {Key, Value}
+        end,
+        OrderedList
+```
+
+---
+
+*Generated from [dev_codec_ans104_to.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_ans104_to.erl)*
diff --git a/docs/book/src/dev_codec_cookie.erl.md b/docs/book/src/dev_codec_cookie.erl.md
new file mode 100644
index 000000000..b377ac2c8
--- /dev/null
+++ b/docs/book/src/dev_codec_cookie.erl.md
@@ -0,0 +1,570 @@
+# dev_codec_cookie
+
+[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_cookie.erl)
+
+A utility device that manages setting and encoding/decoding the cookies
+found in requests from a caller. This device implements the `~cookie@1.0`
+codec, inline with the `~message@1.0` schema for conversion.
+Additionally, a `commit` key commits to a message using a secret generated
+and stored in the cookies of the caller, and a `verify` key validates said
+commitments. In addition, a `generate` key is provided to perform only the
+generation side of the commitment process. The `finalize` key may be
+employed to add a `set` operation to the end of a message sequence, which
+is used in hooks that need to ensure a caller always receives cookies
+generated outside of the normal AO-Core execution flow. In totality, these
+keys implement the `generator` interface type, and may be employed in
+various contexts. For example, `~auth-hook@1.0` may be configured to use
+this device to generate and store secrets in the cookies of the caller,
+which are then used with the `~proxy-wallet@1.0` device to sign requests. 
+The `commit` and `verify` keys utilize the `~httpsig@1.0`'s HMAC `secret` +commitment scheme, which uses a secret key to commit to a message, with the +`committer` being listed as a hash of the secret. +This device supports the following paths: +`/commit`: Sets a `secret` key in the cookies of the caller. The name of +the cookie is calculated as the hash of the secret. +`/verify`: Verifies the caller's request by checking the committer in the +request matches the secret in the cookies of the base message. +`/store`: Sets the keys in the request message in the cookies of the caller. +`/extract`: Extracts the cookies from a base message. +`/reset`: Removes all cookie keys from the base message. +`/to`: Converts a message containing cookie sources (`cookie`, `set-cookie`, +or `priv/cookie`) into the format specified in the request message (e.g. +`set-cookie`, `cookie`). +`/from`: Converts a message containing encoded cookies into a message +containing the cookies parsed and normalized. + +--- + +## Exported Functions + +- `commit/3` +- `extract/3` +- `finalize/3` +- `from/3` +- `generate/3` +- `get_cookie/3` +- `opts/1` +- `reset/2` +- `store/3` +- `to/3` +- `verify/3` + +--- + +### opts + +A utility device that manages setting and encoding/decoding the cookies +Get the private store options to use for functions in the cookie device. + +```erlang +opts(Opts) -> hb_private:opts(Opts). +%%% ~message@1.0 Commitments API keys. +``` + +### commit + +```erlang +commit(Base, Req, RawOpts) -> dev_codec_cookie_auth:commit(Base, Req, RawOpts). +``` + +### verify + +Preprocessor keys that utilize cookies and the `~secret@1.0` device to + +```erlang +verify(Base, Req, RawOpts) -> dev_codec_cookie_auth:verify(Base, Req, RawOpts). +``` + +### generate + +Preprocessor keys that utilize cookies and the `~secret@1.0` device to + +```erlang +generate(Base, Req, Opts) -> + dev_codec_cookie_auth:generate(Base, Req, Opts). 
+``` + +### finalize + +Finalize an `on-request` hook by adding the `set-cookie` header to the + +```erlang +finalize(Base, Request, Opts) -> + dev_codec_cookie_auth:finalize(Base, Request, Opts). +``` + +### get_cookie + +Get the cookie with the given key from the base message. The format of + +```erlang +get_cookie(Base, Req, RawOpts) -> + Opts = opts(RawOpts), + {ok, Cookies} = extract(Base, Req, Opts), + Key = hb_maps:get(<<"key">>, Req, undefined, Opts), + case hb_maps:get(Key, Cookies, undefined, Opts) of + undefined -> {error, not_found}; + Cookie -> + Format = hb_maps:get(<<"format">>, Req, <<"default">>, Opts), + case Format of + <<"default">> -> {ok, Cookie}; + <<"set-cookie">> -> {ok, normalize_cookie_value(Cookie)}; + <<"cookie">> -> {ok, value(Cookie)} + end + end. +``` + +### extract + +Return the parsed and normalized cookies from a message. + +```erlang +extract(Msg, Req, Opts) -> + {ok, MsgWithCookie} = from(Msg, Req, Opts), + Cookies = hb_private:get(<<"cookie">>, MsgWithCookie, #{}, Opts), + {ok, Cookies}. +``` + +### store + +Set the keys in the request message in the cookies of the caller. Removes + +```erlang +store(Base, Req, RawOpts) -> + Opts = opts(RawOpts), + ?event({store, {base, Base}, {req, Req}}), + {ok, ExistingCookies} = extract(Base, Req, Opts), + ?event({store, {existing_cookies, ExistingCookies}}), + {ok, ResetBase} = reset(Base, Opts), + ?event({store, {reset_base, ResetBase}}), + MsgToSet = + hb_maps:without( + [ + <<"path">>, + <<"accept-bundle">>, + <<"ao-peer">>, + <<"host">>, + <<"method">>, + <<"body">> + ], + hb_private:reset(Req), + Opts + ), + ?event({store, {msg_to_set, MsgToSet}}), + NewCookies = hb_maps:merge(ExistingCookies, MsgToSet, Opts), + NewBase = hb_private:set(ResetBase, <<"cookie">>, NewCookies, Opts), + {ok, NewBase}. 
+``` + +### reset + +Remove all cookie keys from the given message (including `cookie` and + +```erlang +reset(Base, RawOpts) -> + Opts = opts(RawOpts), + WithoutBaseCookieKeys = + hb_maps:without( + [<<"cookie">>, <<"set-cookie">>], + Base, + Opts + ), + WithoutPrivCookie = + hb_private:set( + WithoutBaseCookieKeys, + <<"cookie">>, + unset, + Opts + ), + {ok, WithoutPrivCookie}. +``` + +### to + +Convert a message containing cookie sources (`cookie`, `set-cookie`, + +```erlang +to(Msg, Req, Opts) -> + ?event({to, {msg, Msg}, {req, Req}}), + CookieOpts = opts(Opts), + LoadedMsg = hb_cache:ensure_all_loaded(Msg, CookieOpts), + ?event({to, {loaded_msg, LoadedMsg}}), + do_to(LoadedMsg, Req, CookieOpts). +``` + +### do_to + +```erlang +do_to(Msg, Req = #{ <<"format">> := <<"set-cookie">> }, Opts) when is_map(Msg) -> + ?event({to_set_cookie, {msg, Msg}, {req, Req}}), + {ok, ExtractedParsedCookies} = extract(Msg, Req, Opts), + {ok, ResetBase} = reset(Msg, Opts), + SetCookieLines = + maps:values( + maps:map( + fun to_set_cookie_line/2, + ExtractedParsedCookies + ) + ), + MsgWithSetCookie = + ResetBase#{ + <<"set-cookie">> => SetCookieLines + }, + {ok, MsgWithSetCookie}; +``` + +### do_to + +```erlang +do_to(Msg, Req = #{ <<"format">> := <<"cookie">> }, Opts) when is_map(Msg) -> + ?event({to_cookie, {msg, Msg}, {req, Req}}), + {ok, ExtractedParsedCookies} = extract(Msg, Req, Opts), + {ok, ResetBase} = reset(Msg, Opts), + CookieLines = + hb_maps:values( + hb_maps:map( + fun to_cookie_line/2, + ExtractedParsedCookies, + Opts + ), + Opts + ), + ?event({to_cookie, {cookie_lines, CookieLines}}), + CookieLine = join(CookieLines, <<"; ">>), + {ok, ResetBase#{ <<"cookie">> => CookieLine }}; +``` + +### do_to + +```erlang +do_to(Msg, _Req, _Opts) when is_map(Msg) -> + error({cookie_to_error, {no_format_specified, Msg}}); +``` + +### do_to + +```erlang +do_to(Msg, _Req, _Opts) -> + error({cookie_to_error, {unexpected_message_format, Msg}}). 
+``` + +### to_set_cookie_line + +Convert a single cookie into a `set-cookie` header line. The cookie + +```erlang +to_set_cookie_line(Key, RawCookie) -> + Cookie = normalize_cookie_value(RawCookie), + % Encode the cookie key-value pair as a string to use as the base. +``` + +### to_cookie_line + +Convert a single cookie into a `cookie` header component. These + +```erlang +to_cookie_line(Key, Cookie) -> + to_set_cookie_line(Key, value(Cookie)). +``` + +### from + +Normalize a message containing a `cookie`, `set-cookie`, and potentially + +```erlang +from(Msg, Req, Opts) -> + CookieOpts = opts(Opts), + LoadedMsg = hb_cache:ensure_all_loaded(Msg, Opts), + do_from(LoadedMsg, Req, CookieOpts). +``` + +### do_from + +```erlang +do_from(Msg, Req, Opts) when is_map(Msg) -> + {ok, ResetBase} = reset(Msg, Opts), + % Get the cookies, parsed, from each available source. +``` + +### do_from + +```erlang +do_from(CookiesMsg, _Req, _Opts) -> + error({cookie_from_error, {unexpected_message_format, CookiesMsg}}). +``` + +### from_cookie + +Convert the `cookie` key into a parsed cookie message. `cookie` headers + +```erlang +from_cookie(#{ <<"cookie">> := Cookie }, Req, Opts) -> + from_cookie(Cookie, Req, Opts); +``` + +### from_cookie + +Convert the `cookie` key into a parsed cookie message. `cookie` headers + +```erlang +from_cookie(Cookies, Req, Opts) when is_list(Cookies) -> + MergedParsed = + lists:foldl( + fun(Cookie, Acc) -> + {ok, Parsed} = from_cookie(Cookie, Req, Opts), + hb_maps:merge(Acc, Parsed, Opts) + end, + #{}, + Cookies + ), + {ok, MergedParsed}; +``` + +### from_cookie + +Convert the `cookie` key into a parsed cookie message. 
`cookie` headers + +```erlang +from_cookie(Cookie, _Req, _Opts) when is_binary(Cookie) -> + BinaryCookiePairs = split(semicolon, Cookie), + KeyValList = + lists:map( + fun(BinaryCookiePair) -> + {[Key, Value], _Rest} = split(pair, BinaryCookiePair), + {Key, hb_escape:decode(Value)} + end, + BinaryCookiePairs + ), + NormalizedMessage = maps:from_list(KeyValList), + {ok, NormalizedMessage}; +``` + +### from_cookie + +Convert the `cookie` key into a parsed cookie message. `cookie` headers + +```erlang +from_cookie(_MsgWithoutCookie, _Req, _Opts) -> + % The cookie key is not present in the message, so we return an empty map. +``` + +### from_set_cookie + +Convert a `set-cookie` header line into a cookie message. The `set-cookie` + +```erlang +from_set_cookie(#{ <<"set-cookie">> := Cookie }, Req, Opts) -> + ?event({from_set_cookie, {cookie, Cookie}}), + from_set_cookie(Cookie, Req, Opts); +``` + +### from_set_cookie + +Convert a `set-cookie` header line into a cookie message. The `set-cookie` + +```erlang +from_set_cookie(MsgWithoutSet, _Req, _Opts) when is_map(MsgWithoutSet) -> + % The set-cookie key is not present in the message, so we return an empty map. +``` + +### from_set_cookie + +```erlang +from_set_cookie(Lines, Req, Opts) when is_list(Lines) -> + MergedParsed = + lists:foldl( + fun(Line, Acc) -> + {ok, Parsed} = from_set_cookie(Line, Req, Opts), + hb_maps:merge(Acc, Parsed) + end, + #{}, + Lines + ), + {ok, MergedParsed}; +``` + +### from_set_cookie + +```erlang +from_set_cookie(Line, _Req, Opts) when is_binary(Line) -> + {[Key, Value], Rest} = split(pair, Line), + ValueDecoded = hb_escape:decode(Value), + % If there is no remaining binary after the pair, we have a simple key-value + % pair, returning just the binary as the value. Otherwise, we split the + % remaining binary into attributes and flags and return a message with the + % value and those parsed elements. 
+``` + +### to_sorted_list + +Takes a message or list of binaries and returns a sorted list of key- + +```erlang +to_sorted_list(Msg) when is_map(Msg) -> + lists:keysort( + 1, + [ + {trim_bin(hb_util:bin(K)), trim_bin(V)} + || {K, V} <- maps:to_list(Msg) + ] + ); +``` + +### to_sorted_list + +Takes a message or list of binaries and returns a sorted list of key- + +```erlang +to_sorted_list(Binaries) when is_list(Binaries) -> + lists:sort( + lists:map( + fun(Bin) -> trim_bin(hb_util:bin(Bin)) end, + Binaries + ) + ). +``` + +### value + +Take a single parse cookie and return only the value (ignoring attributes + +```erlang +value(Msg) when is_map(Msg) -> + maps:get(<<"value">>, Msg, Msg); +``` + +### value + +Take a single parse cookie and return only the value (ignoring attributes + +```erlang +value(Bin) when is_binary(Bin) -> + Bin. +``` + +### normalize_cookie_value + +Normalize a cookie value to a map with the following keys: + +```erlang +normalize_cookie_value(Msg) when is_map(Msg) -> + Msg#{ + <<"value">> => maps:get(<<"value">>, Msg, Msg), + <<"attributes">> => maps:get(<<"attributes">>, Msg, #{}), + <<"flags">> => maps:get(<<"flags">>, Msg, []) + }; +``` + +### normalize_cookie_value + +Normalize a cookie value to a map with the following keys: + +```erlang +normalize_cookie_value(Bin) when is_binary(Bin) -> + #{ + <<"value">> => Bin, + <<"attributes">> => #{}, + <<"flags">> => [] + }. +``` + +### trim_bin + +Trim a binary of leading and trailing whitespace. + +```erlang +trim_bin(Bin) when is_binary(Bin) -> + list_to_binary(string:trim(binary_to_list(Bin))). +``` + +### join + +Join a list of binaries into a `separator`-separated string. Abstracts + +```erlang +join(Binaries, Separator) -> + hb_util:bin( + string:join( + lists:map(fun hb_util:list/1, Binaries), + hb_util:list(Separator) + ) + ). +``` + +### split + +Split a binary by a separator type (`pair`, `lines`, or `attributes`). 
+ +```erlang +split(pair, Bin) -> + [Key, ValueRest] = binary:split(Bin, <<"=">>), + {_, Value, Rest} = hb_util:split_depth_string_aware_single($;, ValueRest), + {[Key, unquote(Value)], trim_leading(Rest)}; +``` + +### split + +Split a binary by a separator type (`pair`, `lines`, or `attributes`). + +```erlang +split(lines, Bin) -> + lists:map(fun trim_leading/1, hb_util:split_depth_string_aware($,, Bin)); +``` + +### split + +Split a binary by a separator type (`pair`, `lines`, or `attributes`). + +```erlang +split(semicolon, Bin) -> + lists:map(fun trim_leading/1, hb_util:split_depth_string_aware($;, Bin)). +``` + +### trim_leading + +Remove leading whitespace from a binary, if present. + +```erlang +trim_leading(Line) when not is_binary(Line) -> + trim_leading(hb_util:bin(Line)); +``` + +### trim_leading + +Remove leading whitespace from a binary, if present. + +```erlang +trim_leading(<<>>) -> <<>>; +``` + +### trim_leading + +Remove leading whitespace from a binary, if present. + +```erlang +trim_leading(<<" ", Rest/binary>>) -> trim_leading(Rest); +``` + +### trim_leading + +Remove leading whitespace from a binary, if present. +Unquote a binary if it is quoted. If it is not quoted, we return the + +```erlang +trim_leading(Line) -> Line. +``` + +### unquote + +Remove leading whitespace from a binary, if present. +Unquote a binary if it is quoted. 
If it is not quoted, we return the binary unchanged.
+
+```erlang
+unquote(<< $\", Rest/binary>>) ->
+    {Unquoted, _} = hb_util:split_escaped_single($\", Rest),
+    Unquoted;
+```
+
+---
+
+*Generated from [dev_codec_cookie.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_cookie.erl)*
diff --git a/docs/book/src/dev_codec_cookie_auth.erl.md b/docs/book/src/dev_codec_cookie_auth.erl.md
new file mode 100644
index 000000000..6f50f3641
--- /dev/null
+++ b/docs/book/src/dev_codec_cookie_auth.erl.md
@@ -0,0 +1,329 @@
+# dev_codec_cookie_auth
+
+[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_cookie_auth.erl)
+
+Implements the `message@1.0` commitment interface for the `~cookie@1.0` device,
+as well as the `generator` interface type for the `~auth-hook@1.0` device.
+See the [cookie codec](dev_codec_cookie.html) documentation for more details.
+
+---
+
+## Exported Functions
+
+- `commit/3`
+- `finalize/3`
+- `generate/3`
+- `verify/3`
+
+---
+
+### generate
+
+Generate a new secret (if none is already present in the caller's cookie),
+returning the secret(s) in the `secret` key of the response.
+
+```erlang
+generate(Base, Request, Opts) ->
+    {WithCookie, Secrets} =
+        case find_secrets(Request, Opts) of
+            [] ->
+                {ok, GeneratedSecret} = generate_secret(Base, Request, Opts),
+                {ok, Updated} = store_secret(GeneratedSecret, Request, Opts),
+                {Updated, [GeneratedSecret]};
+            FoundSecrets ->
+                {Request, FoundSecrets}
+        end,
+    ?event({normalized_cookies_found, {secrets, Secrets}}),
+    {
+        ok,
+        WithCookie#{
+            <<"secret">> => Secrets
+        }
+    }. 
+``` + +### finalize + +Finalize an `on-request` hook by adding the cookie to the chain of + +```erlang +finalize(Base, Request, Opts) -> + ?event(debug_auth, {finalize, {base, Base}, {request, Request}}), + maybe + {ok, SignedMsg} ?= hb_maps:find(<<"request">>, Request, Opts), + {ok, MessageSequence} ?= hb_maps:find(<<"body">>, Request, Opts), + % Cookie auth adds set-cookie to response + {ok, #{ <<"set-cookie">> := SetCookie }} = + dev_codec_cookie:to( + SignedMsg, + #{ <<"format">> => <<"set-cookie">> }, + Opts + ), + { + ok, + MessageSequence ++ + [#{ <<"path">> => <<"set">>, <<"set-cookie">> => SetCookie }] + } + else error -> + {error, no_request} + end. +``` + +### commit + +Generate a new secret (if no `committer` specified), and use it as the + +```erlang +commit(Base, Request, RawOpts) when ?IS_LINK(Request) -> + Opts = dev_codec_cookie:opts(RawOpts), + commit(Base, hb_cache:ensure_loaded(Request, Opts), Opts); +``` + +### commit + +Generate a new secret (if no `committer` specified), and use it as the + +```erlang +commit(Base, Req = #{ <<"secret">> := Secret }, RawOpts) -> + Opts = dev_codec_cookie:opts(RawOpts), + commit(hb_cache:ensure_loaded(Secret, Opts), Base, Req, Opts); +``` + +### commit + +Generate a new secret (if no `committer` specified), and use it as the + +```erlang +commit(Base, Request, RawOpts) -> + Opts = dev_codec_cookie:opts(RawOpts), + % Calculate the key to use for the commitment. +``` + +### commit + +Given the secret key, commit the message and set the cookie. This + +```erlang +commit(Secret, Base, Request, Opts) -> + {ok, CommittedMsg} = + dev_codec_httpsig_proxy:commit( + <<"cookie@1.0">>, + Secret, + Base, + Request, + Opts + ), + store_secret(Secret, CommittedMsg, Opts). +``` + +### store_secret + +Update the nonces for a given secret. 
+ +```erlang +store_secret(Secret, Msg, Opts) -> + CookieAddr = dev_codec_httpsig_keyid:secret_key_to_committer(Secret), + % Create the cookie parameters, using the name as the key and the secret as + % the value. +``` + +### verify + +Verify the HMAC commitment with the key being the secret from the + +```erlang +verify(Base, ReqLink, RawOpts) when ?IS_LINK(ReqLink) -> + Opts = dev_codec_cookie:opts(RawOpts), + verify(Base, hb_cache:ensure_loaded(ReqLink, Opts), Opts); +``` + +### verify + +Verify the HMAC commitment with the key being the secret from the + +```erlang +verify(Base, Req = #{ <<"secret">> := Secret }, RawOpts) -> + Opts = dev_codec_cookie:opts(RawOpts), + ?event({verify_with_explicit_key, {base, Base}, {request, Req}}), + dev_codec_httpsig_proxy:verify( + hb_util:decode(Secret), + Base, + Req, + Opts + ); +``` + +### verify + +Verify the HMAC commitment with the key being the secret from the + +```erlang +verify(Base, Request, RawOpts) -> + Opts = dev_codec_cookie:opts(RawOpts), + ?event({verify_finding_key, {base, Base}, {request, Request}}), + case find_secret(Request, Opts) of + {ok, Secret} -> + dev_codec_httpsig_proxy:verify( + hb_util:decode(Secret), + Base, + Request, + Opts + ); + {error, Err} -> + {error, Err} + end. +``` + +### generate_secret + +Generate a new secret key for the given request. The user may specify + +```erlang +generate_secret(_Base, Request, Opts) -> + case hb_maps:get(<<"generator">>, Request, undefined, Opts) of + undefined -> + % If no generator is specified, use the default generator. +``` + +### default_generator + +Generate a new secret key using the default generator. + +```erlang +default_generator(_Opts) -> + {ok, hb_util:encode(crypto:strong_rand_bytes(64))}. +``` + +### execute_generator + +Execute a generator function. See `generate_secret/3` for more details. 
+ +```erlang +execute_generator(GeneratorPath, Opts) when is_binary(GeneratorPath) -> + hb_ao:resolve(GeneratorPath, Opts); +``` + +### execute_generator + +Execute a generator function. See `generate_secret/3` for more details. +Find all secrets in the cookie of a message. + +```erlang +execute_generator(Generator, Opts) -> + Path = hb_maps:get(<<"path">>, Generator, <<"generate">>, Opts), + hb_ao:resolve(Generator#{ <<"path">> => Path }, Opts). +``` + +### find_secrets + +Execute a generator function. See `generate_secret/3` for more details. +Find all secrets in the cookie of a message. + +```erlang +find_secrets(Request, Opts) -> + maybe + {ok, Cookie} ?= dev_codec_cookie:extract(Request, #{}, Opts), + [ + hb_maps:get(SecretRef, Cookie, secret_unavailable, Opts) + || + SecretRef = <<"secret-", _/binary>> <- hb_maps:keys(Cookie) + ] + else error -> [] + end. +``` + +### find_secret + +Find the secret key for the given committer, if it exists in the cookie. + +```erlang +find_secret(Request, Opts) -> + maybe + {ok, Committer} ?= hb_maps:find(<<"committer">>, Request, Opts), + find_secret(Committer, Request, Opts) + else error -> {error, no_secret} + end. +``` + +### find_secret + +```erlang +find_secret(Committer, Request, Opts) -> + maybe + {ok, Cookie} ?= dev_codec_cookie:extract(Request, #{}, Opts), + {ok, _Secret} ?= hb_maps:find(<<"secret-", Committer/binary>>, Cookie, Opts) + else error -> {error, not_found} + end. +``` + +### directly_invoke_commit_verify_test + +Call the cookie codec's `commit` and `verify` functions directly. 
+ +```erlang +directly_invoke_commit_verify_test() -> + Base = #{ <<"test-key">> => <<"test-value">> }, + CommittedMsg = + hb_message:commit( + Base, + #{}, + #{ + <<"commitment-device">> => <<"cookie@1.0">> + } + ), + ?event({committed_msg, CommittedMsg}), + ?assertEqual(1, length(hb_message:signers(CommittedMsg, #{}))), + VerifyReq = + apply_cookie( + CommittedMsg#{ + <<"committers">> => hb_message:signers(CommittedMsg, #{}) + }, + CommittedMsg, + #{} + ), + VerifyReqWithoutComms = hb_maps:without([<<"commitments">>], VerifyReq, #{}), + ?event({verify_req_without_comms, VerifyReqWithoutComms}), + ?assert(hb_message:verify(CommittedMsg, VerifyReqWithoutComms, #{})), + ok. +``` + +### http_set_get_cookies_test + +Set keys in a cookie and verify that they can be parsed into a message. + +```erlang +http_set_get_cookies_test() -> + Node = hb_http_server:start_node(#{}), + {ok, SetRes} = + hb_http:get( + Node, + <<"/~cookie@1.0/store?k1=v1&k2=v2">>, + #{} + ), + ?event(debug_cookie, {set_cookie_test, {set_res, SetRes}}), + ?assertMatch(#{ <<"set-cookie">> := _ }, SetRes), + Req = apply_cookie(#{ <<"path">> => <<"/~cookie@1.0/extract">> }, SetRes, #{}), + {ok, Res} = hb_http:get(Node, Req, #{}), + ?assertMatch(#{ <<"k1">> := <<"v1">>, <<"k2">> := <<"v2">> }, Res), + ok. 
+``` + +### apply_cookie + +Takes the cookies from the `GenerateResponse` and applies them to the + +```erlang +apply_cookie(NextReq, GenerateResponse, Opts) -> + {ok, Cookie} = dev_codec_cookie:extract(GenerateResponse, #{}, Opts), + {ok, NextWithParsedCookie} = dev_codec_cookie:store(NextReq, Cookie, Opts), + {ok, NextWithCookie} = + dev_codec_cookie:to( + NextWithParsedCookie, + #{ <<"format">> => <<"cookie">> }, + Opts + ), +``` + +--- + +*Generated from [dev_codec_cookie_auth.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_cookie_auth.erl)* diff --git a/docs/book/src/dev_codec_cookie_test_vectors.erl.md b/docs/book/src/dev_codec_cookie_test_vectors.erl.md new file mode 100644 index 000000000..54cf6b062 --- /dev/null +++ b/docs/book/src/dev_codec_cookie_test_vectors.erl.md @@ -0,0 +1,904 @@ +# dev_codec_cookie_test_vectors + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_cookie_test_vectors.erl) + +A battery of cookie parsing and encoding test vectors. + +--- + +### assert_set + +A battery of cookie parsing and encoding test vectors. +Assert that when given the inputs in the test set, the outputs are + +```erlang +assert_set(TestSet, Fun) -> + {Inputs, Expected} = maps:get(TestSet, test_data()), + ?event(match_cookie, {starting_group_match, {inputs, {explicit, Inputs}}}), + lists:foreach( + fun(Input) -> + Res = Fun(Input), + ?event( + match_cookie, + {matching, + {expected, {explicit, Expected}, {output, {explicit, Res}}} + } + ), + ?assertEqual(Expected, Res) + end, + Inputs + ). +``` + +### to_string + +Convert a cookie message to a string. +Convert a string to a cookie message. + +```erlang +to_string(CookieMsg) -> + {ok, BaseMsg} = dev_codec_cookie:store(#{}, CookieMsg, #{}), + {ok, Msg} = + dev_codec_cookie:to( + BaseMsg, + #{ <<"format">> => <<"set-cookie">> }, + #{} + ), + hb_maps:get(<<"set-cookie">>, Msg, [], #{}). +``` + +### from_string + +Convert a cookie message to a string. 
+Convert a string to a cookie message. + +```erlang +from_string(String) -> + {ok, BaseMsg} = + dev_codec_cookie:from( + #{ <<"set-cookie">> => String }, + #{}, + #{} + ), + {ok, Cookie} = dev_codec_cookie:extract(BaseMsg, #{}, #{}), + Cookie. +``` + +### test_data + +returns a map of tuples of the form `testset_name => {[before], after}`. + +```erlang +test_data() -> + #{ + from_string_raw_value => + { + [<<"k1=v1">>, <<"k1=\"v1\"">>], + #{ <<"k1">> => <<"v1">> } + }, + from_string_attributes => + { + [<<"k1=v1; k2=v2">>, <<"k1=\"v1\"; k2=\"v2\"">>], + #{ + <<"k1">> => + #{ + <<"value">> => <<"v1">>, + <<"attributes">> => #{ <<"k2">> => <<"v2">> } + } + } + }, + from_string_flags => + { + [<<"k1=v1; k2=v2; f1; f2">>, <<"k1=\"v1\"; k2=\"v2\"; f1; f2">>], + #{ + <<"k1">> => + #{ + <<"value">> => <<"v1">>, + <<"attributes">> => #{ <<"k2">> => <<"v2">> }, + <<"flags">> => [<<"f1">>, <<"f2">>] + } + } + }, + to_string_raw_value => + { + [ + #{ <<"k1">> => <<"v1">> }, + #{ <<"k1">> => #{ <<"value">> => <<"v1">> } }, + #{ + <<"k1">> => + #{ + <<"value">> => <<"v1">>, + <<"attributes">> => #{}, + <<"flags">> => [] + } + } + ], + [<<"k1=\"v1\"">>] + }, + to_string_attributes => + { + [ + #{ + <<"k1">> => + #{ + <<"value">> => <<"v1">>, + <<"attributes">> => #{ <<"k2">> => <<"v2">> } + } + }, + #{ + <<"k1">> => + #{ + <<"value">> => <<"v1">>, + <<"attributes">> => #{ <<"k2">> => <<"v2">> }, + <<"flags">> => [] + } + } + ], + [<<"k1=\"v1\"; k2=v2">>] + }, + to_string_flags => + { + [ + #{ + <<"k1">> => + #{ + <<"value">> => <<"v1">>, + <<"flags">> => [<<"f1">>, <<"f2">>] + } + }, + #{ + <<"k1">> => + #{ + <<"value">> => <<"v1">>, + <<"attributes">> => #{}, + <<"flags">> => [<<"f1">>, <<"f2">>] + } + } + ], + [<<"k1=\"v1\"; f1; f2">>] + }, + parse_realworld_1 => + { + [ + [ + <<"cart=110045_77895_53420; SameSite=Strict">>, + <<"affiliate=e4rt45dw; SameSite=Lax">> + ] + ], + #{ + <<"cart">> => + #{ + <<"value">> => <<"110045_77895_53420">>, + <<"attributes">> => #{ 
<<"SameSite">> => <<"Strict">> } + }, + <<"affiliate">> => + #{ + <<"value">> => <<"e4rt45dw">>, + <<"attributes">> => #{ <<"SameSite">> => <<"Lax">> } + } + } + }, + parse_user_settings_and_permissions => + { + [ + [ + <<"user_settings=notifications=true,privacy=strict,layout=grid; Path=/; HttpOnly; Secure">>, + <<"user_permissions=\"read;write;delete\"; Path=/; SameSite=None; Secure">> + ] + ], + #{ + <<"user_settings">> => + #{ + <<"value">> => <<"notifications=true,privacy=strict,layout=grid">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> }, + <<"flags">> => [<<"HttpOnly">>, <<"Secure">>] + }, + <<"user_permissions">> => + #{ + <<"value">> => <<"read;write;delete">>, + <<"attributes">> => #{ <<"Path">> => <<"/">>, <<"SameSite">> => <<"None">> }, + <<"flags">> => [<<"Secure">>] + } + } + }, + parse_session_and_temp_data => + { + [ + [ + <<"SESSION_ID=abc123xyz ; path= /dashboard ; samesite=Strict ; Secure">>, + <<"temp_data=cleanup_me; Max-Age=-1; Path=/">> + ] + ], + #{ + <<"SESSION_ID">> => + #{ + <<"value">> => <<"abc123xyz ">>, + <<"attributes">> => #{ <<"path">> => <<"/dashboard">>, <<"samesite">> => <<"Strict">> }, + <<"flags">> => [<<"Secure">>] + }, + <<"temp_data">> => + #{ + <<"value">> => <<"cleanup_me">>, + <<"attributes">> => #{ <<"Max-Age">> => <<"-1">>, <<"Path">> => <<"/">> } + } + } + }, + parse_empty_and_anonymous => + { + [ + [ + <<"user_preference=; Path=/; HttpOnly">>, + <<"=anonymous_session_123; Path=/guest">> + ] + ], + #{ + <<"user_preference">> => + #{ + <<"value">> => <<"">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> }, + <<"flags">> => [<<"HttpOnly">>] + }, + <<>> => + #{ + <<"value">> => <<"anonymous_session_123">>, + <<"attributes">> => #{ <<"Path">> => <<"/guest">> } + } + } + }, + parse_app_config_and_analytics => + { + [ + [ + <<"$app_config$=theme@dark!%20mode; Path=/">>, + 
<<"analytics_session_data_with_very_long_name_for_tracking_purposes=comprehensive_user_behavior_analytics_data_including_page_views_click_events_scroll_depth_time_spent_geographic_location_device_info_browser_details_and_more; Path=/">> + ] + ], + #{ + <<"$app_config$">> => + #{ + <<"value">> => <<"theme@dark! mode">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> } + }, + <<"analytics_session_data_with_very_long_name_for_tracking_purposes">> => + #{ + <<"value">> => <<"comprehensive_user_behavior_analytics_data_including_page_views_click_events_scroll_depth_time_spent_geographic_location_device_info_browser_details_and_more">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> } + } + } + }, + parse_debug_and_tracking => + { + [ + [ + <<"debug_info=\\tIndented\\t\\nMultiline\\n; Path=/">>, + <<"tracking_id=user_12345; CustomAttr=CustomValue; Analytics=Enabled; Path=/; HttpOnly">> + ] + ], + #{ + <<"debug_info">> => + #{ + <<"value">> => <<"\\tIndented\\t\\nMultiline\\n">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> } + }, + <<"tracking_id">> => + #{ + <<"value">> => <<"user_12345">>, + <<"attributes">> => #{ + <<"CustomAttr">> => <<"CustomValue">>, + <<"Analytics">> => <<"Enabled">>, + <<"Path">> => <<"/">> + }, + <<"flags">> => [<<"HttpOnly">>] + } + } + }, + parse_cache_and_form_token => + { + [ + [ + <<"cache_bust=v1.2.3; Expires=Mon, 99 Feb 2099 25:99:99 GMT; Path=/">>, + <<"form_token=form_abc123; SameSite=Strick; Secure">> + ] + ], + #{ + <<"cache_bust">> => + #{ + <<"value">> => <<"v1.2.3">>, + <<"attributes">> => #{ + <<"Expires">> => <<"Mon, 99 Feb 2099 25:99:99 GMT">>, + <<"Path">> => <<"/">> + } + }, + <<"form_token">> => + #{ + <<"value">> => <<"form_abc123">>, + <<"attributes">> => #{ <<"SameSite">> => <<"Strick">> }, + <<"flags">> => [<<"Secure">>] + } + } + }, + parse_token_and_reactions => + { + [ + [ + 
<<"access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c; Path=/; HttpOnly; Secure">>, + <<"reaction_prefs=👍👎; Path=/; Secure">> + ] + ], + #{ + <<"access_token">> => + #{ + <<"value">> => <<"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> }, + <<"flags">> => [<<"HttpOnly">>, <<"Secure">>] + }, + <<"reaction_prefs">> => + #{ + <<"value">> => <<"👍👎">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> }, + <<"flags">> => [<<"Secure">>] + } + } + }, + parse_error_log_and_auth_token => + { + [ + [ + <<"error_log=\"timestamp=2024-01-15 10:30:00\\nlevel=ERROR\\tmessage=Database connection failed\"; Path=/">>, + <<"auth_token=bearer_xyz789; Secure; Path=/api; Secure; HttpOnly">> + ] + ], + #{ + <<"error_log">> => + #{ + <<"value">> => <<"timestamp=2024-01-15 10:30:00\\nlevel=ERROR\\tmessage=Database connection failed">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> } + }, + <<"auth_token">> => + #{ + <<"value">> => <<"bearer_xyz789">>, + <<"attributes">> => #{ <<"Path">> => <<"/api">> }, + <<"flags">> => [<<"HttpOnly">>,<<"Secure">>, <<"Secure">>] + } + } + }, + parse_csrf_and_quick_setting => + { + [ + [ + <<"csrf_token=abc123; \"HttpOnly\"; Path=/">>, + <<"quick_setting=\"enabled\"">> + ] + ], + #{ + <<"csrf_token">> => + #{ + <<"value">> => <<"abc123">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> }, + <<"flags">> => [<<"HttpOnly">>] + }, + <<"quick_setting">> => <<"enabled">> + } + }, + parse_admin_and_upload => + { + [ + [ + <<"secret_key=confidential; Path=%2Fadmin">>, + <<"admin_flag=true; Path=/">> + ] + ], + #{ + <<"secret_key">> => + #{ + <<"value">> => <<"confidential">>, + <<"attributes">> => #{ <<"Path">> => <<"%2Fadmin">> } + }, + <<"admin_flag">> => + #{ + <<"value">> => <<"true">>, + 
<<"attributes">> => #{ <<"Path">> => <<"/">> } + } + } + }, + parse_search_and_tags => + { + [ + [ + <<"search_history=\"query,results\"; Path=/">>, + <<"user_tags=\"work,personal\"; Path=/">> + ] + ], + #{ + <<"search_history">> => + #{ + <<"value">> => <<"query,results">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> } + }, + <<"user_tags">> => + #{ + <<"value">> => <<"work,personal">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> } + } + } + }, + to_string_realworld_1 => + { + [ + #{ + <<"cart">> => + #{ + <<"value">> => <<"110045_77895_53420">>, + <<"attributes">> => #{ <<"SameSite">> => <<"Strict">> } + }, + <<"affiliate">> => + #{ + <<"value">> => <<"e4rt45dw">>, + <<"attributes">> => #{ <<"SameSite">> => <<"Lax">> } + } + } + ], + [ + <<"affiliate=\"e4rt45dw\"; SameSite=Lax">>, + <<"cart=\"110045_77895_53420\"; SameSite=Strict">> + ] + }, + to_string_user_settings_and_permissions => + { + [ + #{ + <<"user_settings">> => + #{ + <<"value">> => <<"notifications=true,privacy=strict,layout=grid">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> }, + <<"flags">> => [<<"HttpOnly">>, <<"Secure">>] + }, + <<"user_permissions">> => + #{ + <<"value">> => <<"read;write;delete">>, + <<"attributes">> => #{ <<"Path">> => <<"/">>, <<"SameSite">> => <<"None">> }, + <<"flags">> => [<<"Secure">>] + } + } + ], + [ + <<"user_permissions=\"read;write;delete\"; Path=/; SameSite=None; Secure">>, + <<"user_settings=\"notifications=true,privacy=strict,layout=grid\"; Path=/; HttpOnly; Secure">> + ] + }, + to_string_session_and_temp_data => + { + [ + #{ + <<"SESSION_ID">> => + #{ + <<"value">> => <<"abc123xyz ">>, + <<"attributes">> => #{ <<"path">> => <<"/dashboard">>, <<"samesite">> => <<"Strict">> }, + <<"flags">> => [<<"Secure">>] + }, + <<"temp_data">> => + #{ + <<"value">> => <<"cleanup_me">>, + <<"attributes">> => #{ <<"Max-Age">> => <<"-1">>, <<"Path">> => <<"/">> } + } + } + ], + [ + <<"SESSION_ID=\"abc123xyz \"; path=/dashboard; samesite=Strict; Secure">>, + 
<<"temp_data=\"cleanup_me\"; Max-Age=-1; Path=/">> + ] + }, + to_string_empty_and_anonymous => + { + [ + #{ + <<"user_preference">> => + #{ + <<"value">> => <<"">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> }, + <<"flags">> => [<<"HttpOnly">>] + }, + <<>> => + #{ + <<"value">> => <<"anonymous_session_123">>, + <<"attributes">> => #{ <<"Path">> => <<"/guest">> } + } + } + ], + [ + <<"=\"anonymous_session_123\"; Path=/guest">>, + <<"user_preference=\"\"; Path=/; HttpOnly">> + ] + }, + to_string_app_config_and_analytics => + { + [ + #{ + <<"$app_config$">> => + #{ + <<"value">> => <<"theme@dark!%20mode">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> } + }, + <<"analytics_session_data_with_very_long_name_for_tracking_purposes">> => + #{ + <<"value">> => <<"comprehensive_user_behavior_analytics_data_including_page_views_click_events_scroll_depth_time_spent_geographic_location_device_info_browser_details_and_more">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> } + } + } + ], + [ + <<"$app_config$=\"theme@dark!%20mode\"; Path=/">>, + <<"analytics_session_data_with_very_long_name_for_tracking_purposes=\"comprehensive_user_behavior_analytics_data_including_page_views_click_events_scroll_depth_time_spent_geographic_location_device_info_browser_details_and_more\"; Path=/">> + ] + }, + to_string_debug_and_tracking => + { + [ + #{ + <<"debug_info">> => + #{ + <<"value">> => <<"\\tIndented\\t\\nMultiline\\n">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> } + }, + <<"tracking_id">> => + #{ + <<"value">> => <<"user_12345">>, + <<"attributes">> => #{ + <<"CustomAttr">> => <<"CustomValue">>, + <<"Analytics">> => <<"Enabled">>, + <<"Path">> => <<"/">> + }, + <<"flags">> => [<<"HttpOnly">>] + } + } + ], + [ + <<"debug_info=\"\\tIndented\\t\\nMultiline\\n\"; Path=/">>, + <<"tracking_id=\"user_12345\"; Analytics=Enabled; CustomAttr=CustomValue; Path=/; HttpOnly">> + ] + }, + to_string_cache_and_form_token => + { + [ + #{ + <<"cache_bust">> => + #{ + <<"value">> => 
<<"v1.2.3">>, + <<"attributes">> => #{ + <<"Expires">> => <<"Mon, 99 Feb 2099 25:99:99 GMT">>, + <<"Path">> => <<"/">> + } + }, + <<"form_token">> => + #{ + <<"value">> => <<"form_abc123">>, + <<"attributes">> => #{ <<"SameSite">> => <<"Strick">> }, + <<"flags">> => [<<"Secure">>] + } + } + ], + [ + <<"cache_bust=\"v1.2.3\"; Expires=Mon, 99 Feb 2099 25:99:99 GMT; Path=/">>, + <<"form_token=\"form_abc123\"; SameSite=Strick; Secure">> + ] + }, + to_string_token_and_reactions => + { + [ + #{ + <<"access_token">> => + #{ + <<"value">> => <<"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> }, + <<"flags">> => [<<"HttpOnly">>, <<"Secure">>] + }, + <<"reaction_prefs">> => + #{ + <<"value">> => <<"👍👎">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> }, + <<"flags">> => [<<"Secure">>] + } + } + ], + [ + <<"access_token=\"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c\"; Path=/; HttpOnly; Secure">>, + <<"reaction_prefs=\"👍👎\"; Path=/; Secure">> + ] + }, + to_string_error_log_and_auth_token => + { + [ + #{ + <<"error_log">> => + #{ + <<"value">> => <<"timestamp=2024-01-15 10:30:00\\nlevel=ERROR\\tmessage=Database connection failed">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> } + }, + <<"auth_token">> => + #{ + <<"value">> => <<"bearer_xyz789">>, + <<"attributes">> => #{ <<"Path">> => <<"/api">> }, + <<"flags">> => [<<"HttpOnly">>, <<"Secure">>, <<"Secure">>] + } + } + ], + [ + <<"auth_token=\"bearer_xyz789\"; Path=/api; HttpOnly; Secure; Secure">>, + <<"error_log=\"timestamp=2024-01-15 10:30:00\\nlevel=ERROR\\tmessage=Database connection failed\"; Path=/">> + ] + }, + to_string_csrf_and_quick_setting => + { + [ + #{ + <<"csrf_token">> => + #{ + <<"value">> => <<"abc123">>, + <<"attributes">> => #{ 
<<"Path">> => <<"/">> }, + <<"flags">> => [<<"HttpOnly">>] + }, + <<"quick_setting">> => <<"enabled">> + } + ], + [ + <<"csrf_token=\"abc123\"; Path=/; HttpOnly">>, + <<"quick_setting=\"enabled\"">> + ] + }, + to_string_admin_and_upload => + { + [ + #{ + <<"secret_key">> => + #{ + <<"value">> => <<"confidential">>, + <<"attributes">> => #{ <<"Path">> => <<"%2Fadmin">> } + }, + <<"admin_flag">> => + #{ + <<"value">> => <<"true">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> } + } + } + ], + [ + <<"admin_flag=\"true\"; Path=/">>, + <<"secret_key=\"confidential\"; Path=%2Fadmin">> + ] + }, + to_string_search_and_tags => + { + [ + #{ + <<"search_history">> => + #{ + <<"value">> => <<"query,results">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> } + }, + <<"user_tags">> => + #{ + <<"value">> => <<"work,personal">>, + <<"attributes">> => #{ <<"Path">> => <<"/">> } + } + } + ], + [ + <<"search_history=\"query,results\"; Path=/">>, + <<"user_tags=\"work,personal\"; Path=/">> + ] + } + }. +``` + +### from_string_basic_test + +```erlang +from_string_basic_test() -> + assert_set(from_string_raw_value, fun from_string/1). +``` + +### from_string_attributes_test + +```erlang +from_string_attributes_test() -> + assert_set(from_string_attributes, fun from_string/1). +``` + +### from_string_flags_test + +```erlang +from_string_flags_test() -> + assert_set(from_string_flags, fun from_string/1). +``` + +### to_string_basic_test + +```erlang +to_string_basic_test() -> + assert_set(to_string_raw_value, fun to_string/1). +``` + +### to_string_attributes_test + +```erlang +to_string_attributes_test() -> + assert_set(to_string_attributes, fun to_string/1). +``` + +### to_string_flags_test + +```erlang +to_string_flags_test() -> + assert_set(to_string_flags, fun to_string/1). +``` + +### parse_realworld_test + +```erlang +parse_realworld_test() -> + assert_set(parse_realworld_1, fun from_string/1). 
+``` + +### parse_user_settings_and_permissions_test + +```erlang +parse_user_settings_and_permissions_test() -> + assert_set(parse_user_settings_and_permissions, fun from_string/1). +``` + +### parse_session_and_temp_data_test + +```erlang +parse_session_and_temp_data_test() -> + assert_set(parse_session_and_temp_data, fun from_string/1). +``` + +### parse_empty_and_anonymous_test + +```erlang +parse_empty_and_anonymous_test() -> + assert_set(parse_empty_and_anonymous, fun from_string/1). +``` + +### parse_app_config_and_analytics_test + +```erlang +parse_app_config_and_analytics_test() -> + assert_set(parse_app_config_and_analytics, fun from_string/1). +``` + +### parse_debug_and_tracking_test + +```erlang +parse_debug_and_tracking_test() -> + assert_set(parse_debug_and_tracking, fun from_string/1). +``` + +### parse_cache_and_form_token_test + +```erlang +parse_cache_and_form_token_test() -> + assert_set(parse_cache_and_form_token, fun from_string/1). +``` + +### parse_token_and_reactions_test + +```erlang +parse_token_and_reactions_test() -> + assert_set(parse_token_and_reactions, fun from_string/1). +``` + +### parse_error_log_and_auth_token_test + +```erlang +parse_error_log_and_auth_token_test() -> + assert_set(parse_error_log_and_auth_token, fun from_string/1). +``` + +### parse_csrf_and_quick_setting_test + +```erlang +parse_csrf_and_quick_setting_test() -> + assert_set(parse_csrf_and_quick_setting, fun from_string/1). +``` + +### parse_admin_and_upload_test + +```erlang +parse_admin_and_upload_test() -> + assert_set(parse_admin_and_upload, fun from_string/1). +``` + +### parse_search_and_tags_test + +```erlang +parse_search_and_tags_test() -> + assert_set(parse_search_and_tags, fun from_string/1). +``` + +### to_string_realworld_1_test + +```erlang +to_string_realworld_1_test() -> + assert_set(to_string_realworld_1, fun to_string/1). 
+``` + +### to_string_user_settings_and_permissions_test + +```erlang +to_string_user_settings_and_permissions_test() -> + assert_set(to_string_user_settings_and_permissions, fun to_string/1). +``` + +### to_string_session_and_temp_data_test + +```erlang +to_string_session_and_temp_data_test() -> + assert_set(to_string_session_and_temp_data, fun to_string/1). +``` + +### to_string_empty_and_anonymous_test + +```erlang +to_string_empty_and_anonymous_test() -> + assert_set(to_string_empty_and_anonymous, fun to_string/1). +``` + +### to_string_app_config_and_analytics_test + +```erlang +to_string_app_config_and_analytics_test() -> + assert_set(to_string_app_config_and_analytics, fun to_string/1). +``` + +### to_string_debug_and_tracking_test + +```erlang +to_string_debug_and_tracking_test() -> + assert_set(to_string_debug_and_tracking, fun to_string/1). +``` + +### to_string_cache_and_form_token_test + +```erlang +to_string_cache_and_form_token_test() -> + assert_set(to_string_cache_and_form_token, fun to_string/1). +``` + +### to_string_token_and_reactions_test + +```erlang +to_string_token_and_reactions_test() -> + assert_set(to_string_token_and_reactions, fun to_string/1). +``` + +### to_string_error_log_and_auth_token_test + +```erlang +to_string_error_log_and_auth_token_test() -> + assert_set(to_string_error_log_and_auth_token, fun to_string/1). +``` + +### to_string_csrf_and_quick_setting_test + +```erlang +to_string_csrf_and_quick_setting_test() -> + assert_set(to_string_csrf_and_quick_setting, fun to_string/1). +``` + +### to_string_admin_and_upload_test + +```erlang +to_string_admin_and_upload_test() -> + assert_set(to_string_admin_and_upload, fun to_string/1). 
+``` + +### to_string_search_and_tags_test + +```erlang +to_string_search_and_tags_test() -> +``` + +--- + +*Generated from [dev_codec_cookie_test_vectors.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_cookie_test_vectors.erl)* diff --git a/docs/book/src/dev_codec_flat.erl.md b/docs/book/src/dev_codec_flat.erl.md new file mode 100644 index 000000000..4bb41a9a6 --- /dev/null +++ b/docs/book/src/dev_codec_flat.erl.md @@ -0,0 +1,266 @@ +# dev_codec_flat + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_flat.erl) + +A codec for turning TABMs into/from flat Erlang maps that have +(potentially multi-layer) paths as their keys, and a normal TABM binary as +their value. + +--- + +## Exported Functions + +- `commit/3` +- `deserialize/1` +- `from/3` +- `serialize/1` +- `serialize/2` +- `to/3` +- `verify/3` + +--- + +### commit + +A codec for turning TABMs into/from flat Erlang maps that have + +```erlang +commit(Msg, Req, Opts) -> dev_codec_httpsig:commit(Msg, Req, Opts). +``` + +### verify + +A codec for turning TABMs into/from flat Erlang maps that have +Convert a flat map to a TABM. + +```erlang +verify(Msg, Req, Opts) -> dev_codec_httpsig:verify(Msg, Req, Opts). +``` + +### from + +A codec for turning TABMs into/from flat Erlang maps that have +Convert a flat map to a TABM. + +```erlang +from(Bin, _, _Opts) when is_binary(Bin) -> {ok, Bin}; +``` + +### from + +A codec for turning TABMs into/from flat Erlang maps that have +Convert a flat map to a TABM. + +```erlang +from(Map, Req, Opts) when is_map(Map) -> + {ok, + maps:fold( + fun(Path, Value, Acc) -> + case Value of + [] -> + ?event(error, + {empty_list_value, + {path, Path}, + {value, Value}, + {map, Map} + } + ); + _ -> + ok + end, + hb_util:deep_set( + hb_path:term_to_path_parts(Path, Opts), + hb_util:ok(from(Value, Req, Opts)), + Acc, + Opts + ) + end, + #{}, + Map + ) + }. +``` + +### to + +Convert a TABM to a flat map. 
+ +```erlang +to(Bin, _, _Opts) when is_binary(Bin) -> {ok, Bin}; +``` + +### to + +Convert a TABM to a flat map. + +```erlang +to(Map, Req, Opts) when is_map(Map) -> + Res = + maps:fold( + fun(Key, Value, Acc) -> + case to(Value, Req, Opts) of + {ok, SubMap} when is_map(SubMap) -> + maps:fold( + fun(SubKey, SubValue, InnerAcc) -> + maps:put( + hb_path:to_binary([Key, SubKey]), + SubValue, + InnerAcc + ) + end, + Acc, + SubMap + ); + {ok, SimpleValue} -> + maps:put(hb_path:to_binary([Key]), SimpleValue, Acc) + end + end, + #{}, + Map + ), + {ok, Res}. +``` + +### serialize + +```erlang +serialize(Map) when is_map(Map) -> + serialize(Map, #{}). +``` + +### serialize + +```erlang +serialize(Map, Opts) when is_map(Map) -> + Flattened = hb_message:convert(Map, <<"flat@1.0">>, #{}), + {ok, + iolist_to_binary(lists:foldl( + fun(Key, Acc) -> + [ + Acc, + hb_path:to_binary(Key), + <<": ">>, + hb_maps:get(Key, Flattened, Opts), <<"\n">> + ] + end, + <<>>, + hb_util:to_sorted_keys(Flattened, Opts) + ) + ) + }. +``` + +### deserialize + +```erlang +deserialize(Bin) when is_binary(Bin) -> + Flat = lists:foldl( + fun(Line, Acc) -> + case binary:split(Line, <<": ">>, [global]) of + [Key, Value] -> + Acc#{ Key => Value }; + _ -> + Acc + end + end, + #{}, + binary:split(Bin, <<"\n">>, [global]) + ), + {ok, hb_message:convert(Flat, <<"structured@1.0">>, <<"flat@1.0">>, #{})}. +%%% Tests +``` + +### simple_conversion_test + +```erlang +simple_conversion_test() -> + Flat = #{[<<"a">>] => <<"value">>}, + Nested = #{<<"a">> => <<"value">>}, + ?assert(hb_message:match(Nested, hb_util:ok(dev_codec_flat:from(Flat, #{}, #{})))), + ?assert(hb_message:match(Flat, hb_util:ok(dev_codec_flat:to(Nested, #{}, #{})))). 
+``` + +### nested_conversion_test + +```erlang +nested_conversion_test() -> + Flat = #{<<"a/b">> => <<"value">>}, + Nested = #{<<"a">> => #{<<"b">> => <<"value">>}}, + Unflattened = hb_util:ok(dev_codec_flat:from(Flat, #{}, #{})), + Flattened = hb_util:ok(dev_codec_flat:to(Nested, #{}, #{})), + ?assert(hb_message:match(Nested, Unflattened)), + ?assert(hb_message:match(Flat, Flattened)). +``` + +### multiple_paths_test + +```erlang +multiple_paths_test() -> + Flat = #{ + <<"x/y">> => <<"1">>, + <<"x/z">> => <<"2">>, + <<"a">> => <<"3">> + }, + Nested = #{ + <<"x">> => #{ + <<"y">> => <<"1">>, + <<"z">> => <<"2">> + }, + <<"a">> => <<"3">> + }, + ?assert(hb_message:match(Nested, hb_util:ok(dev_codec_flat:from(Flat, #{}, #{})))), + ?assert(hb_message:match(Flat, hb_util:ok(dev_codec_flat:to(Nested, #{}, #{})))). +``` + +### path_list_test + +```erlang +path_list_test() -> + Nested = #{ + <<"x">> => #{ + [<<"y">>, <<"z">>] => #{ + <<"a">> => <<"2">> + }, + <<"a">> => <<"2">> + } + }, + Flat = hb_util:ok(dev_codec_flat:to(Nested, #{}, #{})), + lists:foreach( + fun(Key) -> + ?assert(not lists:member($\n, binary_to_list(Key))) + end, + hb_maps:keys(Flat, #{}) + ). +``` + +### binary_passthrough_test + +```erlang +binary_passthrough_test() -> + Bin = <<"raw binary">>, + ?assertEqual(Bin, hb_util:ok(dev_codec_flat:from(Bin, #{}, #{}))), + ?assertEqual(Bin, hb_util:ok(dev_codec_flat:to(Bin, #{}, #{}))). +``` + +### deep_nesting_test + +```erlang +deep_nesting_test() -> + Flat = #{<<"a/b/c/d">> => <<"deep">>}, + Nested = #{<<"a">> => #{<<"b">> => #{<<"c">> => #{<<"d">> => <<"deep">>}}}}, + Unflattened = hb_util:ok(dev_codec_flat:from(Flat, #{}, #{})), + Flattened = hb_util:ok(dev_codec_flat:to(Nested, #{}, #{})), + ?assert(hb_message:match(Nested, Unflattened)), + ?assert(hb_message:match(Flat, Flattened)). 
+``` + +### empty_map_test + +```erlang +empty_map_test() -> + ?assertEqual(#{}, hb_util:ok(dev_codec_flat:from(#{}, #{}, #{}))), +``` + +--- + +*Generated from [dev_codec_flat.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_flat.erl)* diff --git a/docs/book/src/dev_codec_http_auth.erl.md b/docs/book/src/dev_codec_http_auth.erl.md new file mode 100644 index 000000000..50b677225 --- /dev/null +++ b/docs/book/src/dev_codec_http_auth.erl.md @@ -0,0 +1,210 @@ +# dev_codec_http_auth + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_http_auth.erl) + +Implements a two-step authentication process for HTTP requests, using +the `Basic` authentication scheme. This device is a viable implementation +of the `generator` interface type employed by `~auth-hook@1.0`, as well as +the `~message@1.0` commitment scheme interface. +`http-auth@1.0``s `commit` and `verify` keys proxy to the `~httpsig@1.0` +secret key HMAC commitment scheme, utilizing a secret key derived from the +user's authentication information. Callers may also utilize the `generate` +key directly to derive entropy from HTTP Authorization headers provided by +the user. If no Authorization header is provided, the `generate` key will +return a `401 Unauthorized` response, which triggers a recipient`s browser +to prompt the user for authentication details and resend the request. +The `generate` key derives secrets for it's users by calling PBKDF2 with +the user's authentication information. The parameters for the PBKDF2 +algorithm are configurable, and can be specified in the request message: +
+  salt:       The salt to use for the PBKDF2 algorithm. Defaults to
+              `sha256("constant:ao")`.
+  iterations: The number of iterations to use for the PBKDF2 algorithm.
+              Defaults to `1,200,000`.
+  alg:        The hashing algorithm to use with PBKDF2. Defaults to
+              `sha256`.
+  key-length: The length of the key to derive from PBKDF2. Defaults to
+              `64`.
+
+The default iteration count was chosen at two times the recommendation of +OWASP in 2023 (600,000), and executes at a run rate of ~5-10 key derivations +per second on modern CPU hardware. Additionally, the default salt was chosen +such that it is a public constant (needed in order for reproducibility +between nodes), and hashed in order to provide additional entropy, in +alignment with RFC 8018, Section 4.1. + +--- + +## Exported Functions + +- `commit/3` +- `generate/3` +- `verify/3` + +--- + +### commit + +Implements a two-step authentication process for HTTP requests, using +The default salt to use for the PBKDF2 algorithm. This value must be +Generate or extract a new secret and commit to the message with the + +```erlang +commit(Base, Req, Opts) -> + case generate(Base, Req, Opts) of + {ok, Key} -> + {ok, CommitRes} = + dev_codec_httpsig_proxy:commit( + <<"http-auth@1.0">>, + Key, + Base, + Req, + Opts + ), + ?event({commit_result, CommitRes}), + {ok, CommitRes}; + {error, Err} -> + {error, Err} + end. +``` + +### verify + +Verify a given `Base` message with a derived `Key` using the + +```erlang +verify(Base, RawReq, Opts) -> + ?event({verify_invoked, {base, Base}, {req, RawReq}}), + {ok, Key} = generate(Base, RawReq, Opts), + ?event({verify_found_key, {key, Key}, {base, Base}, {req, RawReq}}), + {ok, VerifyRes} = + dev_codec_httpsig_proxy:verify( + Key, + Base, + RawReq, + Opts + ), + ?event({verify_result, VerifyRes}), + {ok, VerifyRes}. +``` + +### generate + +Collect authentication information from the client. If the `raw` flag + +```erlang +generate(_Msg, ReqLink, Opts) when ?IS_LINK(ReqLink) -> + generate(_Msg, hb_cache:ensure_loaded(ReqLink, Opts), Opts); +``` + +### generate + +Collect authentication information from the client. If the `raw` flag + +```erlang +generate(_Msg, #{ <<"secret">> := Secret }, _Opts) -> + {ok, Secret}; +``` + +### generate + +Collect authentication information from the client. 
If the `raw` flag + +```erlang +generate(_Msg, Req, Opts) -> + case hb_maps:get(<<"authorization">>, Req, undefined, Opts) of + <<"Basic ", Auth/binary>> -> + Decoded = base64:decode(Auth), + ?event(key_gen, {generated_key, {auth, Auth}, {decoded, Decoded}}), + case hb_maps:get(<<"raw">>, Req, false, Opts) of + true -> {ok, Decoded}; + false -> derive_key(Decoded, Req, Opts) + end; + undefined -> + {error, + #{ + <<"status">> => 401, + <<"www-authenticate">> => <<"Basic">>, + <<"details">> => <<"No authorization header provided.">> + } + }; + Unrecognized -> + {error, + #{ + <<"status">> => 400, + <<"details">> => + <<"Unrecognized authorization header: ", Unrecognized/binary>> + } + } + end. +``` + +### derive_key + +Derive a key from the authentication information using the PBKDF2 + +```erlang +derive_key(Decoded, Req, Opts) -> + Alg = hb_util:atom(hb_maps:get(<<"alg">>, Req, <<"sha256">>, Opts)), + Salt = + hb_maps:get( + <<"salt">>, + Req, + hb_crypto:sha256(?DEFAULT_SALT), + Opts + ), + Iterations = hb_maps:get(<<"iterations">>, Req, 2 * 600_000, Opts), + KeyLength = hb_maps:get(<<"key-length">>, Req, 64, Opts), + ?event(key_gen, + {derive_key, + {alg, Alg}, + {salt, Salt}, + {iterations, Iterations}, + {key_length, KeyLength} + } + ), + case hb_crypto:pbkdf2(Alg, Decoded, Salt, Iterations, KeyLength) of + {ok, Key} -> + EncodedKey = hb_util:encode(Key), + {ok, EncodedKey}; + {error, Err} -> + ?event(key_gen, + {pbkdf2_error, + {alg, Alg}, + {salt, Salt}, + {iterations, Iterations}, + {key_length, KeyLength}, + {error, Err} + } + ), + {error, + #{ + <<"status">> => 500, + <<"details">> => <<"Failed to derive key.">> + } + } + end. 
+``` + +### benchmark_pbkdf2_test + +```erlang +benchmark_pbkdf2_test() -> + Key = crypto:strong_rand_bytes(32), + Iterations = 2 * 600_000, + KeyLength = 32, + Derivations = + hb_test_utils:benchmark( + fun() -> + hb_crypto:pbkdf2(sha256, Key, <<"salt">>, Iterations, KeyLength) + end + ), + hb_test_utils:benchmark_print( + <<"Derived">>, + <<"keys (1.2m iterations each)">>, + Derivations +``` + +--- + +*Generated from [dev_codec_http_auth.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_http_auth.erl)* diff --git a/docs/book/src/dev_codec_httpsig.erl.md b/docs/book/src/dev_codec_httpsig.erl.md new file mode 100644 index 000000000..ae5c48ab9 --- /dev/null +++ b/docs/book/src/dev_codec_httpsig.erl.md @@ -0,0 +1,447 @@ +# dev_codec_httpsig + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig.erl) + +This module implements HTTP Message Signatures as described in RFC-9421 +(https://datatracker.ietf.org/doc/html/rfc9421), as an AO-Core device. +It implements the codec standard (from/1, to/1), as well as the optional +commitment functions (id/3, sign/3, verify/3). The commitment functions +are found in this module, while the codec functions are relayed to the +`dev_codec_httpsig_conv` module. + +--- + +## Exported Functions + +- `add_content_digest/2` +- `commit/3` +- `from/3` +- `normalize_for_encoding/3` +- `serialize/2` +- `serialize/3` +- `to/3` +- `verify/3` + +--- + +### to + +This module implements HTTP Message Signatures as described in RFC-9421 + +```erlang +to(Msg, Req, Opts) -> dev_codec_httpsig_conv:to(Msg, Req, Opts). +``` + +### from + +This module implements HTTP Message Signatures as described in RFC-9421 +Generate the `Opts` to use during AO-Core operations in the codec. + +```erlang +from(Msg, Req, Opts) -> dev_codec_httpsig_conv:from(Msg, Req, Opts). 
+``` + +### opts + +This module implements HTTP Message Signatures as described in RFC-9421 +Generate the `Opts` to use during AO-Core operations in the codec. + +```erlang +opts(RawOpts) -> + RawOpts#{ + hashpath => ignore, + cache_control => [<<"no-cache">>, <<"no-store">>], + force_message => false + }. +``` + +### serialize + +A helper utility for creating a direct encoding of a HTTPSig message. + +```erlang +serialize(Msg, Opts) -> serialize(Msg, #{}, Opts). +``` + +### serialize + +A helper utility for creating a direct encoding of a HTTPSig message. + +```erlang +serialize(Msg, #{ <<"format">> := <<"components">> }, Opts) -> + % Convert to HTTPSig via TABM through calling `hb_message:convert` rather + % than executing `to/3` directly. This ensures that our responses are + % normalized. +``` + +### serialize + +```erlang +serialize(Msg, _Req, Opts) -> + % We assume the default format of `binary` if none of the prior clauses + % match. +``` + +### verify + +```erlang +verify(Base, Req, RawOpts) -> + % A rsa-pss-sha512 commitment is verified by regenerating the signature + % base and validating against the signature. +``` + +### commit + +Commit to a message using the HTTP-Signature format. We use the `type` + +```erlang +commit(Msg, Req = #{ <<"type">> := <<"unsigned">> }, Opts) -> + commit(Msg, Req#{ <<"type">> => <<"hmac-sha256">> }, Opts); +``` + +### commit + +Commit to a message using the HTTP-Signature format. We use the `type` + +```erlang +commit(Msg, Req = #{ <<"type">> := <<"signed">> }, Opts) -> + commit(Msg, Req#{ <<"type">> => <<"rsa-pss-sha512">> }, Opts); +``` + +### commit + +Commit to a message using the HTTP-Signature format. 
We use the `type` + +```erlang +commit(MsgToSign, Req = #{ <<"type">> := <<"rsa-pss-sha512">> }, RawOpts) -> + ?event( + {generating_rsa_pss_sha512_commitment, {msg, MsgToSign}, {req, Req}} + ), + Opts = opts(RawOpts), + Wallet = hb_opts:get(priv_wallet, no_viable_wallet, Opts), + if Wallet =:= no_viable_wallet -> + throw({cannot_commit, no_viable_wallet, MsgToSign}); + true -> + ok + end, + % Utilize the hashpath, if present, as the tag for the commitment. +``` + +### commit + +```erlang +commit(BaseMsg, Req = #{ <<"type">> := <<"hmac-sha256">> }, RawOpts) -> + % Extract the key material from the request. +``` + +### maybe_bundle_tag_commitment + +Annotate the commitment with the `bundle` key if the request contains + +```erlang +maybe_bundle_tag_commitment(Commitment, Req, _Opts) -> + case hb_util:atom(maps:get(<<"bundle">>, Req, false)) of + true -> Commitment#{ <<"bundle">> => <<"true">> }; + false -> Commitment + end. +``` + +### keys_to_commit + +Derive the set of keys to commit to from a `commit` request and a + +```erlang +keys_to_commit(_Base, #{ <<"committed">> := Explicit}, _Opts) -> + % Case 1: Explicitly provided keys to commit. +``` + +### keys_to_commit + +```erlang +keys_to_commit(Base, _Req, Opts) -> + % Extract the set of committed keys from the message. +``` + +### add_content_digest + +If the `body` key is present and a binary, replace it with a + +```erlang +add_content_digest(Msg, _Opts) -> + case maps:get(<<"body">>, Msg, not_found) of + Body when is_binary(Body) -> + % Remove the body from the message and add the content-digest, + % encoded as a structured field. +``` + +### normalize_for_encoding + +Given a base message and a commitment, derive the message and commitment + +```erlang +normalize_for_encoding(Msg, Commitment, Opts) -> + % Extract the requested keys to include in the signature base. +``` + +### key_present + +Calculate if a key or its `+link` TABM variant is present in a message. 
+create the signature base that will be signed in order to create the + +```erlang +key_present(Key, Msg) -> + NormalizedKey = hb_ao:normalize_key(Key), + maps:is_key(NormalizedKey, Msg) + orelse maps:is_key(<>, Msg). +%% +``` + +### signature_base + +Calculate if a key or its `+link` TABM variant is present in a message. +create the signature base that will be signed in order to create the + +```erlang +signature_base(EncodedMsg, Commitment, Opts) -> + ComponentsLines = + signature_components_line( + EncodedMsg, + Commitment, + Opts + ), + ?event({component_identifiers_for_sig_base, ComponentsLines}), + ParamsLine = signature_params_line(Commitment, Opts), + SignatureBase = + << + ComponentsLines/binary, "\n", + "\"@signature-params\": ", ParamsLine/binary + >>, + ?event(signature_base, {signature_base, {string, SignatureBase}}), + SignatureBase. +``` + +### signature_components_line + +Given a list of Component Identifiers and a Request/Response Message +construct the "signature-params-line" part of the signature base. + +```erlang +signature_components_line(Req, Commitment, _Opts) -> + ComponentsLines = + lists:map( + fun(Name) -> + case maps:get(Name, Req, not_found) of + not_found -> + throw( + { + missing_key_for_signature_component_line, + Name, + {message, Req}, + {commitment, Commitment} + } + ); + Value -> + << <<"\"">>/binary, Name/binary, <<"\"">>/binary, <<": ">>/binary, Value/binary>> + end + end, + maps:get(<<"committed">>, Commitment) + ), + iolist_to_binary(lists:join(<<"\n">>, ComponentsLines)). +%% +``` + +### signature_params_line + +Given a list of Component Identifiers and a Request/Response Message +construct the "signature-params-line" part of the signature base. 
+ +```erlang +signature_params_line(RawCommitment, Opts) -> + Commitment = + maps:without( + [<<"signature">>, <<"signature-input">>], + RawCommitment + ), + ?event(debug_enc, {signature_params_line, {commitment, Commitment}}), + hb_util:bin( + hb_structured_fields:list( + [ + { + list, + lists:map( + fun(Key) -> {item, {string, Key}, []} end, + dev_codec_httpsig_siginfo:add_derived_specifiers( + hb_util:message_to_ordered_list( + maps:get(<<"committed">>, Commitment), + Opts + ) + ) + ), + lists:map( + fun ({<<"alg">>, Param}) when is_binary(Param) -> + {<<"alg">>, {string, Param}}; + ({Name, Param}) when is_binary(Param) -> + {Name, {string, Param}}; + ({Name, Param}) when is_integer(Param) -> + {Name, Param} + end, + lists:sort(maps:to_list( + maps:with( + [ + <<"created">>, + <<"expires">>, + <<"nonce">>, + <<"alg">>, + <<"keyid">>, + <<"tag">>, + <<"bundle">> + ], + Commitment#{ <<"alg">> => maps:get(<<"type">>, Commitment) } + ) + )) + ) + } + ] + ) + ). +``` + +### validate_large_message_from_http_test + +Ensure that we can validate a signature on an extremely large and complex + +```erlang +validate_large_message_from_http_test() -> + Node = hb_http_server:start_node(Opts = #{ + force_signed => true, + commitment_device => <<"httpsig@1.0">>, + extra => + [ + [ + [ + #{ + <<"n">> => N, + <<"m">> => M, + <<"o">> => O + } + || + O <- lists:seq(1, 3) + ] + || + M <- lists:seq(1, 3) + ] + || + N <- lists:seq(1, 3) + ] + }), + {ok, Res} = hb_http:get(Node, <<"/~meta@1.0/info">>, Opts), + Signers = hb_message:signers(Res, Opts), + ?event({received, {signers, Signers}, {res, Res}}), + ?assert(length(Signers) == 1), + ?assert(hb_message:verify(Res, Signers, Opts)), + ?event({sig_verifies, Signers}), + ?assert(hb_message:verify(Res, all, Opts)), + ?event({hmac_verifies, <<"hmac-sha256">>}), + {ok, OnlyCommitted} = hb_message:with_only_committed(Res, Opts), + ?event({msg_with_only_committed, OnlyCommitted}), + ?assert(hb_message:verify(OnlyCommitted, Signers, Opts)), 
+ ?event({msg_with_only_committed_verifies, Signers}), + ?assert(hb_message:verify(OnlyCommitted, all, Opts)), + ?event({msg_with_only_committed_verifies_hmac, <<"hmac-sha256">>}). +``` + +### committed_id_test + +Ensure that we can validate a signature on an extremely large and complex + +```erlang +committed_id_test() -> + Msg = #{ <<"basic">> => <<"value">> }, + Signed = hb_message:commit(Msg, hb:wallet()), + ?assert(hb_message:verify(Signed, all, #{})), + ?event({signed_msg, Signed}), + UnsignedID = hb_message:id(Signed, none), + SignedID = hb_message:id(Signed, all), + ?event({ids, {unsigned_id, UnsignedID}, {signed_id, SignedID}}), + ?assertNotEqual(UnsignedID, SignedID). +``` + +### commit_secret_key_test + +```erlang +commit_secret_key_test() -> + Msg = #{ <<"basic">> => <<"value">> }, + CommittedMsg = + hb_message:commit( + Msg, + #{}, + #{ + <<"type">> => <<"hmac-sha256">>, + <<"secret">> => <<"test-secret">>, + <<"commitment-device">> => <<"httpsig@1.0">>, + <<"scheme">> => <<"secret">> + } + ), + ?event({committed_msg, CommittedMsg}), + Committers = hb_message:signers(CommittedMsg, #{}), + ?assert(length(Committers) == 1), + ?event({committers, Committers}), + ?assert( + hb_message:verify( + CommittedMsg, + #{ <<"committers">> => Committers, <<"secret">> => <<"test-secret">> }, + #{} + ) + ), + ?assertNot( + hb_message:verify( + CommittedMsg, + #{ <<"committers">> => Committers, <<"secret">> => <<"bad-secret">> }, + #{} + ) + ). 
+``` + +### multicommitted_id_test + +```erlang +multicommitted_id_test() -> + Msg = #{ <<"basic">> => <<"value">> }, + Signed1 = hb_message:commit(Msg, Wallet1 = ar_wallet:new()), + Signed2 = hb_message:commit(Signed1, Wallet2 = ar_wallet:new()), + Addr1 = hb_util:human_id(ar_wallet:to_address(Wallet1)), + Addr2 = hb_util:human_id(ar_wallet:to_address(Wallet2)), + ?event({signed_msg, Signed2}), + UnsignedID = hb_message:id(Signed2, none), + SignedID = hb_message:id(Signed2, all), + ?event({ids, {unsigned_id, UnsignedID}, {signed_id, SignedID}}), + ?assertNotEqual(UnsignedID, SignedID), + ?assert(hb_message:verify(Signed2, [])), + ?assert(hb_message:verify(Signed2, [Addr1])), + ?assert(hb_message:verify(Signed2, [Addr2])), + ?assert(hb_message:verify(Signed2, [Addr1, Addr2])), + ?assert(hb_message:verify(Signed2, [Addr2, Addr1])), + ?assert(hb_message:verify(Signed2, all)). +``` + +### sign_and_verify_link_test + +Test that we can sign and verify a message with a link. We use + +```erlang +sign_and_verify_link_test() -> + Msg = #{ + <<"normal">> => <<"typical-value">>, + <<"untyped">> => #{ <<"inner-untyped">> => <<"inner-value">> }, + <<"typed">> => #{ <<"inner-typed">> => 123 } + }, + NormMsg = hb_message:convert(Msg, <<"structured@1.0">>, #{}), + ?event({msg, NormMsg}), + Signed = hb_message:commit(NormMsg, hb:wallet()), + ?event({signed_msg, Signed}), + ?assert(hb_message:verify(Signed)). 
+``` + +--- + +*Generated from [dev_codec_httpsig.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig.erl)* diff --git a/docs/book/src/dev_codec_httpsig_conv.erl.md b/docs/book/src/dev_codec_httpsig_conv.erl.md new file mode 100644 index 000000000..26c4d2130 --- /dev/null +++ b/docs/book/src/dev_codec_httpsig_conv.erl.md @@ -0,0 +1,564 @@ +# dev_codec_httpsig_conv + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_conv.erl) + +A codec that marshals TABM encoded messages to and from the "HTTP" +message structure. +Every HTTP message is an HTTP multipart message. +See https://datatracker.ietf.org/doc/html/rfc7578 +For each TABM Key: +The Key/Value Pair will be encoded according to the following rules: + "signatures" -> {SignatureInput, Signature} header Tuples, each encoded + as a Structured Field Dictionary + "body" -> + - if a map, then recursively encode as its own HyperBEAM message + - otherwise encode as a normal field + _ -> encode as a normal field +Each field will be mapped to the HTTP Message according to the following +rules: + "body" -> always encoded part of the body as with Content-Disposition + type of "inline" + _ -> + - If the byte size of the value is less than the ?MAX_TAG_VALUE, + then encode as a header, also attempting to encode as a + structured field. + - Otherwise encode the value as a part in the multipart response + +--- + +## Exported Functions + +- `encode_http_msg/2` +- `from/3` +- `to/3` + +--- + +### from + +A codec that marshals TABM encoded messages to and from the "HTTP" +Convert a HTTP Message into a TABM. + +```erlang +from(Bin, _Req, _Opts) when is_binary(Bin) -> {ok, Bin}; +``` + +### from + +A codec that marshals TABM encoded messages to and from the "HTTP" +Convert a HTTP Message into a TABM. 
+ +```erlang +from(Link, _Req, _Opts) when ?IS_LINK(Link) -> {ok, Link}; +``` + +### from + +A codec that marshals TABM encoded messages to and from the "HTTP" +Convert a HTTP Message into a TABM. + +```erlang +from(HTTP, _Req, Opts) -> + % First, parse all headers excluding the signature-related headers, as they + % are handled separately. +``` + +### body_to_tabm + +Generate the body TABM from the `body` key of the encoded message. + +```erlang +body_to_tabm(HTTP, Opts) -> + % Extract the body and content-type from the HTTP message. +``` + +### body_to_parts + +Split the body into parts, if it is a multipart. + +```erlang +body_to_parts(_ContentType, no_body, _Opts) -> no_body; +``` + +### body_to_parts + +Split the body into parts, if it is a multipart. + +```erlang +body_to_parts(ContentType, Body, _Opts) -> + ?event( + {from_body, + {content_type, {explicit, ContentType}}, + {body, Body} + } + ), + Params = + case ContentType of + undefined -> []; + _ -> + {item, {_, _XT}, XParams} = + hb_structured_fields:parse_item(ContentType), + XParams + end, + case lists:keyfind(<<"boundary">>, 1, Params) of + false -> + % The body is not a multipart, so just set as is to the Inlined key on + % the TABM. +``` + +### from_body_part + +Parse a single part of a multipart body into a TABM. + +```erlang +from_body_part(InlinedKey, Part, Opts) -> + % Extract the Headers block and Body. Only split on the FIRST double CRLF + {RawHeadersBlock, RawBody} = + case binary:split(Part, [?DOUBLE_CRLF], []) of + [XRawHeadersBlock] -> + % The message has no body. +``` + +### to + +Convert a TABM into an HTTP Message. The HTTP Message is a simple Erlang Map + +```erlang +to(TABM, Req, Opts) -> to(TABM, Req, [], Opts). +``` + +### to + +Convert a TABM into an HTTP Message. The HTTP Message is a simple Erlang Map + +```erlang +to(Bin, _Req, _FormatOpts, _Opts) when is_binary(Bin) -> {ok, Bin}; +``` + +### to + +Convert a TABM into an HTTP Message. 
The HTTP Message is a simple Erlang Map + +```erlang +to(Link, _Req, _FormatOpts, _Opts) when ?IS_LINK(Link) -> {ok, Link}; +``` + +### to + +Convert a TABM into an HTTP Message. The HTTP Message is a simple Erlang Map + +```erlang +to(TABM, Req = #{ <<"index">> := true }, _FormatOpts, Opts) -> + % If the caller has specified that an `index` page is requested, we: + % 1. Convert the message to HTTPSig as usual. +``` + +### to + +```erlang +to(TABM, Req, FormatOpts, Opts) when is_map(TABM) -> + % Ensure that the material for the message is loaded, if the request is + % asking for a bundle. +``` + +### do_to + +```erlang +do_to(Binary, _FormatOpts, _Opts) when is_binary(Binary) -> Binary; +``` + +### do_to + +```erlang +do_to(TABM, FormatOpts, Opts) when is_map(TABM) -> + InlineKey = + case lists:keyfind(inline, 1, FormatOpts) of + {inline, _InlineFieldHdrs, Key} -> Key; + _ -> not_set + end, + % Calculate the initial encoding from the TABM + Enc0 = + maps:fold( + fun(<<"body">>, Value, AccMap) -> + OldBody = maps:get(<<"body">>, AccMap, #{}), + AccMap#{ <<"body">> => OldBody#{ <<"body">> => Value } }; + (Key, Value, AccMap) when Key =:= InlineKey andalso InlineKey =/= not_set -> + OldBody = maps:get(<<"body">>, AccMap, #{}), + AccMap#{ <<"body">> => OldBody#{ InlineKey => Value } }; + (Key, Value, AccMap) -> + field_to_http(AccMap, {Key, Value}, #{}) + end, + % Add any inline field denotations to the HTTP message + case lists:keyfind(inline, 1, FormatOpts) of + {inline, InlineFieldHdrs, _InlineKey} -> InlineFieldHdrs; + _ -> #{} + end, + maps:without([<<"priv">>], TABM) + ), + ?event({prepared_body_map, {msg, Enc0}}), + BodyMap = maps:get(<<"body">>, Enc0, #{}), + GroupedBodyMap = group_maps(BodyMap, <<>>, #{}, Opts), + Enc1 = + case GroupedBodyMap of + EmptyBody when map_size(EmptyBody) =:= 0 -> + % If the body map is empty, then simply set the body to be a + % corresponding empty binary. 
+```
+
+### group_ids
+
+Group all elements with:
+
+```erlang
+group_ids(Map) ->
+    % Find all keys that are IDs.
+```
+
+### ungroup_ids
+
+Decode the `ao-ids` key into a map.
+
+```erlang
+ungroup_ids(Msg = #{ <<"ao-ids">> := IDBin }, Opts) ->
+    % Extract the ID binary from the Map
+    EncodedIDsMap = hb_structured_fields:parse_dictionary(IDBin),
+    % Convert the value back into a raw binary
+    IDsMap =
+        lists:map(
+            fun({K, {item, {string, Bin}, _}}) -> {K, Bin} end,
+            EncodedIDsMap
+        ),
+    % Add the decoded IDs to the Map and remove the `ao-ids' key
+    hb_maps:merge(hb_maps:without([<<"ao-ids">>], Msg, Opts), hb_maps:from_list(IDsMap), Opts);
+```
+
+### ungroup_ids
+
+Decode the `ao-ids` key into a map.
+Merge maps at the same level, if possible.
+
+```erlang
+ungroup_ids(Msg, _Opts) -> Msg.
+```
+
+### group_maps
+
+Decode the `ao-ids` key into a map.
+Merge maps at the same level, if possible.
+
+```erlang
+group_maps(Map) ->
+    group_maps(Map, <<>>, #{}, #{}).
+```
+
+### group_maps
+
+```erlang
+group_maps(Map, Parent, Top, Opts) when is_map(Map) ->
+    ?event({group_maps, {map, Map}, {parent, Parent}, {top, Top}}),
+    {Flattened, NewTop} = hb_maps:fold(
+        fun(Key, Value, {CurMap, CurTop}) ->
+            ?event({group_maps, {key, Key}, {value, Value}}),
+            NormKey = hb_ao:normalize_key(Key),
+            FlatK =
+                case Parent of
+                    <<>> -> NormKey;
+                    _ -> <<Parent/binary, "/", NormKey/binary>>
+                end,
+            case Value of
+                _ when is_map(Value) orelse is_list(Value) ->
+                    NormMsg =
+                        if is_list(Value) ->
+                            hb_message:convert(
+                                Value,
+                                tabm,
+                                <<"structured@1.0">>,
+                                Opts
+                            );
+                        true ->
+                            Value
+                        end,
+                    case hb_maps:size(NormMsg, Opts) of
+                        0 ->
+                            {
+                                CurMap,
+                                hb_maps:put(
+                                    FlatK,
+                                    #{ <<"ao-types">> => <<"empty-message">> },
+                                    CurTop,
+                                    Opts
+                                )
+                            };
+                        _ ->
+                            NewTop = group_maps(NormMsg, FlatK, CurTop, Opts),
+                            {CurMap, NewTop}
+                    end;
+                _ ->
+                    ?event({group_maps, {norm_key, NormKey}, {value, Value}}),
+                    case byte_size(Value) > ?MAX_HEADER_LENGTH of
+                        % the value is too large to be encoded as a header
+                        % within a part, so instead lift it to be a top level
+                        % part
+                        true ->
+                            NewTop = hb_maps:put(FlatK, Value, CurTop, Opts),
+                            {CurMap, NewTop};
+                        % Encode the value in the current part
+                        false ->
+                            NewCurMap = hb_maps:put(NormKey, Value, CurMap, Opts),
+                            {NewCurMap, CurTop}
+                    end
+            end
+        end,
+        {#{}, Top},
+        Map,
+        Opts
+    ),
+    case hb_maps:size(Flattened, Opts) of
+        0 -> NewTop;
+        _ -> case Parent of
+            <<>> -> hb_maps:merge(NewTop, Flattened, Opts);
+            _ ->
+                Res = NewTop#{ Parent => Flattened },
+                ?event({returning_res, {res, Res}}),
+                Res
+        end
+    end.
+```
+
+### boundary_from_parts
+
+Generate a unique, reproducible boundary for the
+
+```erlang
+boundary_from_parts(PartList) ->
+    BodyBin =
+        iolist_to_binary(
+            lists:join(?CRLF,
+                lists:map(
+                    fun ({_PartName, PartBin}) -> PartBin end,
+                    PartList
+                )
+            )
+        ),
+    RawBoundary = crypto:hash(sha256, BodyBin),
+    hb_util:encode(RawBoundary).
+```
+
+### encode_body_part
+
+Encode a multipart body part to a flat binary.
+
+```erlang
+encode_body_part(PartName, BodyPart, InlineKey, Opts) ->
+    % We'll need to prepend a Content-Disposition header
+    % to the part, using the field name as the form part
+    % name.
+```
+
+### inline_key
+
+given a message, returns a binary tuple:
+
+```erlang
+inline_key(Msg) ->
+    inline_key(Msg, #{}).
+```
+
+### inline_key
+
+```erlang
+inline_key(Msg, Opts) ->
+    % The message can name a key whose value will be placed in the body as the
+    % inline part. Otherwise, the Msg <<"body">> is used. If not present, the
+    % Msg <<"data">> is used.
+    InlineBodyKey = hb_maps:get(<<"ao-body-key">>, Msg, false, Opts),
+    ?event({inlined, InlineBodyKey}),
+    case {
+        InlineBodyKey,
+        hb_maps:is_key(<<"body">>, Msg, Opts)
+            andalso not ?IS_LINK(maps:get(<<"body">>, Msg, Opts)),
+        hb_maps:is_key(<<"data">>, Msg, Opts)
+            andalso not ?IS_LINK(maps:get(<<"data">>, Msg, Opts))
+    } of
+        % ao-body-key already exists, so no need to add one
+        {Explicit, _, _} when Explicit =/= false -> {#{}, InlineBodyKey};
+        % ao-body-key defaults to <<"body">> (see below)
+        % So no need to add one
+        {_, true, _} -> {#{}, <<"body">>};
+        % We need to preserve the ao-body-key, as the <<"data">> field,
+        % so that it is preserved during encoding and decoding
+        {_, _, true} -> {#{<<"ao-body-key">> => <<"data">>}, <<"data">>};
+        % default to body being the inlined part.
+```
+
+### encode_http_msg
+
+Encode a HTTP message into a binary, converting it to `httpsig@1.0`
+
+```erlang
+encode_http_msg(Msg, Opts) ->
+    % Convert the message to a HTTP-Sig encoded output.
+```
+
+### encode_http_flat_msg
+
+Encode a HTTP message into a binary. The input *must* be a raw map of
+
+```erlang
+encode_http_flat_msg(Httpsig, Opts) ->
+    % Serialize the headers, to be included in the part of the multipart response
+    HeaderList =
+        lists:foldl(
+            fun ({HeaderName, RawHeaderVal}, Acc) ->
+                HVal = hb_cache:ensure_loaded(RawHeaderVal, Opts),
+                ?event({encoding_http_header, {header, HeaderName}, {value, HVal}}),
+                [<<HeaderName/binary, ": ", HVal/binary>> | Acc]
+            end,
+            [],
+            hb_maps:to_list(hb_maps:without([<<"body">>, <<"priv">>], Httpsig, Opts), Opts)
+        ),
+    EncodedHeaders = iolist_to_binary(lists:join(?CRLF, lists:reverse(HeaderList))),
+    case hb_maps:get(<<"body">>, Httpsig, <<>>, Opts) of
+        <<>> -> EncodedHeaders;
+        % Some-Headers: some-value
+        % content-type: image/png
+        %
+        % <body>
+        SubBody -> <<EncodedHeaders/binary, ?DOUBLE_CRLF/binary, SubBody/binary>>
+    end.
+``` + +### field_to_http + +All maps are encoded into the body of the HTTP message + +```erlang +field_to_http(Httpsig, {Name, Value}, Opts) when is_map(Value) -> + NormalizedName = hb_ao:normalize_key(Name), + OldBody = hb_maps:get(<<"body">>, Httpsig, #{}, Opts), + Httpsig#{ <<"body">> => OldBody#{ NormalizedName => Value } }; +``` + +### field_to_http + +All maps are encoded into the body of the HTTP message + +```erlang +field_to_http(Httpsig, {Name, Value}, Opts) when is_binary(Value) -> + NormalizedName = hb_ao:normalize_key(Name), + % The default location where the value is encoded within the HTTP + % message depends on its size. +``` + +### group_maps_test + +```erlang +group_maps_test() -> + Map = #{ + <<"a">> => <<"1">>, + <<"b">> => #{ + <<"x">> => <<"10">>, + <<"y">> => #{ + <<"z">> => <<"20">> + }, + <<"foo">> => #{ + <<"bar">> => #{ + <<"fizz">> => <<"buzz">> + } + } + }, + <<"c">> => #{ + <<"d">> => <<"30">> + }, + <<"e">> => <<"2">>, + <<"buf">> => <<"hello">>, + <<"nested">> => #{ + <<"foo">> => <<"iiiiii">>, + <<"here">> => #{ + <<"bar">> => <<"baz">>, + <<"fizz">> => <<"buzz">>, + <<"pop">> => #{ + <<"very-fizzy">> => <<"very-buzzy">> + } + } + } + }, + Lifted = group_maps(Map), + ?assertEqual( + Lifted, + #{ + <<"a">> => <<"1">>, + <<"b">> => #{<<"x">> => <<"10">>}, + <<"b/foo/bar">> => #{<<"fizz">> => <<"buzz">>}, + <<"b/y">> => #{<<"z">> => <<"20">>}, + <<"buf">> => <<"hello">>, + <<"c">> => #{<<"d">> => <<"30">>}, + <<"e">> => <<"2">>, + <<"nested">> => #{<<"foo">> => <<"iiiiii">>}, + <<"nested/here">> => #{<<"bar">> => <<"baz">>, <<"fizz">> => <<"buzz">>}, + <<"nested/here/pop">> => #{<<"very-fizzy">> => <<"very-buzzy">>} + } + ), + ok. 
+``` + +### group_maps_flat_compatible_test + +The grouped maps encoding is a subset of the flat encoding, + +```erlang +group_maps_flat_compatible_test() -> + Map = #{ + <<"a">> => <<"1">>, + <<"b">> => #{ + <<"x">> => <<"10">>, + <<"y">> => #{ + <<"z">> => <<"20">> + }, + <<"foo">> => #{ + <<"bar">> => #{ + <<"fizz">> => <<"buzz">> + } + } + }, + <<"c">> => #{ + <<"d">> => <<"30">> + }, + <<"e">> => <<"2">>, + <<"buf">> => <<"hello">>, + <<"nested">> => #{ + <<"foo">> => <<"iiiiii">>, + <<"here">> => #{ + <<"bar">> => <<"baz">>, + <<"fizz">> => <<"buzz">> + } + } + }, + Lifted = group_maps(Map), + ?assertEqual(dev_codec_flat:from(Lifted, #{}, #{}), {ok, Map}), + ok. +``` + +### encode_message_with_links_test + +```erlang +encode_message_with_links_test() -> + Msg = #{ + <<"immediate-key">> => <<"immediate-value">>, + <<"typed-key">> => 4 + }, + {ok, Path} = hb_cache:write(Msg, #{}), + {ok, Read} = hb_cache:read(Path, #{}), + % Ensure that the message now has a lazy link + ?assertMatch({link, _, _}, maps:get(<<"typed-key">>, Read, #{})), + % Encode and decode the message as `httpsig@1.0` + Enc = hb_message:convert(Msg, <<"httpsig@1.0">>, #{}), + ?event({encoded, Enc}), + Dec = hb_message:convert(Enc, <<"structured@1.0">>, <<"httpsig@1.0">>, #{}), + % Ensure that the result is the same as the original message + ?event({decoded, Dec}), +``` + +--- + +*Generated from [dev_codec_httpsig_conv.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_conv.erl)* diff --git a/docs/book/src/dev_codec_httpsig_keyid.erl.md b/docs/book/src/dev_codec_httpsig_keyid.erl.md new file mode 100644 index 000000000..d65cfe36a --- /dev/null +++ b/docs/book/src/dev_codec_httpsig_keyid.erl.md @@ -0,0 +1,195 @@ +# dev_codec_httpsig_keyid + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_keyid.erl) + +A library for extracting and validating key material for `httpsig@1.0` +requests. 
Offers support for the following keyid schemes: +- `publickey`: The keyid is an encoded public key with the `publickey:` prefix. +- `constant`: The key is simply the keyid itself, including the `public:` + prefix if given. +- `secret`: The key is hashed and the `secret:` prefix is added to the + result in order to generate a keyid. +These functions are abstracted in order to allow for the addition of new +schemes in the future. + +--- + +## Exported Functions + +- `keyid_to_committer/1` +- `keyid_to_committer/2` +- `remove_scheme_prefix/1` +- `req_to_key_material/2` +- `secret_key_to_committer/1` + +--- + +### req_to_key_material + +A library for extracting and validating key material for `httpsig@1.0` +Extract the key and keyid from a request, returning + +```erlang +req_to_key_material(Req, Opts) -> + ?event({req_to_key_material, {req, Req}}), + KeyID = maps:get(<<"keyid">>, Req, undefined), + ?event({keyid_to_key_material, {keyid, KeyID}}), + case find_scheme(KeyID, Req, Opts) of + {ok, Scheme} -> + ?event({scheme_found, {scheme, Scheme}}), + ApplyRes = apply_scheme(Scheme, KeyID, Req), + ?event({apply_scheme_result, {apply_res, ApplyRes}}), + case ApplyRes of + {ok, _, CalcKeyID} when KeyID /= undefined, CalcKeyID /= KeyID -> + {error, key_mismatch}; + {ok, Key, CalcKeyID} -> + {ok, Scheme, Key, CalcKeyID}; + {error, Reason} -> + {error, Reason} + end; + {error, undefined_scheme} -> + {ok, DefaultScheme} = req_to_default_scheme(Req, Opts), + req_to_key_material(Req#{ <<"scheme">> => DefaultScheme }, Opts); + {error, Reason} -> + {error, Reason} + end. +``` + +### find_scheme + +Find the scheme from a keyid or request. Returns `{ok, Scheme}` or + +```erlang +find_scheme(KeyID, Req = #{ <<"scheme">> := RawScheme }, Opts) -> + Scheme = hb_util:atom(RawScheme), + % Validate that the scheme in the request matches the scheme in the keyid. 
+``` + +### find_scheme + +```erlang +find_scheme(undefined, _Req, _Opts) -> + {error, undefined_scheme}; +``` + +### find_scheme + +```erlang +find_scheme(KeyID, Req, Opts) -> + SchemeRes = + case binary:split(KeyID, <<":">>) of + [SchemeBin, _KeyID] -> {ok, SchemeBin}; + [_NoSchemeKeyID] -> + % Determine the default scheme based on the `type' of the request. +``` + +### req_to_default_scheme + +Determine the default scheme based on the `type` of the request. + +```erlang +req_to_default_scheme(Req, _Opts) -> + case maps:find(<<"type">>, Req) of + {ok, Type} -> + case maps:find(Type, ?DEFAULT_SCHEMES_BY_TYPE) of + {ok, Scheme} -> {ok, Scheme}; + error -> {error, unsupported_scheme} + end; + error -> + {error, no_request_type} + end. +``` + +### apply_scheme + +Apply the requested scheme to generate the key material (key and keyid). + +```erlang +apply_scheme(publickey, KeyID, _Req) -> + % Remove the `publickey:' prefix from the keyid and return the key. +``` + +### apply_scheme + +```erlang +apply_scheme(constant, RawKeyID, _Req) -> + % In the `constant' scheme, the key is simply the key itself, including the + % `constant:' prefix if given. +``` + +### apply_scheme + +```erlang +apply_scheme(secret, _KeyID, Req) -> + % In the `secret' scheme, the key is hashed to generate a keyid. +``` + +### apply_scheme + +```erlang +apply_scheme(_Scheme, _Key, _KeyID) -> + {error, unsupported_scheme}. +``` + +### keyid_to_committer + +Given a keyid and a scheme, generate the committer value for a commitment. + +```erlang +keyid_to_committer(KeyID) -> + case find_scheme(KeyID, #{}, #{}) of + {ok, Scheme} -> keyid_to_committer(Scheme, KeyID); + {error, _} -> undefined + end. +``` + +### keyid_to_committer + +```erlang +keyid_to_committer(publickey, KeyID) -> + % Note: There is a subtlety here. The `KeyID' is decoded with the + % `hb_util:decode' function rather than `base64:decode'. The reason for this + % is that certain codecs (e.g. 
`ans104@1.0') encode the public key with + % `base64url' encoding, rather than the standard `base64' encoding in + % HTTPSig. Our `hb_util:decode' function handles both cases returning the + % same raw bytes, and is subsequently safe. +``` + +### keyid_to_committer + +```erlang +keyid_to_committer(secret, KeyID) -> + remove_scheme_prefix(KeyID); +``` + +### keyid_to_committer + +```erlang +keyid_to_committer(constant, _KeyID) -> + undefined. +``` + +### secret_key_to_committer + +Given a secret key, generate the committer value for a commitment. + +```erlang +secret_key_to_committer(Key) -> + hb_util:human_id(hb_crypto:sha256(Key)). +``` + +### remove_scheme_prefix + +Remove the `scheme:` prefix from a keyid. + +```erlang +remove_scheme_prefix(KeyID) -> + case binary:split(KeyID, <<":">>) of + [_Scheme, Key] -> Key; + [Key] -> Key + end. +``` + +--- + +*Generated from [dev_codec_httpsig_keyid.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_keyid.erl)* diff --git a/docs/book/src/dev_codec_httpsig_proxy.erl.md b/docs/book/src/dev_codec_httpsig_proxy.erl.md new file mode 100644 index 000000000..9e32c0c99 --- /dev/null +++ b/docs/book/src/dev_codec_httpsig_proxy.erl.md @@ -0,0 +1,54 @@ +# dev_codec_httpsig_proxy + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_proxy.erl) + +A utility module that contains proxy functions for calling the +`~httpsig@1.0` codec's HMAC commitment functions with secret keys. +These tools are helpful for implementing a standardized pattern: +1. A device verifies a user's request/derives a secret key for them. +2. The device then wants to commit a message with the user's secret key + using the `secret:[h(secret)]` commitment scheme. +3. The commitment must then be modified to reference a different device + as the `commitment-device` key. +4. 
When `/verify` is called, the `~httpsig@1.0` codec is used under-the-hood + to validate the commitment on the re-derived secret key. +This module is currently used by the `~cookie@1.0` and `~http-auth@1.0` +devices. + +--- + +## Exported Functions + +- `commit/5` +- `verify/4` + +--- + +### commit + +A utility module that contains proxy functions for calling the +Commit to a given `Base` message with a given `Secret`, setting the + +```erlang +commit(Device, Secret, Base, Req, Opts) -> + % If there are no existing commitments, we use the unmodified base message. +``` + +### verify + +Verify a given `Base` message with a given `Secret` using the `~httpsig@1.0` + +```erlang +verify(Secret, Base, RawReq, Opts) -> + ProxyRequest = + RawReq#{ + <<"commitment-device">> => <<"httpsig@1.0">>, + <<"path">> => <<"verify">>, + <<"secret">> => Secret + }, + ?event({proxy_request, ProxyRequest}), +``` + +--- + +*Generated from [dev_codec_httpsig_proxy.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_proxy.erl)* diff --git a/docs/book/src/dev_codec_httpsig_siginfo.erl.md b/docs/book/src/dev_codec_httpsig_siginfo.erl.md new file mode 100644 index 000000000..b899d9e03 --- /dev/null +++ b/docs/book/src/dev_codec_httpsig_siginfo.erl.md @@ -0,0 +1,387 @@ +# dev_codec_httpsig_siginfo + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_siginfo.erl) + +A list of components that are `derived` in the context of RFC-9421 from the +request message. 
+ +--- + +## Exported Functions + +- `add_derived_specifiers/1` +- `commitment_to_sig_name/1` +- `commitments_to_siginfo/3` +- `committed_keys_to_siginfo/1` +- `from_siginfo_keys/3` +- `remove_derived_specifiers/1` +- `siginfo_to_commitments/3` +- `to_siginfo_keys/3` + +--- + +### commitments_to_siginfo + +A module for converting between commitments and their encoded `signature` +Generate a `signature` and `signature-input` key pair from a commitment + +```erlang +commitments_to_siginfo(_Msg, Comms, _Opts) when ?IS_EMPTY_MESSAGE(Comms) -> + #{}; +``` + +### commitments_to_siginfo + +A module for converting between commitments and their encoded `signature` +Generate a `signature` and `signature-input` key pair from a commitment + +```erlang +commitments_to_siginfo(Msg, Comms, Opts) -> + % Generate a SF item for each commitment's signature and signature-input. +``` + +### commitment_to_sf_siginfo + +Generate a `signature` and `signature-input` key pair from a given + +```erlang +commitment_to_sf_siginfo(Msg, Commitment, Opts) -> + % Generate the `alg' key from the commitment. +``` + +### get_additional_params + +```erlang +get_additional_params(Commitment) -> + AdditionalParams = + sets:to_list( + sets:subtract( + sets:from_list(maps:keys(Commitment)), + sets:from_list( + [ + <<"alg">>, + <<"keyid">>, + <<"tag">>, + <<"created">>, + <<"expires">>, + <<"nonce">>, + <<"committed">>, + <<"signature">>, + <<"type">>, + <<"commitment-device">>, + <<"committer">> + ] + ) + ) + ), + lists:map(fun(Param) -> + ParamValue = maps:get(Param, Commitment), + case ParamValue of + Val when is_atom(Val) -> + {Param, {string, atom_to_binary(Val, utf8)}}; + Val when is_binary(Val) -> + {Param, {string, Val}}; + Val when is_list(Val) -> + {Param, {string, list_to_binary(lists:join(<<", ">>, Val))}}; + Val when is_map(Val) -> + Map = nested_map_to_string(Val), + {Param, {string, list_to_binary(lists:join(<<", ">>, Map))} } + end + end, AdditionalParams). 
+```
+
+### nested_map_to_string
+
+```erlang
+nested_map_to_string(Map) ->
+    lists:map(fun(I) ->
+        case maps:get(I, Map) of
+            Val when is_map(Val) ->
+                Name = maps:get(<<"name">>, Val),
+                Value = hb_util:encode(maps:get(<<"value">>, Val)),
+                <<I/binary, ":", Name/binary, ":", Value/binary>>;
+            Val ->
+                Val
+        end
+    end, maps:keys(Map)).
+```
+
+### siginfo_to_commitments
+
+Take a message with a `signature` and `signature-input` key pair and
+
+```erlang
+siginfo_to_commitments(
+    Msg =
+        #{
+            <<"signature">> := <<"comm-", SFSigBin/binary>>,
+            <<"signature-input">> := <<"comm-", SFSigInputBin/binary>>
+        },
+    BodyKeys,
+    Opts) ->
+    % Parse the signature and signature-input structured-fields.
+```
+
+### siginfo_to_commitments
+
+```erlang
+siginfo_to_commitments(_Msg, _BodyKeys, _Opts) ->
+    % If the message does not contain a `signature' or `signature-input' key,
+    % we return an empty map.
+```
+
+### sf_siginfo_to_commitment
+
+Take a signature and signature-input as parsed structured-fields and
+
+```erlang
+sf_siginfo_to_commitment(Msg, BodyKeys, SFSig, SFSigInput, Opts) ->
+    % Extract the signature and signature-input from the structured-fields.
+```
+
+### decoding_nested_map_binary
+
+```erlang
+decoding_nested_map_binary(Bin) ->
+    MapBinary =
+        lists:foldl(
+            fun (X, Acc) ->
+                case binary:split(X, <<":">>, [global]) of
+                    [ID, Key, Value] ->
+                        Acc#{
+                            ID => #{
+                                <<"name">> => Key,
+                                <<"value">> => hb_util:decode(Value)
+                            }
+                        };
+                    _ ->
+                        X
+                end
+            end,
+            #{},
+            binary:split(Bin, <<", ">>, [global])
+        ),
+    case MapBinary of
+        Res when is_map(Res) ->
+            Res;
+        Res ->
+            Res
+    end.
+```
+
+### to_siginfo_keys
+
+Normalize a list of AO-Core keys to their equivalents in `httpsig@1.0`
+Normalize a list of `httpsig@1.0` keys to their equivalents in AO-Core
+
+```erlang
+to_siginfo_keys(Msg, Commitment, Opts) ->
+    {ok, _EncMsg, EncComm, _} =
+        dev_codec_httpsig:normalize_for_encoding(Msg, Commitment, Opts),
+    maps:get(<<"committed">>, EncComm).
+``` + +### from_siginfo_keys + +Normalize a list of AO-Core keys to their equivalents in `httpsig@1.0` +Normalize a list of `httpsig@1.0` keys to their equivalents in AO-Core + +```erlang +from_siginfo_keys(HTTPEncMsg, BodyKeys, SigInfoCommitted) -> + % 1. Remove specifiers from the list. +``` + +### committed_keys_to_siginfo + +Convert committed keys to their siginfo format. This involves removing + +```erlang +committed_keys_to_siginfo(Msg) when is_map(Msg) -> + committed_keys_to_siginfo(hb_util:message_to_ordered_list(Msg)); +``` + +### committed_keys_to_siginfo + +Convert committed keys to their siginfo format. This involves removing + +```erlang +committed_keys_to_siginfo([]) -> []; +``` + +### committed_keys_to_siginfo + +Convert committed keys to their siginfo format. This involves removing + +```erlang +committed_keys_to_siginfo([<<"body">> | Rest]) -> + [<<"content-digest">> | Rest]; +``` + +### committed_keys_to_siginfo + +Convert committed keys to their siginfo format. This involves removing + +```erlang +committed_keys_to_siginfo([Key | Rest]) -> + [Key | committed_keys_to_siginfo(Rest)]. +``` + +### commitment_to_device_specifiers + +Convert an `alg` to a commitment device. If the `alg` has the form of + +```erlang +commitment_to_device_specifiers(Commitment, Opts) when is_map(Commitment) -> + commitment_to_device_specifiers(maps:get(<<"alg">>, Commitment), Opts); +``` + +### commitment_to_device_specifiers + +Convert an `alg` to a commitment device. If the `alg` has the form of + +```erlang +commitment_to_device_specifiers(Alg, _Opts) -> + case binary:split(Alg, <<"@">>) of + [Type] -> + % The `alg' is not a device specifier, so we assume that it is a + % type of the `httpsig@1.0' algorithm. 
+``` + +### commitment_to_alg + +Calculate an `alg` string from a commitment message, using its + +```erlang +commitment_to_alg(#{ <<"commitment-device">> := <<"httpsig@1.0">>, <<"type">> := Type }, _Opts) -> + Type; +``` + +### commitment_to_alg + +Calculate an `alg` string from a commitment message, using its + +```erlang +commitment_to_alg(Commitment, _Opts) -> + Type = + case maps:get(<<"type">>, Commitment, undefined) of + undefined -> <<>>; + TypeSpecifier -> <<"/", TypeSpecifier/binary>> + end, + CommitmentDevice = maps:get(<<"commitment-device">>, Commitment), + <>. +``` + +### commitment_to_sig_name + +Generate a signature name from a commitment. The commitment message is +Normalize key parameters to ensure their names are correct for inclusion + +```erlang +commitment_to_sig_name(Commitment) -> + BaseStr = + case maps:get(<<"committer">>, Commitment, undefined) of + undefined -> maps:get(<<"keyid">>, Commitment); + Committer -> + << + (hb_util:to_hex(binary:part(hb_util:native_id(Committer), 1, 8))) + /binary + >> + end, + DeviceStr = + binary:replace( + maps:get( + <<"commitment-device">>, + Commitment + ), + <<"@">>, + <<"-">> + ), + <>. +``` + +### add_derived_specifiers + +Generate a signature name from a commitment. The commitment message is +Normalize key parameters to ensure their names are correct for inclusion + +```erlang +add_derived_specifiers(ComponentIdentifiers) -> + % Remove the @ prefix from the component identifiers, if present. +``` + +### remove_derived_specifiers + +Remove derived specifiers from a list of component identifiers. + +```erlang +remove_derived_specifiers(ComponentIdentifiers) -> + lists:map( + fun(<<"@", Key/binary>>) -> + Key; + (Key) -> + Key + end, + ComponentIdentifiers + ). 
+``` + +### parse_alg_test + +```erlang +parse_alg_test() -> + ?assertEqual( + commitment_to_device_specifiers(#{ <<"alg">> => <<"rsa-pss-sha512">> }, #{}), + #{ + <<"commitment-device">> => <<"httpsig@1.0">>, + <<"type">> => <<"rsa-pss-sha512">> + } + ), + ?assertEqual( + commitment_to_device_specifiers( + #{ <<"alg">> => <<"ans104@1.0/rsa-pss-sha256">> }, + #{}), + #{ + <<"commitment-device">> => <<"ans104@1.0">>, + <<"type">> => <<"rsa-pss-sha256">> + } + ). +``` + +### escaped_value_test + +Test that tag values with special characters are correctly encoded and + +```erlang +escaped_value_test() -> + KeyID = crypto:strong_rand_bytes(32), + Committer = hb_util:human_id(ar_wallet:to_address(KeyID)), + Signature = crypto:strong_rand_bytes(512), + ID = hb_util:human_id(crypto:hash(sha256, Signature)), + Commitment = #{ + <<"committed">> => #{}, + <<"committer">> => Committer, + <<"commitment-device">> => <<"tx@1.0">>, + <<"keyid">> => <<"publickey:", (hb_util:encode(KeyID))/binary>>, + <<"original-tags">> => #{ + <<"1">> => #{ + <<"name">> => <<"Key">>, + <<"value">> => <<"value">> + }, + <<"2">> => #{ + <<"name">> => <<"Quotes">>, + <<"value">> => <<"{\"function\":\"mint\"}">> + } + }, + <<"signature">> => hb_util:encode(Signature), + <<"type">> => <<"rsa-pss-sha256">> + }, + SigInfo = commitments_to_siginfo(#{}, #{ ID => Commitment }, #{}), + Commitments = siginfo_to_commitments(SigInfo, #{}, #{}), + ?event(debug_test, {siginfo, {explicit, SigInfo}}), + ?event(debug_test, {commitments, {explicit, Commitments}}), + ?assertEqual(#{ ID => Commitment }, Commitments). 
+``` + +--- + +*Generated from [dev_codec_httpsig_siginfo.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_siginfo.erl)* diff --git a/docs/book/src/dev_codec_json.erl.md b/docs/book/src/dev_codec_json.erl.md new file mode 100644 index 000000000..56bfbac9c --- /dev/null +++ b/docs/book/src/dev_codec_json.erl.md @@ -0,0 +1,170 @@ +# dev_codec_json + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_json.erl) + +A simple JSON codec for HyperBEAM's message format. Takes a +message as TABM and returns an encoded JSON string representation. +This codec utilizes the httpsig@1.0 codec for signing and verifying. + +--- + +## Exported Functions + +- `commit/3` +- `committed/3` +- `content_type/1` +- `deserialize/3` +- `from/3` +- `serialize/3` +- `to/3` +- `verify/3` + +--- + +### content_type + +A simple JSON codec for HyperBEAM's message format. Takes a +Return the content type for the codec. +Encode a message to a JSON string, using JSON-native typing. + +```erlang +content_type(_) -> {ok, <<"application/json">>}. +``` + +### to + +A simple JSON codec for HyperBEAM's message format. Takes a +Return the content type for the codec. +Encode a message to a JSON string, using JSON-native typing. + +```erlang +to(Msg, _Req, _Opts) when is_binary(Msg) -> + {ok, hb_util:bin(json:encode(Msg))}; +``` + +### to + +A simple JSON codec for HyperBEAM's message format. Takes a +Return the content type for the codec. +Encode a message to a JSON string, using JSON-native typing. + +```erlang +to(Msg, Req, Opts) -> + % The input to this function will be a TABM message, so we: + % 1. Convert it to a structured message. +``` + +### from + +Decode a JSON string to a message. + +```erlang +from(Map, _Req, _Opts) when is_map(Map) -> {ok, Map}; +``` + +### from + +Decode a JSON string to a message. 
+ +```erlang +from(JSON, _Req, Opts) -> + % The JSON string will be a partially-TABM encoded message: Rich number + % and list types, but no `atom's. Subsequently, we convert it to a fully + % structured message after decoding, then turn the result back into a TABM. +``` + +### commit + +```erlang +commit(Msg, Req, Opts) -> dev_codec_httpsig:commit(Msg, Req, Opts). +``` + +### verify + +```erlang +verify(Msg, Req, Opts) -> dev_codec_httpsig:verify(Msg, Req, Opts). +``` + +### committed + +```erlang +committed(Msg, Req, Opts) when is_binary(Msg) -> + committed(hb_util:ok(from(Msg, Req, Opts)), Req, Opts); +``` + +### committed + +```erlang +committed(Msg, _Req, Opts) -> + hb_message:committed(Msg, all, Opts). +``` + +### deserialize + +Deserialize the JSON string found at the given path. + +```erlang +deserialize(Base, Req, Opts) -> + Payload = + hb_ao:get( + Target = + hb_ao:get( + <<"target">>, + Req, + <<"body">>, + Opts + ), + Base, + Opts + ), + case Payload of + not_found -> {error, #{ + <<"status">> => 404, + <<"body">> => + << + "JSON payload not found in the base message.", + "Searched for: ", Target/binary + >> + }}; + _ -> + from(Payload, Req, Opts) + end. +``` + +### serialize + +Serialize a message to a JSON string. + +```erlang +serialize(Base, Msg, Opts) -> + {ok, + #{ + <<"content-type">> => <<"application/json">>, + <<"body">> => hb_util:ok(to(Base, Msg, Opts)) + } + }. 
+``` + +### decode_with_atom_test + +```erlang +decode_with_atom_test() -> + JSON = + <<""" + [ + { + "store-module": "hb_store_fs", + "name": "cache-TEST/json-test-store", + "ao-types": "store-module=\"atom\"" + } + ] + """>>, + Msg = hb_message:convert(JSON, <<"structured@1.0">>, <<"json@1.0">>, #{}), + ?assertMatch( + [#{ <<"store-module">> := hb_store_fs }|_], + hb_cache:ensure_all_loaded(Msg, #{}) +``` + +--- + +*Generated from [dev_codec_json.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_json.erl)* diff --git a/docs/book/src/dev_codec_structured.erl.md b/docs/book/src/dev_codec_structured.erl.md new file mode 100644 index 000000000..d78e1379f --- /dev/null +++ b/docs/book/src/dev_codec_structured.erl.md @@ -0,0 +1,500 @@ +# dev_codec_structured + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_structured.erl) + +A device implementing the codec interface (to/1, from/1) for +HyperBEAM's internal, richly typed message format. Supported rich types are: +- `integer` +- `float` +- `atom` +- `list` +Encoding to TABM can be limited to a subset of types (with other types +passing through in their rich representation) by specifying the types +that should be encoded with the `encode-types` request key. +This format mirrors HTTP Structured Fields, aside from its limitations of +compound type depths, as well as limited floating point representations. +As with all AO-Core codecs, its target format (the format it expects to +receive in the `to/1` function, and give in `from/1`) is TABM. +For more details, see the HTTP Structured Fields (RFC-9651) specification. 
+ +--- + +## Exported Functions + +- `commit/3` +- `decode_ao_types/2` +- `decode_value/2` +- `encode_ao_types/2` +- `encode_value/1` +- `from/3` +- `implicit_keys/2` +- `is_list_from_ao_types/2` +- `to/3` +- `verify/3` + +--- + +### commit + +A device implementing the codec interface (to/1, from/1) for + +```erlang +commit(Msg, Req, Opts) -> dev_codec_httpsig:commit(Msg, Req, Opts). +``` + +### verify + +A device implementing the codec interface (to/1, from/1) for +Convert a rich message into a 'Type-Annotated-Binary-Message' (TABM). + +```erlang +verify(Msg, Req, Opts) -> dev_codec_httpsig:verify(Msg, Req, Opts). +``` + +### from + +A device implementing the codec interface (to/1, from/1) for +Convert a rich message into a 'Type-Annotated-Binary-Message' (TABM). + +```erlang +from(Bin, _Req, _Opts) when is_binary(Bin) -> {ok, Bin}; +``` + +### from + +A device implementing the codec interface (to/1, from/1) for +Convert a rich message into a 'Type-Annotated-Binary-Message' (TABM). + +```erlang +from(List, Req, Opts) when is_list(List) -> + % Encode the list as a map, then -- if our request indicates that we are + % encoding lists -- add the `.' key to the `ao-types' field, indicating + % that this message is a list and return. Otherwise, if the downstream + % encoding did not set its own `ao-types' field, we convert the message + % back to a list. +``` + +### from + +```erlang +from(Msg, Req, Opts) when is_map(Msg) -> + % Normalize the message, offloading links to the cache. +``` + +### from + +Find the types that should be encoded from the request and options. + +```erlang +from(Other, _Req, _Opts) -> {ok, hb_path:to_binary(Other)}. +``` + +### find_encode_types + +Find the types that should be encoded from the request and options. +Determine the type for a value. + +```erlang +find_encode_types(Req, Opts) -> + hb_maps:get(<<"encode-types">>, Req, ?SUPPORTED_TYPES, Opts). +``` + +### type + +Find the types that should be encoded from the request and options. 
+Determine the type for a value. + +```erlang +type(Int) when is_integer(Int) -> <<"integer">>; +``` + +### type + +Find the types that should be encoded from the request and options. +Determine the type for a value. + +```erlang +type(Float) when is_float(Float) -> <<"float">>; +``` + +### type + +Find the types that should be encoded from the request and options. +Determine the type for a value. + +```erlang +type(Atom) when is_atom(Atom) -> <<"atom">>; +``` + +### type + +Find the types that should be encoded from the request and options. +Determine the type for a value. + +```erlang +type(List) when is_list(List) -> <<"list">>; +``` + +### type + +Find the types that should be encoded from the request and options. +Determine the type for a value. +Discern the linkify mode from the request and the options. + +```erlang +type(Other) -> Other. +``` + +### linkify_mode + +Find the types that should be encoded from the request and options. +Determine the type for a value. +Discern the linkify mode from the request and the options. + +```erlang +linkify_mode(Req, Opts) -> + case hb_maps:get(<<"bundle">>, Req, not_found, Opts) of + not_found -> hb_opts:get(linkify_mode, offload, Opts); + true -> + % The request is asking for a bundle, so we should _not_ linkify. +``` + +### to + +Convert a TABM into a native HyperBEAM message. + +```erlang +to(Bin, _Req, _Opts) when is_binary(Bin) -> {ok, Bin}; +``` + +### to + +Convert a TABM into a native HyperBEAM message. + +```erlang +to(TABM0, Req, Opts) when is_list(TABM0) -> + % If we receive a list, we convert it to a message and run `to/3' on it. +``` + +### to + +```erlang +to(TABM0, Req, Opts) -> + Types = decode_ao_types(TABM0, Opts), + % Decode all links to their HyperBEAM-native, resolvable form. 
+``` + +### encode_ao_types + +Generate an `ao-types` structured field from a map of keys and their + +```erlang +encode_ao_types(Types, _Opts) -> + iolist_to_binary(hb_structured_fields:dictionary( + lists:map( + fun(Key) -> + {ok, Item} = hb_structured_fields:to_item(maps:get(Key, Types)), + {hb_escape:encode(Key), Item} + end, + hb_util:to_sorted_keys(Types) + ) + )). +``` + +### decode_ao_types + +Parse the `ao-types` field of a TABM if present, and return a map of + +```erlang +decode_ao_types(List, _Opts) when is_list(List) -> #{}; +``` + +### decode_ao_types + +Parse the `ao-types` field of a TABM if present, and return a map of + +```erlang +decode_ao_types(Msg, Opts) when is_map(Msg) -> + decode_ao_types(hb_maps:get(<<"ao-types">>, Msg, <<>>, Opts), Opts); +``` + +### decode_ao_types + +Parse the `ao-types` field of a TABM if present, and return a map of + +```erlang +decode_ao_types(Bin, _Opts) when is_binary(Bin) -> + hb_maps:from_list( + lists:map( + fun({Key, {item, {_, Value}, _}}) -> + {hb_escape:decode(Key), Value} + end, + hb_structured_fields:parse_dictionary(Bin) + ) + ). +``` + +### is_list_from_ao_types + +Determine if the `ao-types` field of a TABM indicates that the message + +```erlang +is_list_from_ao_types(Types, Opts) when is_binary(Types) -> + is_list_from_ao_types(decode_ao_types(Types, Opts), Opts); +``` + +### is_list_from_ao_types + +Determine if the `ao-types` field of a TABM indicates that the message + +```erlang +is_list_from_ao_types(Types, _Opts) -> + case maps:find(<<".">>, Types) of + {ok, <<"list">>} -> true; + _ -> false + end. +``` + +### implicit_keys + +Find the implicit keys of a TABM. + +```erlang +implicit_keys(Req, Opts) -> + hb_maps:keys( + hb_maps:filtermap( + fun(_Key, Val = <<"empty-", _/binary>>) -> {true, Val}; + (_Key, _Val) -> false + end, + decode_ao_types(Req, Opts), + Opts + ), + Opts + ). +``` + +### maybe_encode_value + +Encode a value if it is in the list of supported types. 
+ +```erlang +maybe_encode_value(Value, EncodeTypes) -> + case lists:member(type(Value), EncodeTypes) of + true -> encode_value(Value); + false -> skip + end. +``` + +### encode_value + +Convert a term to a binary representation, emitting its type for + +```erlang +encode_value(Value) when is_integer(Value) -> + [Encoded, _] = hb_structured_fields:item({item, Value, []}), + {<<"integer">>, Encoded}; +``` + +### encode_value + +Convert a term to a binary representation, emitting its type for + +```erlang +encode_value(Value) when is_float(Value) -> + ?no_prod("Must use structured field representation for floats!"), + {<<"float">>, float_to_binary(Value)}; +``` + +### encode_value + +Convert a term to a binary representation, emitting its type for + +```erlang +encode_value(Value) when is_atom(Value) -> + EncodedIOList = + hb_structured_fields:item({item, {token, hb_util:bin(Value)}, []}), + Encoded = hb_util:bin(EncodedIOList), + {<<"atom">>, Encoded}; +``` + +### encode_value + +Convert a term to a binary representation, emitting its type for + +```erlang +encode_value(Values) when is_list(Values) -> + EncodedValues = + lists:map( + fun(Bin) when is_binary(Bin) -> {item, {string, Bin}, []}; + (Item) -> + {RawType, Encoded} = encode_value(Item), + Type = hb_ao:normalize_key(RawType), + { + item, + { + string, + << + "(ao-type-", Type/binary, ") ", + Encoded/binary + >> + }, + [] + } + end, + Values + ), + EncodedList = hb_structured_fields:list(EncodedValues), + {<<"list">>, iolist_to_binary(EncodedList)}; +``` + +### encode_value + +Convert a term to a binary representation, emitting its type for + +```erlang +encode_value(Value) when is_binary(Value) -> + {<<"binary">>, Value}; +``` + +### encode_value + +Convert a term to a binary representation, emitting its type for + +```erlang +encode_value(Value) -> + Value. +``` + +### decode_value + +Convert non-binary values to binary for serialization. 
+ +```erlang +decode_value(Type, Value) when is_list(Type) -> + decode_value(list_to_binary(Type), Value); +``` + +### decode_value + +Convert non-binary values to binary for serialization. + +```erlang +decode_value(Type, Value) when is_binary(Type) -> + ?event({decoding, {type, Type}, {value, Value}}), + decode_value( + binary_to_existing_atom( + list_to_binary(string:to_lower(binary_to_list(Type))), + latin1 + ), + Value + ); +``` + +### decode_value + +Convert non-binary values to binary for serialization. + +```erlang +decode_value(integer, Value) -> + {item, Number, _} = hb_structured_fields:parse_item(Value), + Number; +``` + +### decode_value + +Convert non-binary values to binary for serialization. + +```erlang +decode_value(float, Value) -> + binary_to_float(Value); +``` + +### decode_value + +Convert non-binary values to binary for serialization. + +```erlang +decode_value(atom, Value) -> + {item, {_, AtomString}, _} = + hb_structured_fields:parse_item(Value), + hb_util:atom(AtomString); +``` + +### decode_value + +Convert non-binary values to binary for serialization. + +```erlang +decode_value(list, Value) when is_binary(Value) -> + lists:map( + fun({item, {string, <<"(ao-type-", Rest/binary>>}, _}) -> + [Type, Item] = binary:split(Rest, <<") ">>), + decode_value(Type, Item); + ({item, Item, _}) -> hb_structured_fields:from_bare_item(Item) + end, + hb_structured_fields:parse_list(iolist_to_binary(Value)) + ); +``` + +### decode_value + +Convert non-binary values to binary for serialization. + +```erlang +decode_value(list, Value) when is_map(Value) -> + hb_util:message_to_ordered_list(Value); +``` + +### decode_value + +Convert non-binary values to binary for serialization. 
+ +```erlang +decode_value(map, Value) -> + hb_maps:from_list( + lists:map( + fun({Key, {item, Item, _}}) -> + ?event({decoded_item, {explicit, Key}, Item}), + {Key, hb_structured_fields:from_bare_item(Item)} + end, + hb_structured_fields:parse_dictionary(iolist_to_binary(Value)) + ) + ); +``` + +### decode_value + +Convert non-binary values to binary for serialization. + +```erlang +decode_value(BinType, Value) when is_binary(BinType) -> + decode_value( + list_to_existing_atom( + string:to_lower( + binary_to_list(BinType) + ) + ), + Value + ); +``` + +### decode_value + +Convert non-binary values to binary for serialization. + +```erlang +decode_value(OtherType, Value) -> + ?event({unexpected_type, OtherType, Value}), + throw({unexpected_type, OtherType, Value}). +``` + +### list_encoding_test + +```erlang +list_encoding_test() -> + % Test that we can encode and decode a list of integers. +``` + +--- + +*Generated from [dev_codec_structured.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_structured.erl)* diff --git a/docs/book/src/dev_copycat.erl.md b/docs/book/src/dev_copycat.erl.md new file mode 100644 index 000000000..4a8df13c1 --- /dev/null +++ b/docs/book/src/dev_copycat.erl.md @@ -0,0 +1,42 @@ +# dev_copycat + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_copycat.erl) + +A device for orchestrating indexing of messages from foreign sources +into a HyperBEAM node's caches. +Supported sources of messages are as follows: +- A remote Arweave GraphQL endpoint. +- A remote Arweave node. +Each source is implemented as a separate engine, with `dev_copycat_[ENGINE]` +as the module name. + +--- + +## Exported Functions + +- `arweave/3` +- `graphql/3` + +--- + +### graphql + +A device for orchestrating indexing of messages from foreign sources +Fetch data from a GraphQL endpoint for replication. See + +```erlang +graphql(Base, Request, Opts) -> + dev_copycat_graphql:graphql(Base, Request, Opts). 
+``` + +### arweave + +Fetch data from an Arweave node for replication. See `dev_copycat_arweave` + +```erlang +arweave(Base, Request, Opts) -> +``` + +--- + +*Generated from [dev_copycat.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_copycat.erl)* diff --git a/docs/book/src/dev_copycat_arweave.erl.md b/docs/book/src/dev_copycat_arweave.erl.md new file mode 100644 index 000000000..64b304897 --- /dev/null +++ b/docs/book/src/dev_copycat_arweave.erl.md @@ -0,0 +1,112 @@ +# dev_copycat_arweave + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_copycat_arweave.erl) + +A `~copycat@1.0` engine that fetches block data from an Arweave node for +replication. This engine works in _reverse_ chronological order by default, +fetching blocks from the latest known block towards the Genesis block. The +node avoids retrieving blocks that are already present in the cache using +`~arweave@2.9-pre`'s built-in caching mechanism. + +--- + +## Exported Functions + +- `arweave/3` + +--- + +### arweave + +A `~copycat@1.0` engine that fetches block data from an Arweave node for +Fetch blocks from an Arweave node between a given range, or from the + +```erlang +arweave(_Base, Request, Opts) -> + {From, To} = parse_range(Request, Opts), + fetch_blocks(Request, From, To, Opts). +``` + +### parse_range + +Parse the range from the request. + +```erlang +parse_range(Request, Opts) -> + From = + case hb_maps:find(<<"from">>, Request, Opts) of + {ok, Height} -> Height; + error -> + {ok, LatestHeight} = + hb_ao:resolve( + <>, + Opts + ), + LatestHeight + end, + To = hb_maps:get(<<"to">>, Request, 0, Opts), + {From, To}. +``` + +### fetch_blocks + +Fetch blocks from an Arweave node between a given range. 
+ +```erlang +fetch_blocks(Req, Current, Current, _Opts) -> + ?event(copycat_arweave, + {arweave_block_indexing_completed, + {reached_target, Current}, + {initial_request, Req} + } + ), + {ok, Current}; +``` + +### fetch_blocks + +Fetch blocks from an Arweave node between a given range. + +```erlang +fetch_blocks(Req, Current, To, Opts) -> + BlockRes = + hb_ao:resolve( + << + ?ARWEAVE_DEVICE/binary, + "/block=", + (hb_util:bin(Current))/binary + >>, + Opts + ), + process_block(BlockRes, Req, Current, To, Opts), + fetch_blocks(Req, Current - 1, To, Opts). +``` + +### process_block + +Process a block. + +```erlang +process_block(BlockRes, _Req, Current, To, _Opts) -> + case BlockRes of + {ok, _} -> + ?event( + copycat_short, + {arweave_block_cached, + {height, Current}, + {target, To} + } + ); + {error, not_found} -> + ?event( + copycat_short, + {arweave_block_not_found, + {height, Current}, + {target, To} + } + ) +``` + +--- + +*Generated from [dev_copycat_arweave.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_copycat_arweave.erl)* diff --git a/docs/book/src/dev_copycat_graphql.erl.md b/docs/book/src/dev_copycat_graphql.erl.md new file mode 100644 index 000000000..8398ab922 --- /dev/null +++ b/docs/book/src/dev_copycat_graphql.erl.md @@ -0,0 +1,219 @@ +# dev_copycat_graphql + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_copycat_graphql.erl) + +A `~copycat@1.0` engine that fetches data from a GraphQL endpoint for +replication. 
+ +--- + +## Exported Functions + +- `graphql/3` + +--- + +### graphql + +A `~copycat@1.0` engine that fetches data from a GraphQL endpoint for +Takes a GraphQL query, optionally with a node address, and curses through + +```erlang +graphql(Base, Req, Opts) -> + case parse_query(Base, Req, Opts) of + {ok, Query} -> + Node = maps:get(<<"node">>, Opts, undefined), + OpName = hb_maps:get(<<"operationName">>, Req, undefined, Opts), + Vars = hb_maps:get(<<"variables">>, Req, #{}, Opts), + index_graphql(0, Query, Vars, Node, OpName, Opts); + Other -> + Other + end. +``` + +### index_graphql + +Index a GraphQL query into the node's caches. + +```erlang +index_graphql(Total, Query, Vars, Node, OpName, Opts) -> + maybe + ?event( + {graphql_run_called, + {query, {string, Query}}, + {operation, OpName}, + {variables, Vars} + } + ), + {ok, RawRes} ?= hb_gateway_client:query(Query, Vars, Node, OpName, Opts), + Res = hb_util:deep_get(<<"data/transactions">>, RawRes, #{}, Opts), + NodeStructs = hb_util:deep_get(<<"edges">>, Res, [], Opts), + ?event({graphql_request_returned_items, length(NodeStructs)}), + ?event( + {graphql_indexing_responses, + {query, {string, Query}}, + {variables, Vars}, + {result, Res} + } + ), + ParsedMsgs = + lists:filtermap( + fun(NodeStruct) -> + Struct = hb_maps:get(<<"node">>, NodeStruct, not_found, Opts), + try + {ok, ParsedMsg} = + hb_gateway_client:result_to_message( + Struct, + Opts + ), + {true, ParsedMsg} + catch + error:Reason -> + ?event( + warning, + {indexer_graphql_parse_failed, + {struct, NodeStruct}, + {reason, Reason} + } + ), + false + end + end, + NodeStructs + ), + ?event({graphql_parsed_msgs, length(ParsedMsgs)}), + WrittenMsgs = + lists:filter( + fun(ParsedMsg) -> + try + {ok, _} = hb_cache:write(ParsedMsg, Opts), + true + catch + error:Reason -> + ?event( + warning, + {indexer_graphql_write_failed, + {reason, Reason}, + {msg, ParsedMsg} + } + ), + false + end + end, + ParsedMsgs + ), + NewTotal = Total + length(WrittenMsgs), + 
?event(copycat_short, + {indexer_graphql_wrote, + {total, NewTotal}, + {batch, length(WrittenMsgs)}, + {batch_failures, length(ParsedMsgs) - length(WrittenMsgs)} + } + ), + HasNextPage = hb_util:deep_get(<<"pageInfo/hasNextPage">>, Res, false, Opts), + case HasNextPage of + true -> + % Get the last cursor from the node structures and recurse. +``` + +### parse_query + +Find or create a GraphQL query from a given base and request. We expect + +```erlang +parse_query(Base, Req, Opts) -> + % Merge the keys of the base and request maps, and remove duplicates. +``` + +### default_query + +Return a default query for a given filter type. + +```erlang +default_query(<<"tags">>, RawMessage, Opts) -> + Message = hb_cache:ensure_all_loaded(RawMessage, Opts), + BinaryPairs = + lists:map( + fun({Key, Value}) -> {hb_util:bin(Key), hb_util:bin(Value)} end, + hb_maps:to_list(Message, Opts) + ), + TagsQueryStr = + hb_util:bin( + [ + <<"{name: \"", Key/binary, "\", values: [\"", Value/binary, "\"]}">> + || + {Key, Value} <- BinaryPairs + ] + ), + ?event({tags_query, + {message, Message}, + {binary_pairs, BinaryPairs}, + {tags_query_str, {string, TagsQueryStr}} + }), + {ok, <<"query($after: String) { ", + "transactions(after: $after, tags: [", + TagsQueryStr/binary, + "]) { ", + "edges { ", (hb_gateway_client:item_spec())/binary , " } ", + "pageInfo { hasNextPage }", + "} }">>}; +``` + +### default_query + +Return a default query for a given filter type. + +```erlang +default_query(<<"tag">>, {Key, Value}, _Opts) -> + {ok, <<"query($after: String) { ", + "transactions(after: $after, tags: [", + "{name: \"", Key/binary, "\", values: [\"", Value/binary, "\"]}", + "]) { ", + "edges { ", (hb_gateway_client:item_spec())/binary , " } ", + "pageInfo { hasNextPage }", + "} }">>}; +``` + +### default_query + +Return a default query for a given filter type. 
+ +```erlang +default_query(<<"recipient">>, Merged, Opts) -> + Recipient = hb_maps:get(<<"recipient">>, Merged, <<>>, Opts), + {ok, <<"query($after: String) { ", + "transactions(after: $after, recipients: [\"", Recipient/binary, "\"]) { ", + "edges { ", (hb_gateway_client:item_spec())/binary , " } ", + "pageInfo { hasNextPage }", + "} }">>}; +``` + +### default_query + +Return a default query for a given filter type. + +```erlang +default_query(<<"owner">>, Merged, Opts) -> + Owner = hb_maps:get(<<"owner">>, Merged, <<>>, Opts), + {ok, <<"query($after: String) { ", + "transactions(after: $after, owner: \"", Owner/binary, "\") { ", + "edges { ", (hb_gateway_client:item_spec())/binary , " } ", + "pageInfo { hasNextPage }", + "} }">>}; +``` + +### default_query + +Return a default query for a given filter type. + +```erlang +default_query(<<"all">>, _Merged, _Opts) -> + {ok, <<"query($after: String) { ", + "transactions(after: $after) { ", + "edges { ", (hb_gateway_client:item_spec())/binary , " } ", + "pageInfo { hasNextPage }", +``` + +--- + +*Generated from [dev_copycat_graphql.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_copycat_graphql.erl)* diff --git a/docs/book/src/dev_cron.erl.md b/docs/book/src/dev_cron.erl.md new file mode 100644 index 000000000..ef30611d6 --- /dev/null +++ b/docs/book/src/dev_cron.erl.md @@ -0,0 +1,400 @@ +# dev_cron + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cron.erl) + +A device that inserts new messages into the schedule to allow processes +to passively 'call' themselves without user interaction. + +--- + +## Exported Functions + +- `every/3` +- `info/1` +- `info/3` +- `once/3` +- `stop/3` + +--- + +### info + +A device that inserts new messages into the schedule to allow processes +Exported function for getting device info. + +```erlang +info(_) -> + #{ exports => [info, once, every, stop] }. +``` + +### info + +Exported function for scheduling a one-time message. 
+ +```erlang +info(_Msg1, _Msg2, _Opts) -> + InfoBody = #{ + <<"description">> => <<"Cron device for scheduling messages">>, + <<"version">> => <<"1.0">>, + <<"paths">> => #{ + <<"info">> => <<"Get device info">>, + <<"once">> => <<"Schedule a one-time message">>, + <<"every">> => <<"Schedule a recurring message">>, + <<"stop">> => <<"Stop a scheduled task {task}">> + } + }, + {ok, #{<<"status">> => 200, <<"body">> => InfoBody}}. +``` + +### once + +Exported function for scheduling a one-time message. + +```erlang +once(_Msg1, Msg2, Opts) -> + case hb_ao:get(<<"cron-path">>, Msg2, Opts) of + not_found -> + {error, <<"No cron path found in message.">>}; + CronPath -> + ReqMsgID = hb_message:id(Msg2, all, Opts), + % make the path specific for the end device to be used + ModifiedMsg2 = + maps:remove( + <<"cron-path">>, + maps:put(<<"path">>, CronPath, Msg2) + ), + Name = {<<"cron@1.0">>, ReqMsgID}, + Pid = spawn(fun() -> once_worker(CronPath, ModifiedMsg2, Opts) end), + hb_name:register(Name, Pid), + {ok, ReqMsgID} + end. +``` + +### once_worker + +Internal function for scheduling a one-time message. + +```erlang +once_worker(Path, Req, Opts) -> + % Directly call the meta device on the newly constructed 'singleton', just + % as hb_http_server does. +``` + +### every + +Exported function for scheduling a recurring message. 
+ +```erlang +every(_Msg1, Msg2, Opts) -> + case { + hb_ao:get(<<"cron-path">>, Msg2, Opts), + hb_ao:get(<<"interval">>, Msg2, Opts) + } of + {not_found, _} -> + {error, <<"No cron path found in message.">>}; + {_, not_found} -> + {error, <<"No interval found in message.">>}; + {CronPath, IntervalString} -> + try + IntervalMillis = parse_time(IntervalString), + if IntervalMillis =< 0 -> + throw({error, invalid_interval_value}); + true -> + ok + end, + ReqMsgID = hb_message:id(Msg2, all, Opts), + ModifiedMsg2 = + maps:remove( + <<"cron-path">>, + maps:remove(<<"interval">>, Msg2) + ), + TracePID = hb_tracer:start_trace(), + Pid = + spawn( + fun() -> + every_worker_loop( + CronPath, + ModifiedMsg2, + Opts#{ trace => TracePID }, + IntervalMillis + ) + end + ), + Name = {<<"cron@1.0">>, ReqMsgID}, + hb_name:register(Name, Pid), + {ok, ReqMsgID} + catch + error:{invalid_time_unit, Unit} -> + {error, <<"Invalid time unit: ", Unit/binary>>}; + error:{invalid_interval_value} -> + {error, <<"Invalid interval value.">>}; + error:{Reason, _Stack} -> + {error, {<<"Error parsing interval">>, Reason}} + end + end. +``` + +### stop + +Exported function for stopping a scheduled task. + +```erlang +stop(_Msg1, Msg2, Opts) -> + case hb_ao:get(<<"task">>, Msg2, Opts) of + not_found -> + {error, <<"No task ID found in message.">>}; + TaskID -> + Name = {<<"cron@1.0">>, TaskID}, + case hb_name:lookup(Name) of + Pid when is_pid(Pid) -> + ?event({cron_stopping_task, {task_id, TaskID}, {pid, Pid}}), + exit(Pid, kill), + hb_name:unregister(Name), + {ok, #{<<"status">> => 200, <<"body">> => #{ + <<"message">> => <<"Task stopped successfully">>, + <<"task_id">> => TaskID + }}}; + undefined -> + {error, <<"Task not found.">>}; + Error -> + ?event({cron_stop_lookup_error, {task_id, TaskID}, {error, Error}}), + {error, #{ + <<"error">> => + <<"Failed to lookup task or unexpected result">>, + <<"details">> => Error + }} + end + end. 
+``` + +### every_worker_loop + +```erlang +every_worker_loop(CronPath, Req, Opts, IntervalMillis) -> + Req1 = Req#{<<"path">> => CronPath}, + ?event( + {cron_every_worker_executing, + {path, CronPath}, + {req_id, hb_message:id(Req, all, Opts)} + } + ), + try + dev_meta:handle(Opts, Req1), + ?event({cron_every_worker_executed, {path, CronPath}}) + catch + Class:Reason:Stack -> + ?event(cron_error, {cron_every_worker_error, + {path, CronPath}, + {error, Class, Reason, Stack}}) + end, + timer:sleep(IntervalMillis), + every_worker_loop(CronPath, Req, Opts, IntervalMillis). +``` + +### parse_time + +Parse a time string into milliseconds. + +```erlang +parse_time(BinString) -> + [AmountStr, UnitStr] = binary:split(BinString, <<"-">>), + Amount = binary_to_integer(AmountStr), + Unit = string:lowercase(binary_to_list(UnitStr)), + case Unit of + "millisecond" ++ _ -> Amount; + "second" ++ _ -> Amount * 1000; + "minute" ++ _ -> Amount * 60 * 1000; + "hour" ++ _ -> Amount * 60 * 60 * 1000; + "day" ++ _ -> Amount * 24 * 60 * 60 * 1000; + _ -> throw({error, invalid_time_unit, UnitStr}) + end. 
+```
+
+### stop_once_test
+
+This test verifies that a one-time (`once`) task can be stopped via the `stop` endpoint while it is still executing.
+
+```erlang
+stop_once_test() ->
+    % Start a new node
+    Node = hb_http_server:start_node(),
+    % Set up a standard test worker (even though delay doesn't use its state)
+    TestWorkerPid = spawn(fun test_worker/0),
+    TestWorkerNameId = hb_util:human_id(crypto:strong_rand_bytes(32)),
+    hb_name:register({<<"test">>, TestWorkerNameId}, TestWorkerPid),
+    % Create a "once" task targeting the delay function
+    OnceUrlPath = <<"/~cron@1.0/once?test-id=", TestWorkerNameId/binary,
+        "&cron-path=/~test-device@1.0/delay">>,
+    {ok, OnceTaskID} = hb_http:get(Node, OnceUrlPath, #{}),
+    ?event({'cron:stop_once:test:created', {task_id, OnceTaskID}}),
+    % Give a short delay to ensure the task has started and called handle,
+    % entering the sleep
+    timer:sleep(200),
+    % Verify the once task worker process is registered and alive
+    OncePid = hb_name:lookup({<<"cron@1.0">>, OnceTaskID}),
+    ?assert(is_pid(OncePid), "Lookup did not return a PID"),
+    ?assert(erlang:is_process_alive(OncePid), "OnceWorker process died prematurely"),
+    % Call stop on the once task while it's sleeping
+    OnceStopPath = <<"/~cron@1.0/stop?task=", OnceTaskID/binary>>,
+    {ok, OnceStopResult} = hb_http:get(Node, OnceStopPath, #{}),
+    ?event({'cron:stop_once:test:stopped', {result, OnceStopResult}}),
+    % Verify success response from stop
+    ?assertMatch(#{<<"status">> := 200}, OnceStopResult),
+    % Verify name is unregistered
+    ?assertEqual(undefined, hb_name:lookup({<<"cron@1.0">>, OnceTaskID})),
+    % Allow a moment for the kill signal to be processed
+    timer:sleep(100),
+    % Verify process termination
+    ?assertNot(erlang:is_process_alive(OncePid), "Process not killed by stop"),
+    % Call stop again to verify 404 response
+    {error, <<"Task not found.">>} = hb_http:get(Node, OnceStopPath, #{}). 
+```
+
+### stop_every_test
+
+This test verifies that a recurring (`every`) task can be stopped via the `stop` endpoint, and that it ran at least once before being stopped.
+
+```erlang
+stop_every_test() ->
+    % Start a new node
+    Node = hb_http_server:start_node(),
+    % Set up a test worker process to hold state (counter)
+    TestWorkerPid = spawn(fun test_worker/0),
+    TestWorkerNameId = hb_util:human_id(crypto:strong_rand_bytes(32)),
+    hb_name:register({<<"test">>, TestWorkerNameId}, TestWorkerPid),
+    % Create an "every" task that calls the test worker
+    EveryUrlPath = <<"/~cron@1.0/every?test-id=", TestWorkerNameId/binary,
+        "&interval=500-milliseconds",
+        "&cron-path=/~test-device@1.0/increment_counter">>,
+    {ok, CronTaskID} = hb_http:get(Node, EveryUrlPath, #{}),
+    ?event({'cron:stop_every:test:created', {task_id, CronTaskID}}),
+    % Verify the cron worker process was registered and is alive
+    CronWorkerPid = hb_name:lookup({<<"cron@1.0">>, CronTaskID}),
+    ?assert(is_pid(CronWorkerPid)),
+    ?assert(erlang:is_process_alive(CronWorkerPid)),
+    % Wait a bit to ensure the cron worker has run a few times
+    timer:sleep(1000),
+    % Call stop on the cron task using its ID
+    EveryStopPath = <<"/~cron@1.0/stop?task=", CronTaskID/binary>>,
+    {ok, EveryStopResult} = hb_http:get(Node, EveryStopPath, #{}),
+    ?event({'cron:stop_every:test:stopped', {result, EveryStopResult}}),
+    % Verify success response
+    ?assertMatch(#{<<"status">> := 200}, EveryStopResult),
+    % Verify the cron task name is unregistered (lookup returns undefined)
+    ?assertEqual(undefined, hb_name:lookup({<<"cron@1.0">>, CronTaskID})),
+    % Allow a moment for the process termination signal to be processed
+    timer:sleep(100),
+    % Verify the cron worker process is terminated
+    ?assertNot(erlang:is_process_alive(CronWorkerPid)),
+    % Check the counter in the original test worker was incremented
+    TestWorkerPid ! 
{get, self()},
+    receive
+        {state, State = #{count := Count}} ->
+            ?event({'cron:stop_every:test:counter_state', {state, State}}),
+            ?assert(Count > 0)
+    after 1000 ->
+        throw(no_response_from_worker)
+    end,
+    % Call stop again using the same CronTaskID to verify the error
+    {error, <<"Task not found.">>} = hb_http:get(Node, EveryStopPath, #{}).
+```
+
+### once_executed_test
+
+This test verifies that a one-time task can be scheduled and executed.
+
+```erlang
+once_executed_test() ->
+    % start a new node
+    Node = hb_http_server:start_node(),
+    % spawn a worker on the new node that calls test_worker/0 which inits
+    % test_worker/1 with a state of undefined
+    PID = spawn(fun test_worker/0),
+    % generate a random id that we can then use later to lookup the worker
+    ID = hb_util:human_id(crypto:strong_rand_bytes(32)),
+    % register the worker with the id
+    hb_name:register({<<"test">>, ID}, PID),
+    % Construct the URL path with the dynamic ID
+    UrlPath = <<"/~cron@1.0/once?test-id=", ID/binary,
+        "&cron-path=/~test-device@1.0/update_state">>,
+    % this should call the worker via the test device
+    % the test device should look up the worker via the id given
+    {ok, _ReqMsgId} = hb_http:get(Node, UrlPath, #{}),
+    % wait for the request to be processed
+    timer:sleep(1000),
+    % send a message to the worker to get the state
+    PID ! {get, self()},
+    % receive the state from the worker
+    receive
+        {state, State} ->
+            ?event({once_executed_test_received_state, {state, State}}),
+            ?assertMatch(#{ <<"test-id">> := ID }, State)
+    after 1000 ->
+        FinalLookup = hb_name:lookup({<<"test">>, ID}),
+        ?event({timeout_waiting_for_worker, {pid, PID}, {lookup_result, FinalLookup}}),
+        throw(no_response_from_worker)
+    end.
+```
+
+### every_worker_loop_test
+
+This test verifies that a recurring task can be scheduled and executed. 
+ +```erlang +every_worker_loop_test() -> + Node = hb_http_server:start_node(), + PID = spawn(fun test_worker/0), + ID = hb_util:human_id(crypto:strong_rand_bytes(32)), + hb_name:register({<<"test">>, ID}, PID), + UrlPath = <<"/~cron@1.0/every?test-id=", ID/binary, + "&interval=500-milliseconds", + "&cron-path=/~test-device@1.0/increment_counter">>, + ?event({'cron:every:test:sendUrl', {url_path, UrlPath}}), + {ok, ReqMsgId} = hb_http:get(Node, UrlPath, #{}), + ?event({'cron:every:test:get_done', {req_id, ReqMsgId}}), + timer:sleep(1500), + PID ! {get, self()}, + % receive the state from the worker + receive + {state, State = #{count := C}} -> + ?event({'cron:every:test:received_state', {state, State}}), + ?assert(C >= 3) + after 1000 -> + FinalLookup = hb_name:lookup({<<"test">>, ID}), + ?event({'cron:every:test:timeout', {pid, PID}, {lookup_result, FinalLookup}}), + throw({test_timeout_waiting_for_state, {id, ID}}) + end. +``` + +### test_worker + +This is a helper function that is used to test the cron device. + +```erlang +test_worker() -> test_worker(#{count => 0}). +``` + +### test_worker + +This is a helper function that is used to test the cron device. + +```erlang +test_worker(State) -> + receive + {increment} -> + NewCount = maps:get(count, State, 0) + 1, + ?event({'test_worker:incremented', {new_count, NewCount}}), + test_worker(State#{count := NewCount}); + {update, NewState} -> + ?event({'test_worker:updated', {new_state, NewState}}), + test_worker(NewState); + {get, Pid} -> + Pid ! 
{state, State}, + test_worker(State) +``` + +--- + +*Generated from [dev_cron.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cron.erl)* diff --git a/docs/book/src/dev_cu.erl.md b/docs/book/src/dev_cu.erl.md new file mode 100644 index 000000000..4b9f4ce28 --- /dev/null +++ b/docs/book/src/dev_cu.erl.md @@ -0,0 +1,46 @@ +# dev_cu + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cu.erl) + +## Exported Functions + +- `execute/2` +- `push/2` + +--- + +### push + +```erlang +push(Msg, S = #{ assignment := Assignment, logger := _Logger }) -> + ?event( + {pushing_message, + {assignment, hb_util:id(Assignment, unsigned)}, + {message, hb_util:id(Msg, unsigned)} + } + ), + case hb_client:compute(Assignment, Msg) of + {ok, Results} -> + ?event(computed_results), + {ok, S#{ results => Results }}; + Error -> + throw({cu_error, Error}) + end. +``` + +### execute + +```erlang +execute(CarrierMsg, S) -> + MaybeBundle = ar_bundles:hd(CarrierMsg), + Store = hb_opts:get(store), + Wallet = hb:wallet(), + {ok, Results} = + case MaybeBundle of + #tx{data = #{ <<"body">> := _Msg, <<"assignment">> := Assignment }} -> + % TODO: Execute without needing to call the SU unnecessarily. +``` + +--- + +*Generated from [dev_cu.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cu.erl)* diff --git a/docs/book/src/dev_dedup.erl.md b/docs/book/src/dev_dedup.erl.md new file mode 100644 index 000000000..193fc9d17 --- /dev/null +++ b/docs/book/src/dev_dedup.erl.md @@ -0,0 +1,93 @@ +# dev_dedup + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_dedup.erl) + +A device that deduplicates messages in an evaluation stream, returning +status `skip` if the message has already been seen. +This device is typically used to ensure that a message is only executed +once, even if assigned multiple times, upon a `~process@1.0` evaluation. +It can, however, be used in many other contexts. 
+This device honors the `pass` key if it is present in the message. If so, +it will only run on the first pass. Additionally, the device supports +a `subject-key` key that allows the caller to specify the key whose ID +should be used for deduplication. If the `subject-key` key is not present, +the device will use the `body` of the request as the subject. If the key is +set to `request`, the device will use the entire request itself as the +subject. +This device runs on the first pass of the `compute` key call if executed +in a stack, and not in subsequent passes. Currently the device stores its +list of already seen items in memory, but at some point it will likely make +sense to drop them in the cache. + +--- + +## Exported Functions + +- `info/1` + +--- + +### info + +A device that deduplicates messages in an evaluation stream, returning + +```erlang +info(_M1) -> + #{ + default => fun handle/4, + exclude => [keys, set, id, commit] + }. +``` + +### handle + +Forward the keys and `set` functions to the message device, handle all + +```erlang +handle(<<"keys">>, M1, _M2, _Opts) -> + dev_message:keys(M1); +``` + +### handle + +Forward the keys and `set` functions to the message device, handle all + +```erlang +handle(<<"set">>, M1, M2, Opts) -> + dev_message:set(M1, M2, Opts); +``` + +### handle + +Forward the keys and `set` functions to the message device, handle all + +```erlang +handle(Key, M1, M2, Opts) -> + ?event({dedup_handle, {key, Key}, {msg1, M1}, {msg2, M2}}), + % Find the relevant parameters from the messages. We search for the + % `dedup-key' key in the first message, and use that value as the key to + % look for in the second message. +``` + +### dedup_test + +```erlang +dedup_test() -> + hb:init(), + % Create a stack with a dedup device and 2 devices that will append to a + % `Result' key. 
+``` + +### dedup_with_multipass_test + +```erlang +dedup_with_multipass_test() -> + % Create a stack with a dedup device and 2 devices that will append to a + % `Result' key and a `Multipass' device that will repeat the message for + % an additional pass. We want to ensure that Multipass is not hindered by + % the dedup device. +``` + +--- + +*Generated from [dev_dedup.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_dedup.erl)* diff --git a/docs/book/src/dev_delegated_compute.erl.md b/docs/book/src/dev_delegated_compute.erl.md new file mode 100644 index 000000000..eabf786ae --- /dev/null +++ b/docs/book/src/dev_delegated_compute.erl.md @@ -0,0 +1,242 @@ +# dev_delegated_compute + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_delegated_compute.erl) + +Simple wrapper module that enables compute on remote machines, +implementing the JSON-Iface. This can be used either as a standalone, to +bring trusted results into the local node, or as the `Execution-Device` of +an AO process. + +--- + +## Exported Functions + +- `compute/3` +- `init/3` +- `normalize/3` +- `snapshot/3` + +--- + +### init + +Simple wrapper module that enables compute on remote machines, +Initialize or normalize the compute-lite device. For now, we don't + +```erlang +init(Msg1, _Msg2, _Opts) -> + {ok, Msg1}. +``` + +### normalize + +We assume that the compute engine stores its own internal state, +Call the delegated server to compute the result. The endpoint is + +```erlang +normalize(Msg1, _Msg2, Opts) -> + hb_ao:set(Msg1, #{ <<"snapshot">> => unset }, Opts). +``` + +### compute + +We assume that the compute engine stores its own internal state, +Call the delegated server to compute the result. The endpoint is + +```erlang +compute(Msg1, Msg2, Opts) -> + OutputPrefix = dev_stack:prefix(Msg1, Msg2, Opts), + % Extract the process ID - this identifies which process to run compute + % against. 
+``` + +### do_compute + +Execute computation on a remote machine via relay and the JSON-Iface. + +```erlang +do_compute(ProcID, Msg2, Opts) -> + ?event({do_compute_msg, {req, Msg2}}), + Slot = hb_ao:get(<<"slot">>, Msg2, Opts), + {ok, AOS2 = #{ <<"body">> := Body }} = + dev_scheduler_formats:assignments_to_aos2( + ProcID, + #{ + Slot => Msg2 + }, + false, + Opts + ), + ?event({do_compute_body, {aos2, {string, Body}}}), + % Send to external CU via relay using /result endpoint + Response = + do_relay( + <<"POST">>, + <<"/result/", (hb_util:bin(Slot))/binary, "?process-id=", ProcID/binary>>, + Body, + AOS2, + Opts#{ + hashpath => ignore, + cache_control => [<<"no-store">>, <<"no-cache">>] + } + ), + extract_json_res(Response, Opts). +``` + +### do_dryrun + +Execute dry-run computation on a remote machine via relay and use + +```erlang +do_dryrun(ProcID, Msg2, Opts) -> + ?event({do_dryrun_msg, {req, Msg2}}), + % Remove commitments from the message before sending to the external CU + Body = + hb_json:encode( + dev_json_iface:message_to_json_struct( + hb_maps:without([<<"commitments">>], Msg2, Opts), + Opts + ) + ), + ?event({do_dryrun_body, {string, Body}}), + % Send to external CU via relay using /dry-run endpoint + Response = do_relay( + <<"POST">>, + <<"/dry-run?process-id=", ProcID/binary>>, + Body, + #{}, + Opts#{ + hashpath => ignore, + cache_control => [<<"no-store">>, <<"no-cache">>] + } + ), + extract_json_res(Response, Opts). +``` + +### do_relay + +```erlang +do_relay(Method, Path, Body, AOS2, Opts) -> + hb_ao:resolve( + #{ + <<"device">> => <<"relay@1.0">>, + <<"content-type">> => <<"application/json">> + }, + AOS2#{ + <<"path">> => <<"call">>, + <<"relay-method">> => Method, + <<"relay-body">> => Body, + <<"relay-path">> => Path, + <<"content-type">> => <<"application/json">> + }, + Opts + ). +``` + +### extract_json_res + +Extract the JSON response from the delegated compute response. 
+ +```erlang +extract_json_res(Response, Opts) -> + case Response of + {ok, Res} -> + JSONRes = hb_ao:get(<<"body">>, Res, Opts), + ?event({ + delegated_compute_res_metadata, + {req, hb_maps:without([<<"body">>], Res, Opts)} + }), + {ok, JSONRes}; + {Err, Error} when Err == error; Err == failure -> + {error, Error} + end. +``` + +### get_process_id + +```erlang +get_process_id(Msg1, Msg2, Opts) -> + RawProcessID = dev_process:process_id(Msg1, #{}, Opts), + case RawProcessID of + not_found -> hb_ao:get(<<"process-id">>, Msg2, Opts); + ProcID -> ProcID + end. +``` + +### handle_relay_response + +Handle the response from the delegated compute server. Assumes that the + +```erlang +handle_relay_response(Msg1, Msg2, Opts, Response, OutputPrefix, ProcessID, Slot) -> + case Response of + {ok, JSONRes} -> + ?event( + {compute_lite_res, + {process_id, ProcessID}, + {slot, Slot}, + {json_res, {string, JSONRes}}, + {req, Msg2} + } + ), + {ok, Msg} = dev_json_iface:json_to_message(JSONRes, Opts), + {ok, + hb_ao:set( + Msg1, + #{ + <> => Msg, + <> => + #{ + <<"content-type">> => <<"application/json">>, + <<"body">> => JSONRes + } + }, + Opts + ) + }; + {error, Error} -> + {error, Error} + end. 
+``` + +### snapshot + +Generate a snapshot of a running computation by calling the + +```erlang +snapshot(Msg, Msg2, Opts) -> + ?event({snapshotting, {req, Msg2}}), + ProcID = dev_process:process_id(Msg, #{}, Opts), + Res = + hb_ao:resolve( + #{ + <<"device">> => <<"relay@1.0">>, + <<"content-type">> => <<"application/json">> + }, + #{ + <<"path">> => <<"call">>, + <<"relay-method">> => <<"POST">>, + <<"relay-path">> => <<"/snapshot/", ProcID/binary>>, + <<"content-type">> => <<"application/json">>, + <<"body">> => <<"{}">> + }, + Opts#{ + hashpath => ignore, + cache_control => [<<"no-store">>, <<"no-cache">>] + } + ), + ?event({snapshotting_result, Res}), + case Res of + {ok, Response} -> + {ok, Response}; + {error, Error} -> + {ok, + #{ + <<"error">> => <<"No checkpoint produced.">>, + <<"error-details">> => Error + }} +``` + +--- + +*Generated from [dev_delegated_compute.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_delegated_compute.erl)* diff --git a/docs/book/src/dev_faff.erl.md b/docs/book/src/dev_faff.erl.md new file mode 100644 index 000000000..e09e3d64a --- /dev/null +++ b/docs/book/src/dev_faff.erl.md @@ -0,0 +1,62 @@ +# dev_faff + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_faff.erl) + +A module that implements a 'friends and family' pricing policy. +It will allow users to process requests only if their addresses are +in the allow-list for the node. +Fundamentally against the spirit of permissionlessness, but it is useful if +you are running a node for your own purposes and would not like to allow +others to make use of it -- even for a fee. It also serves as a useful +example of how to implement a custom pricing policy, as it implements stubs +for both the pricing and ledger P4 APIs. + +--- + +## Exported Functions + +- `charge/3` +- `estimate/3` + +--- + +### estimate + +A module that implements a 'friends and family' pricing policy. +Decide whether or not to service a request from a given address. 
+ +```erlang +estimate(_, Msg, NodeMsg) -> + ?event(payment, {estimate, {msg, Msg}}), + % Check if the address is in the allow-list. +``` + +### is_admissible + +Check whether all of the signers of the request are in the allow-list. + +```erlang +is_admissible(Msg, NodeMsg) -> + AllowList = hb_opts:get(faff_allow_list, [], NodeMsg), + Req = hb_ao:get(<<"request">>, Msg, NodeMsg), + Signers = hb_message:signers(Req, NodeMsg), + ?event(payment, {is_admissible, {signers, Signers}, {allow_list, AllowList}}), + lists:all( + fun(Signer) -> lists:member(Signer, AllowList) end, + Signers + ). +``` + +### charge + +Charge the user's account if the request is allowed. + +```erlang +charge(_, Req, _NodeMsg) -> + ?event(payment, {charge, Req}), + {ok, true}. +``` + +--- + +*Generated from [dev_faff.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_faff.erl)* diff --git a/docs/book/src/dev_genesis_wasm.erl.md b/docs/book/src/dev_genesis_wasm.erl.md new file mode 100644 index 000000000..b906f31bc --- /dev/null +++ b/docs/book/src/dev_genesis_wasm.erl.md @@ -0,0 +1,756 @@ +# dev_genesis_wasm + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_genesis_wasm.erl) + +A device that mimics an environment suitable for `legacynet` AO +processes, using HyperBEAM infrastructure. This allows existing `legacynet` +AO process definitions to be used in HyperBEAM. + +--- + +## Exported Functions + +- `compute/3` +- `init/3` +- `normalize/3` +- `snapshot/3` + +--- + +### init + +A device that mimics an environment suitable for `legacynet` AO +Initialize the device. +Normalize the device. + +```erlang +init(Msg, _Msg2, _Opts) -> {ok, Msg}. +``` + +### normalize + +A device that mimics an environment suitable for `legacynet` AO +Initialize the device. +Normalize the device. + +```erlang +normalize(Msg, Msg2, Opts) -> + dev_delegated_compute:normalize(Msg, Msg2, Opts). +``` + +### compute + +Genesis-wasm device compute handler. 
+ +```erlang +compute(Msg, Msg2, Opts) -> + % Validate whether the genesis-wasm feature is enabled. +``` + +### snapshot + +Snapshot the state of the process via the `delegated-compute@1.0` device. + +```erlang +snapshot(Msg, Msg2, Opts) -> + delegate_request(Msg, Msg2, Opts). +``` + +### delegate_request + +Proxy a request to the delegated-compute@1.0 device, ensuring that + +```erlang +delegate_request(Msg, Msg2, Opts) -> + % Validate whether the genesis-wasm feature is enabled. +``` + +### do_compute + +Handle normal compute execution with state persistence (GET method). + +```erlang +do_compute(Msg, Msg2, Opts) -> + % Resolve the `delegated-compute@1.0' device. +``` + +### ensure_started + +Ensure the local `genesis-wasm@1.0` is live. If it not, start it. + +```erlang +ensure_started(Opts) -> + % Check if the `genesis-wasm@1.0' device is already running. The presence + % of the registered name implies its availability. +``` + +### is_genesis_wasm_server_running + +Check if the genesis-wasm server is running, using the cached process ID + +```erlang +is_genesis_wasm_server_running(Opts) -> + case get(genesis_wasm_pid) of + undefined -> + ?event(genesis_wasm_pinging_server), + Parent = self(), + PID = spawn( + fun() -> + ?event({genesis_wasm_get_info_endpoint, {worker, self()}}), + Parent ! {ok, self(), status(Opts)} + end + ), + receive + {ok, PID, Status} -> + put(genesis_wasm_pid, Status), + ?event({genesis_wasm_received_status, Status}), + Status + after ?STATUS_TIMEOUT -> + ?event({genesis_wasm_status_check, timeout}), + erlang:exit(PID, kill), + false + end; + _ -> true + end. 
+``` + +### status + +Check if the genesis-wasm server is running by requesting its status + +```erlang +status(Opts) -> + ServerPort = + integer_to_binary( + hb_opts:get( + genesis_wasm_port, + 6363, + Opts + ) + ), + try hb_http:get(<<"http://localhost:", ServerPort/binary, "/status">>, Opts) of + {ok, Res} -> + ?event({genesis_wasm_status_check, {res, Res}}), + true; + Err -> + ?event({genesis_wasm_status_check, {err, Err}}), + false + catch + _:Err -> + ?event({genesis_wasm_status_check, {error, Err}}), + false + end. +``` + +### collect_events + +Collect events from the port and log them. + +```erlang +collect_events(Port) -> + collect_events(Port, <<>>). +``` + +### collect_events + +```erlang +collect_events(Port, Acc) -> + receive + {Port, {data, Data}} -> + collect_events(Port, + log_server_events(<>) + ); + stop -> + port_close(Port), + ?event(genesis_wasm_stopped, {pid, self()}), + ok + end. +``` + +### log_server_events + +Log lines of output from the genesis-wasm server. + +```erlang +log_server_events(Bin) when is_binary(Bin) -> + log_server_events(binary:split(Bin, <<"\n">>, [global])); +``` + +### log_server_events + +Log lines of output from the genesis-wasm server. + +```erlang +log_server_events([Remaining]) -> Remaining; +``` + +### log_server_events + +Log lines of output from the genesis-wasm server. + +```erlang +log_server_events([Line | Rest]) -> + ?event(genesis_wasm_server, {server_logged, {string, Line}}), + log_server_events(Rest). +``` + +### test_base_process + +```erlang +test_base_process() -> + test_base_process(#{}). 
+``` + +### test_base_process + +```erlang +test_base_process(Opts) -> + Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), + Address = hb_util:human_id(ar_wallet:to_address(Wallet)), + hb_message:commit(#{ + <<"device">> => <<"process@1.0">>, + <<"scheduler-device">> => <<"scheduler@1.0">>, + <<"scheduler-location">> => Address, + <<"type">> => <<"Process">>, + <<"test-random-seed">> => rand:uniform(1337) + }, #{ priv_wallet => Wallet }). +``` + +### test_wasm_process + +```erlang +test_wasm_process(WASMImage) -> + test_wasm_process(WASMImage, #{}). +``` + +### test_wasm_process + +```erlang +test_wasm_process(WASMImage, Opts) -> + Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), + #{ <<"image">> := WASMImageID } = dev_wasm:cache_wasm_image(WASMImage, Opts), + hb_message:commit( + maps:merge( + hb_message:uncommitted(test_base_process(Opts)), + #{ + <<"execution-device">> => <<"stack@1.0">>, + <<"device-stack">> => [<<"WASM-64@1.0">>], + <<"image">> => WASMImageID + } + ), + #{ priv_wallet => Wallet } + ). +``` + +### test_wasm_stack_process + +```erlang +test_wasm_stack_process(Opts, Stack) -> + Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), + Address = hb_util:human_id(ar_wallet:to_address(Wallet)), + WASMProc = test_wasm_process(<<"test/aos-2-pure-xs.wasm">>, Opts), + hb_message:commit( + maps:merge( + hb_message:uncommitted(WASMProc), + #{ + <<"device-stack">> => Stack, + <<"execution-device">> => <<"genesis-wasm@1.0">>, + <<"scheduler-device">> => <<"scheduler@1.0">>, + <<"patch-from">> => <<"/results/outbox">>, + <<"passes">> => 2, + <<"stack-keys">> => + [ + <<"init">>, + <<"compute">>, + <<"snapshot">>, + <<"normalize">>, + <<"compute">> + ], + <<"scheduler">> => Address, + <<"authority">> => Address, + <<"module">> => <<"URgYpPQzvxxfYQtjrIQ116bl3YBfcImo3JEnNo8Hlrk">>, + <<"data-protocol">> => <<"ao">>, + <<"type">> => <<"Process">> + } + ), + #{ priv_wallet => Wallet } + ). 
+``` + +### test_genesis_wasm_process + +```erlang +test_genesis_wasm_process() -> + Opts = #{ + genesis_wasm_db_dir => "cache-mainnet-test/genesis-wasm", + genesis_wasm_checkpoints_dir => "cache-mainnet-test/genesis-wasm/checkpoints", + genesis_wasm_log_level => "error", + genesis_wasm_port => 6363, + execution_device => <<"genesis-wasm@1.0">> + }, + Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), + Address = hb_util:human_id(ar_wallet:to_address(Wallet)), + WASMProc = test_wasm_process(<<"test/aos-2-pure-xs.wasm">>, Opts), + hb_message:commit( + maps:merge( + hb_message:uncommitted(WASMProc), + #{ + <<"execution-device">> => <<"genesis-wasm@1.0">>, + <<"scheduler-device">> => <<"scheduler@1.0">>, + <<"push-device">> => <<"push@1.0">>, + <<"patch-from">> => <<"/results/outbox">>, + <<"passes">> => 1, + <<"scheduler">> => Address, + <<"authority">> => Address, + <<"module">> => <<"URgYpPQzvxxfYQtjrIQ116bl3YBfcImo3JEnNo8Hlrk">>, + <<"data-protocol">> => <<"ao">>, + <<"type">> => <<"Process">> + }), + #{ priv_wallet => Wallet } + ). +``` + +### schedule_test_message + +```erlang +schedule_test_message(Msg1, Text) -> + schedule_test_message(Msg1, Text, #{}). +``` + +### schedule_test_message + +```erlang +schedule_test_message(Msg1, Text, MsgBase) -> + Wallet = hb:wallet(), + UncommittedBase = hb_message:uncommitted(MsgBase), + Msg2 = + hb_message:commit(#{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + hb_message:commit( + UncommittedBase#{ + <<"type">> => <<"Message">>, + <<"test-label">> => Text + }, + #{ priv_wallet => Wallet } + ) + }, + #{ priv_wallet => Wallet } + ), + hb_ao:resolve(Msg1, Msg2, #{}). +``` + +### schedule_aos_call + +```erlang +schedule_aos_call(Msg1, Code) -> + schedule_aos_call(Msg1, Code, <<"Eval">>, #{}). +``` + +### schedule_aos_call + +```erlang +schedule_aos_call(Msg1, Code, Action) -> + schedule_aos_call(Msg1, Code, Action, #{}). 
+``` + +### schedule_aos_call + +```erlang +schedule_aos_call(Msg1, Code, Action, Opts) -> + Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), + ProcID = hb_message:id(Msg1, all), + Msg2 = + hb_message:commit( + #{ + <<"action">> => Action, + <<"data">> => Code, + <<"target">> => ProcID, + <<"timestamp">> => os:system_time(millisecond) + }, + #{ priv_wallet => Wallet } + ), + schedule_test_message(Msg1, <<"TEST MSG">>, Msg2). +``` + +### spawn_and_execute_slot_test_ + +```erlang +spawn_and_execute_slot_test_() -> + { timeout, 900, fun spawn_and_execute_slot/0 }. +``` + +### spawn_and_execute_slot + +```erlang +spawn_and_execute_slot() -> + application:ensure_all_started(hb), + Opts = #{ + priv_wallet => hb:wallet(), + cache_control => <<"always">>, + store => hb_opts:get(store) + }, + Msg1 = test_genesis_wasm_process(), + hb_cache:write(Msg1, Opts), + {ok, _SchedInit} = + hb_ao:resolve( + Msg1, + #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"schedule">>, + <<"body">> => Msg1 + }, + Opts + ), + {ok, _} = schedule_aos_call(Msg1, <<"return 1+1">>), + {ok, _} = schedule_aos_call(Msg1, <<"return 2+2">>), + {ok, SchedulerRes} = + hb_ao:resolve(Msg1, #{ + <<"method">> => <<"GET">>, + <<"path">> => <<"schedule">> + }, Opts), + % Verify process message is scheduled first + ?assertMatch( + <<"Process">>, + hb_ao:get(<<"assignments/0/body/type">>, SchedulerRes) + ), + % Verify messages are scheduled + ?assertMatch( + <<"return 1+1">>, + hb_ao:get(<<"assignments/1/body/data">>, SchedulerRes) + ), + ?assertMatch( + <<"return 2+2">>, + hb_ao:get(<<"assignments/2/body/data">>, SchedulerRes) + ), + {ok, Result} = hb_ao:resolve(Msg1, #{ <<"path">> => <<"now">> }, Opts), + ?assertEqual(<<"4">>, hb_ao:get(<<"results/data">>, Result)). +``` + +### compare_result_genesis_wasm_and_wasm_test_ + +```erlang +compare_result_genesis_wasm_and_wasm_test_() -> + { timeout, 900, fun compare_result_genesis_wasm_and_wasm/0 }. 
+``` + +### compare_result_genesis_wasm_and_wasm + +```erlang +compare_result_genesis_wasm_and_wasm() -> + application:ensure_all_started(hb), + Opts = #{ + priv_wallet => hb:wallet(), + cache_control => <<"always">>, + store => hb_opts:get(store) + }, + % Test with genesis-wasm + MsgGenesisWasm = test_genesis_wasm_process(), + hb_cache:write(MsgGenesisWasm, Opts), + {ok, _SchedInitGenesisWasm} = + hb_ao:resolve( + MsgGenesisWasm, + #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"schedule">>, + <<"body">> => MsgGenesisWasm + }, + Opts + ), + % Test with wasm + MsgWasm = test_wasm_stack_process(Opts, [ + <<"WASI@1.0">>, + <<"JSON-Iface@1.0">>, + <<"WASM-64@1.0">>, + <<"Multipass@1.0">> + ]), + hb_cache:write(MsgWasm, Opts), + {ok, _SchedInitWasm} = + hb_ao:resolve( + MsgWasm, + #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"schedule">>, + <<"body">> => MsgWasm + }, + Opts + ), + % Schedule messages + {ok, _} = schedule_aos_call(MsgGenesisWasm, <<"return 1+1">>), + {ok, _} = schedule_aos_call(MsgGenesisWasm, <<"return 2+2">>), + {ok, _} = schedule_aos_call(MsgWasm, <<"return 1+1">>), + {ok, _} = schedule_aos_call(MsgWasm, <<"return 2+2">>), + % Get results + {ok, ResultGenesisWasm} = + hb_ao:resolve( + MsgGenesisWasm, + #{ <<"path">> => <<"now">> }, + Opts + ), + {ok, ResultWasm} = + hb_ao:resolve( + MsgWasm, + #{ <<"path">> => <<"now">> }, + Opts + ), + ?assertEqual( + hb_ao:get(<<"results/data">>, ResultGenesisWasm), + hb_ao:get(<<"results/data">>, ResultWasm) + ). +``` + +### send_message_between_genesis_wasm_processes_test_ + +```erlang +send_message_between_genesis_wasm_processes_test_() -> + { timeout, 900, fun send_message_between_genesis_wasm_processes/0 }. 
+``` + +### send_message_between_genesis_wasm_processes + +```erlang +send_message_between_genesis_wasm_processes() -> + application:ensure_all_started(hb), + Opts = #{ + priv_wallet => hb:wallet(), + cache_control => <<"always">>, + store => hb_opts:get(store) + }, + % Create receiver process with handler + MsgReceiver = test_genesis_wasm_process(), + hb_cache:write(MsgReceiver, Opts), + ProcId = dev_process:process_id(MsgReceiver, #{}, #{}), + {ok, _SchedInitReceiver} = + hb_ao:resolve( + MsgReceiver, + #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"schedule">>, + <<"body">> => MsgReceiver + }, + Opts + ), + schedule_aos_call(MsgReceiver, <<"Number = 10">>), + schedule_aos_call(MsgReceiver, <<" + Handlers.add('foo', function(msg) + print(\"Number: \" .. Number * 2) + return Number * 2 end) + ">>), + schedule_aos_call(MsgReceiver, <<"return Number">>), + {ok, ResultReceiver} = hb_ao:resolve(MsgReceiver, <<"now">>, Opts), + ?assertEqual(<<"10">>, hb_ao:get(<<"results/data">>, ResultReceiver)), + % Create sender process to send message to receiver + MsgSender = test_genesis_wasm_process(), + hb_cache:write(MsgSender, Opts), + {ok, _SchedInitSender} = + hb_ao:resolve( + MsgSender, + #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"schedule">>, + <<"body">> => MsgSender + }, + Opts + ), + {ok, SendMsgToReceiver} = + schedule_aos_call( + MsgSender, + <<"Send({ Target = \"", ProcId/binary, "\", Action = \"foo\" })">> + ), + {ok, ResultSender} = hb_ao:resolve(MsgSender, <<"now">>, Opts), + {ok, Slot} = hb_ao:resolve(SendMsgToReceiver, <<"slot">>, Opts), + {ok, Res} = + hb_ao:resolve( + MsgSender, + #{ + <<"path">> => <<"push">>, + <<"slot">> => Slot, + <<"result-depth">> => 1 + }, + Opts + ), + % Get schedule for receiver + {ok, ScheduleReceiver} = + hb_ao:resolve( + MsgReceiver, + #{ + <<"method">> => <<"GET">>, + <<"path">> => <<"schedule">> + }, + Opts + ), + ?assertEqual( + <<"foo">>, + hb_ao:get(<<"assignments/4/body/action">>, ScheduleReceiver) + ), + {ok, 
NewResultReceiver} = hb_ao:resolve(MsgReceiver, <<"now">>, Opts), + ?assertEqual( + <<"Number: 20">>, + hb_ao:get(<<"results/data">>, NewResultReceiver) + ). +``` + +### dryrun_genesis_wasm_test_ + +```erlang +dryrun_genesis_wasm_test_() -> + { timeout, 900, fun dryrun_genesis_wasm/0 }. +``` + +### dryrun_genesis_wasm + +```erlang +dryrun_genesis_wasm() -> + application:ensure_all_started(hb), + Opts = #{ + priv_wallet => hb:wallet(), + cache_control => <<"always">>, + store => hb_opts:get(store) + }, + % Set up process with increment handler to receive messages + ProcReceiver = test_genesis_wasm_process(), + hb_cache:write(ProcReceiver, #{}), + {ok, _SchedInit1} = + hb_ao:resolve( + ProcReceiver, + #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"schedule">>, + <<"body">> => ProcReceiver + }, + Opts + ), + ProcReceiverId = dev_process:process_id(ProcReceiver, #{}, #{}), + % Initialize increment handler + {ok, _} = schedule_aos_call(ProcReceiver, <<" + Number = Number or 5 + Handlers.add('Increment', function(msg) + Number = Number + 1 + ao.send({ Target = msg.From, Data = 'The current number is ' .. Number .. '!' }) + return 'The current number is ' .. Number .. '!' 
+ end) + ">>), + % Ensure Handlers were properly added + schedule_aos_call(ProcReceiver, <<"return #Handlers.list">>), + {ok, NumHandlers} = + hb_ao:resolve( + ProcReceiver, + <<"now/results/data">>, + Opts + ), + % _eval, _default, Increment + ?assertEqual(<<"3">>, NumHandlers), + schedule_aos_call(ProcReceiver, <<"return Number">>), + {ok, InitialNumber} = + hb_ao:resolve( + ProcReceiver, + <<"now/results/data">>, + Opts + ), + % Number is initialized to 5 + ?assertEqual(<<"5">>, InitialNumber), + % Set up sender process to send Action: Increment to receiver + ProcSender = test_genesis_wasm_process(), + hb_cache:write(ProcSender, #{}), + {ok, _SchedInit2} = hb_ao:resolve( + ProcSender, + #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"schedule">>, + <<"body">> => ProcSender + }, + Opts + ), + % First increment + push + {ok, ToPush} = + schedule_aos_call( + ProcSender, + << + "Send({ Target = \"", + (ProcReceiverId)/binary, + "\", Action = \"Increment\" })" + >> + ), + SlotToPush = hb_ao:get(<<"slot">>, ToPush, Opts), + ?assertEqual(1, SlotToPush), + {ok, PushRes1} = + hb_ao:resolve( + ProcSender, + #{ + <<"path">> => <<"push">>, + <<"slot">> => SlotToPush, + <<"result-depth">> => 1 + }, + Opts + ), + % Check that number incremented normally + schedule_aos_call(ProcReceiver, <<"return Number">>), + {ok, AfterIncrementResult} = + hb_ao:resolve( + ProcReceiver, + <<"now/results/data">>, + Opts + ), + ?assertEqual(<<"6">>, AfterIncrementResult), + % Send another increment and push it + {ok, ToPush2} = + schedule_aos_call( + ProcSender, + << + "Send({ Target = \"", + (ProcReceiverId)/binary, + "\", Action = \"Increment\" })" + >> + ), + SlotToPush2 = hb_ao:get(<<"slot">>, ToPush2, Opts), + ?assertEqual(3, SlotToPush2), + {ok, PushRes2} = + hb_ao:resolve( + ProcSender, + #{ + <<"path">> => <<"push">>, + <<"slot">> => SlotToPush2, + <<"result-depth">> => 1 + }, + Opts + ), + % Check that number incremented normally + schedule_aos_call(ProcReceiver, <<"return 
Number">>), + {ok, AfterIncrementResult2} = + hb_ao:resolve( + ProcReceiver, + <<"now/results/data">>, + Opts + ), + ?assertEqual(<<"7">>, AfterIncrementResult2), + % Test dryrun by calling compute with no assignment + % Should return result without changing state + DryrunMsg = + hb_message:commit( + #{ + <<"path">> => <<"as/compute">>, + <<"as-device">> => <<"execution">>, + <<"action">> => <<"Increment">>, + <<"target">> => ProcReceiverId + }, + Opts + ), + {ok, DryrunResult} = hb_ao:resolve(ProcReceiver, DryrunMsg, Opts), + {ok, DryrunData} = + hb_ao:resolve(DryrunResult, <<"results/outbox/1/Data">>, Opts), + ?assertEqual(<<"The current number is 8!">>, DryrunData), + % Ensure that number did not increment + schedule_aos_call(ProcReceiver, <<"return Number">>), + {ok, AfterDryrunResult} = + hb_ao:resolve( + ProcReceiver, + <<"now/results/data">>, + Opts + ), + ?assertEqual(<<"7">>, AfterDryrunResult). +``` + +--- + +*Generated from [dev_genesis_wasm.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_genesis_wasm.erl)* diff --git a/docs/book/src/dev_green_zone.erl.md b/docs/book/src/dev_green_zone.erl.md new file mode 100644 index 000000000..f2e6256e5 --- /dev/null +++ b/docs/book/src/dev_green_zone.erl.md @@ -0,0 +1,342 @@ +# dev_green_zone + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_green_zone.erl) + +The green zone device, which provides secure communication and identity +management between trusted nodes. +It handles node initialization, joining existing green zones, key exchange, +and node identity cloning. All operations are protected by hardware +commitment and encryption. + +--- + +## Exported Functions + +- `become/3` +- `info/1` +- `info/3` +- `init/3` +- `is_trusted/3` +- `join/3` +- `key/3` + +--- + +### info + +The green zone device, which provides secure communication and identity +Controls which functions are exposed via the device API. 
+ +```erlang +info(_) -> + #{ exports => [info, init, join, become, key, is_trusted] }. +``` + +### info + +Provides information about the green zone device and its API. +Provides the default required options for a green zone. +Replace values of <<"self">> in a configuration map with corresponding values from Opts. +Returns `true` if the request is signed by a trusted node. + +```erlang +-spec replace_self_values(Config :: map(), Opts :: map()) -> map(). +replace_self_values(Config, Opts) -> + maps:map( + fun(Key, Value) -> + case Value of + <<"self">> -> + hb_opts:get(Key, not_found, Opts); + _ -> + Value + end + end, + Config + ). +``` + +```erlang +info(_Msg1, _Msg2, _Opts) -> + InfoBody = #{ + <<"description">> => + <<"Green Zone secure communication and identity management for trusted nodes">>, + <<"version">> => <<"1.0">>, + <<"api">> => #{ + <<"info">> => #{ + <<"description">> => <<"Get device info">> + }, + <<"init">> => #{ + <<"description">> => <<"Initialize the green zone">>, + <<"details">> => + <<"Sets up the node's cryptographic identity with wallet and AES key">> + }, + <<"join">> => #{ + <<"description">> => <<"Join an existing green zone">>, + <<"required_node_opts">> => #{ + <<"green_zone_peer_location">> => <<"Target peer's address">>, + <<"green_zone_peer_id">> => <<"Target peer's unique identifier">> + } + }, + <<"key">> => #{ + <<"description">> => <<"Retrieve and encrypt the node's private key">>, + <<"details">> => + <<"Returns the node's private key encrypted with the shared AES key">> + }, + <<"become">> => #{ + <<"description">> => <<"Clone the identity of a target node">>, + <<"required_node_opts">> => #{ + <<"green_zone_peer_location">> => <<"Target peer's address">>, + <<"green_zone_peer_id">> => <<"Target peer's unique identifier">> + } + } + } + }, + {ok, #{<<"status">> => 200, <<"body">> => InfoBody}}. +%% +%% +%% +%% +``` + +### is_trusted + +Provides information about the green zone device and its API. 
+Provides the default required options for a green zone. +Replace values of <<"self">> in a configuration map with corresponding values from Opts. +Returns `true` if the request is signed by a trusted node. + +```erlang +-spec replace_self_values(Config :: map(), Opts :: map()) -> map(). +replace_self_values(Config, Opts) -> + maps:map( + fun(Key, Value) -> + case Value of + <<"self">> -> + hb_opts:get(Key, not_found, Opts); + _ -> + Value + end + end, + Config + ). +``` + +```erlang +is_trusted(_M1, Req, Opts) -> + Signers = hb_message:signers(Req, Opts), + {ok, + hb_util:bin( + lists:any( + fun(Signer) -> + lists:member( + Signer, + maps:keys(hb_opts:get(trusted_nodes, #{}, Opts)) + ) + end, + Signers + ) + ) + }. +``` + +### join + +Initiates the join process for a node to enter an existing green zone. + +```erlang +-spec join(M1 :: term(), M2 :: term(), Opts :: map()) -> + {ok, map()} | {error, binary()}. +``` + +```erlang +join(M1, M2, Opts) -> + ?event(green_zone, {join, start}), + PeerLocation = hb_opts:get(<<"green_zone_peer_location">>, undefined, Opts), + PeerID = hb_opts:get(<<"green_zone_peer_id">>, undefined, Opts), + Identities = hb_opts:get(identities, #{}, Opts), + HasGreenZoneIdentity = maps:is_key(<<"green-zone">>, Identities), + ?event(green_zone, {join_peer, PeerLocation, PeerID, HasGreenZoneIdentity}), + if (not HasGreenZoneIdentity) andalso (PeerLocation =/= undefined) andalso (PeerID =/= undefined) -> + join_peer(PeerLocation, PeerID, M1, M2, Opts); + true -> + validate_join(M1, M2, hb_cache:ensure_all_loaded(Opts, Opts)) + end. +``` + +### key + +Encrypts and provides the node's private key for secure sharing. + +```erlang +-spec key(M1 :: term(), M2 :: term(), Opts :: map()) -> + {ok, map()} | {error, binary()}. +``` + +```erlang +key(_M1, _M2, Opts) -> + ?event(green_zone, {get_key, start}), + % Retrieve the shared AES key and the node's wallet. +``` + +### become + +Clones the identity of a target node in the green zone. 
+ +```erlang +-spec become(M1 :: term(), M2 :: term(), Opts :: map()) -> + {ok, map()} | {error, binary()}. +``` + +```erlang +become(_M1, _M2, Opts) -> + ?event(green_zone, {become, start}), + % 1. Retrieve the target node's address from the incoming message. +``` + +### finalize_become + +```erlang +finalize_become(KeyResp, NodeLocation, NodeID, GreenZoneAES, Opts) -> + % 4. Decode the response to obtain the encrypted key and IV. +``` + +### join_peer + +Processes a join request to a specific peer node. + +```erlang +-spec join_peer( + PeerLocation :: binary(), + PeerID :: binary(), + M1 :: term(), + M2 :: term(), + Opts :: map()) -> {ok, map()} | {error, map() | binary()}. +``` + +```erlang +join_peer(PeerLocation, PeerID, _M1, _M2, InitOpts) -> + % Check here if the node is already part of a green zone. +``` + +### validate_join + +Validates an incoming join request from another node. + +```erlang +-spec validate_join(M1 :: term(), Req :: map(), Opts :: map()) -> + {ok, map()} | {error, binary()}. +``` + +```erlang +validate_join(M1, Req, Opts) -> + case validate_peer_opts(Req, Opts) of + true -> do_nothing; + false -> throw(invalid_join_request) + end, + ?event(green_zone, {join, start}), + % Retrieve the commitment report and address from the join request. +``` + +### add_trusted_node + +Adds a node to the trusted nodes list with its commitment report. + +```erlang +-spec add_trusted_node( + NodeAddr :: binary(), + Report :: map(), + RequesterPubKey :: term(), Opts :: map()) -> ok. +``` + +```erlang +add_trusted_node(NodeAddr, Report, RequesterPubKey, Opts) -> + % Retrieve the current trusted nodes map. +``` + +### decrypt_zone_key + +Decrypts an AES key using the node's RSA private key. + +```erlang +-spec decrypt_zone_key(EncZoneKey :: binary(), Opts :: map()) -> + {ok, binary()} | {error, binary()}. 
+``` + +```erlang +decrypt_zone_key(EncZoneKey, Opts) -> + % Decode if necessary + RawEncKey = case is_binary(EncZoneKey) of + true -> base64:decode(EncZoneKey); + false -> EncZoneKey + end, + % Get wallet and extract key components + {{_KeyType = {rsa, E}, Priv, Pub}, _PubKey} = + hb_opts:get(priv_wallet, #{}, Opts), + % Create RSA private key record + RSAPrivKey = #'RSAPrivateKey'{ + publicExponent = E, + modulus = crypto:bytes_to_integer(Pub), + privateExponent = crypto:bytes_to_integer(Priv) + }, + DecryptedKey = public_key:decrypt_private(RawEncKey, RSAPrivKey), + ?event(green_zone, {decrypt_zone_key, complete}), + {ok, DecryptedKey}. +``` + +### try_mount_encrypted_volume + +Attempts to mount an encrypted volume using the green zone AES key. + +```erlang +try_mount_encrypted_volume(Key, Opts) -> + ?event(debug_volume, {try_mount_encrypted_volume, start}), + % Set up options for volume mounting with default paths + VolumeOpts = Opts#{ + priv_volume_key => Key, + volume_skip_decryption => <<"true">> + }, + % Call the dev_volume:mount function to handle the complete process + case dev_volume:mount(undefined, undefined, VolumeOpts) of + {ok, Result} -> + ?event(debug_volume, {volume_mount, success, Result}), + ok; + {error, Error} -> + ?event(debug_volume, {volume_mount, error, Error}), + ok % Still return ok as this is an optional operation + end. +``` + +### rsa_wallet_integration_test + +Test RSA operations with the existing wallet structure. 
+ +```erlang +rsa_wallet_integration_test() -> + % Create a new wallet using ar_wallet + Wallet = ar_wallet:new(), + {{KeyType, Priv, Pub}, {KeyType, Pub}} = Wallet, + % Create test message + PlainText = <<"HyperBEAM integration test message.">>, + % Create RSA public key record for encryption + RsaPubKey = #'RSAPublicKey'{ + publicExponent = 65537, + modulus = crypto:bytes_to_integer(Pub) + }, + % Encrypt using public key + Encrypted = public_key:encrypt_public(PlainText, RsaPubKey), + % Create RSA private key record for decryption + RSAPrivKey = #'RSAPrivateKey'{ + publicExponent = 65537, + modulus = crypto:bytes_to_integer(Pub), + privateExponent = crypto:bytes_to_integer(Priv) + }, + % Verify decryption works + Decrypted = public_key:decrypt_private(Encrypted, RSAPrivKey), + % Verify roundtrip + ?assertEqual(PlainText, Decrypted), + % Verify wallet structure +``` + +--- + +*Generated from [dev_green_zone.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_green_zone.erl)* diff --git a/docs/book/src/dev_hook.erl.md b/docs/book/src/dev_hook.erl.md new file mode 100644 index 000000000..041cee5f1 --- /dev/null +++ b/docs/book/src/dev_hook.erl.md @@ -0,0 +1,272 @@ +# dev_hook + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_hook.erl) + +A generalized interface for `hooking` into HyperBEAM nodes. +This module allows users to define `hooks` that are executed at various +points in the lifecycle of nodes and message evaluations. +Hooks are maintained in the `node message` options, under the `on` +key. Each `hook` may have zero or many `handlers` against which their request is +executed. A new `handler` of a hook can be registered by simply +adding a new key to that message. If multiple hooks need to be executed for +a single event, the key's value can be set to a list of hooks. +`hook`s themselves do not need to be added explicitly. Any device can add +a hook by simply executing `dev_hook:on(HookName, Req, Opts)`. 
This +function does not affect the hashpath of a message and is not exported on +the device's API, such that it is not possible to call it directly with +AO-Core resolution. +All handlers are expressed in the form of a message, upon which the hook's +request is evaluated: + AO(HookMsg, Req, Opts) => {Status, Result} +The `Status` and `Result` of the evaluation can be used at the `hook` caller's +discretion. If multiple handlers are to be executed for a single `hook`, the +result of each is used as the input to the next, on the assumption that the +status of the previous is `ok`. If a non-`ok` status is encountered, the +evaluation is halted and the result is returned to the caller. This means +that in most cases, hooks take the form of chainable pipelines of functions, +passing the most pertinent data in the `body` key of both the request and +result. Hook definitions can also set the `hook/result` key to `ignore`, if +the result of the execution should be discarded and the prior value (the +input to the hook) should be used instead. The `hook/commit-request` key can +also be set to `true` if the request should be committed by the node before +execution of the hook. +The default HyperBEAM node implements several useful hooks. They include: + start: Executed when the node starts. + Req/body: The node's initial configuration. + Result/body: The node's possibly updated configuration. + request: Executed when a request is received via the HTTP API. + Req/body: The sequence of messages that the node will evaluate. + Req/request: The raw, unparsed singleton request. + Result/body: The sequence of messages that the node will evaluate. + step: Executed after each message in a sequence has been evaluated. + Req/body: The result of the evaluation. + Result/body: The result of the evaluation. + response: Executed when a response is sent via the HTTP API. + Req/body: The result of the evaluation. 
+ Req/request: The raw, unparsed singleton request that was used to + generate the response. + Result/body: The message to be sent in response to the request. +Additionally, this module implements a traditional device API, allowing the +node operator to register hooks to the node and find those that are +currently active. + +--- + +## Exported Functions + +- `find/2` +- `find/3` +- `info/1` +- `on/3` + +--- + +### info + +A generalized interface for `hooking` into HyperBEAM nodes. +Device API information +Execute a named hook with the provided request and options + +```erlang +info(_) -> + #{ excludes => [<<"on">>] }. +``` + +### on + +A generalized interface for `hooking` into HyperBEAM nodes. +Device API information +Execute a named hook with the provided request and options + +```erlang +on(HookName, Req, Opts) -> + ?event(hook, {attempting_execution_for_hook, HookName}), + % Get all handlers for this hook from the options + Handlers = find(HookName, Opts), + % If no handlers are found, return the original request with ok status + case Handlers of + [] -> + ?event(hook, {no_handlers_for_hook, HookName}), + {ok, Req}; + _ -> + % Execute each handler in sequence, passing the result of each to + % the next as input. +``` + +### find + +Get all handlers for a specific hook from the node message options. + +```erlang +find(HookName, Opts) -> + find(#{}, #{ <<"target">> => <<"body">>, <<"body">> => HookName }, Opts). +``` + +### find + +Get all handlers for a specific hook from the node message options. + +```erlang +find(_Base, Req, Opts) -> + HookName = maps:get(maps:get(<<"target">>, Req, <<"body">>), Req), + case maps:get(HookName, hb_opts:get(on, #{}, Opts), []) of + Handler when is_map(Handler) -> + % If a single handler is found, wrap it in a list. +``` + +### execute_handlers + +Execute a list of handlers in sequence. 
+ +```erlang +execute_handlers(_HookName, [], Req, _Opts) -> + % If no handlers remain, return the final request with ok status + {ok, Req}; +``` + +### execute_handlers + +Execute a list of handlers in sequence. + +```erlang +execute_handlers(HookName, [Handler|Rest], Req, Opts) -> + % Execute the current handler + ?event(hook, {executing_handler, HookName, Handler, Req}), + % Check the status of the execution + case execute_handler(HookName, Handler, Req, Opts) of + {ok, NewReq} -> + % If status is ok, continue with the next handler + ?event(hook, {handler_executed_successfully, HookName, NewReq}), + execute_handlers(HookName, Rest, NewReq, Opts); + {Status, Res} -> + % If status is error, halt execution and return the error + {Status, Res}; + Other -> + % If status is unknown, convert to error and halt execution + ?event(hook_error, {unexpected_handler_result, HookName, Other}), + {failure, + << + "Handler for hook `", + (hb_ao:normalize_key(HookName))/binary, + "` returned unexpected result." + >> + } + end. +``` + +### execute_handler + +Execute a single handler + +```erlang +execute_handler(<<"step">>, Handler, Req, Opts = #{ on := On = #{ <<"step">> := _ }}) -> + % The `step' hook is a special case: It is executed during the course of + % a resolution, and as such, the key must be removed from the node message + % before execution of the handler. Failure to do so will result in infinite + % recursion. +``` + +### execute_handler + +```erlang +execute_handler(HookName, Handler, Req, Opts) -> + try + % Resolve the handler message, setting the path to the handler name if + % it is not already set. We ensure to ignore the hashpath such that the + % handler does not affect the hashpath of a request's output. If the + % `hook/commit` key is set to `true`, the handler request will be + % committed before execution. 
+``` + +### no_handlers_test + +Test that hooks with no handlers return the original request + +```erlang +no_handlers_test() -> + Req = #{ <<"test">> => <<"value">> }, + Opts = #{}, + {ok, Result} = on(<<"test_hook">>, Req, Opts), + ?assertEqual(Req, Result). +``` + +### single_handler_test + +Test that a single handler is executed correctly + +```erlang +single_handler_test() -> + % Create a message with a mock handler that adds a key to the request. +``` + +### multiple_handlers_test + +Test that multiple handlers form a pipeline +Test that pipeline execution halts on error + +```erlang +multiple_handlers_test() -> + % Create mock handlers that modify the request in sequence + Handler1 = #{ + <<"device">> => #{ + <<"test-hook">> => + fun(_, Req, _) -> + {ok, Req#{ <<"handler1">> => true }} + end + } + }, + Handler2 = #{ + <<"device">> => #{ + <<"test-hook">> => + fun(_, Req, _) -> + {ok, Req#{ <<"handler2">> => true }} + end + } + }, + Req = #{ <<"test">> => <<"value">> }, + Opts = #{ on => #{ <<"test-hook">> => [Handler1, Handler2] }}, + {ok, Result} = on(<<"test-hook">>, Req, Opts), + ?assertEqual(true, maps:get(<<"handler1">>, Result)), + ?assertEqual(true, maps:get(<<"handler2">>, Result)). 
+``` + +### halt_on_error_test + +Test that multiple handlers form a pipeline +Test that pipeline execution halts on error + +```erlang +halt_on_error_test() -> + % Create handlers where the second one returns an error + Handler1 = #{ + <<"device">> => #{ + <<"test-hook">> => + fun(_, Req, _) -> + {ok, Req#{ <<"handler1">> => true }} + end + } + }, + Handler2 = #{ + <<"device">> => #{ + <<"test-hook">> => + fun(_, _, _) -> + {error, <<"Error in handler2">>} + end + } + }, + Handler3 = #{ + <<"device">> => #{ + <<"test-hook">> => + fun(_, Req, _) -> + {ok, Req#{ <<"handler3">> => true }} + end + } + }, + Req = #{ <<"test">> => <<"value">> }, + Opts = #{ on => #{ <<"test-hook">> => [Handler1, Handler2, Handler3] }}, + {error, Result} = on(<<"test-hook">>, Req, Opts), +``` + +--- + +*Generated from [dev_hook.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_hook.erl)* diff --git a/docs/book/src/dev_hyperbuddy.erl.md b/docs/book/src/dev_hyperbuddy.erl.md new file mode 100644 index 000000000..7736eefa1 --- /dev/null +++ b/docs/book/src/dev_hyperbuddy.erl.md @@ -0,0 +1,291 @@ +# dev_hyperbuddy + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_hyperbuddy.erl) + +A device that renders a REPL-like interface for AO-Core via HTML. + +--- + +## Exported Functions + +- `events/3` +- `format/3` +- `info/0` +- `metrics/3` +- `return_error/2` +- `return_file/2` +- `throw/3` + +--- + +### info + +A device that renders a REPL-like interface for AO-Core via HTML. +Export an explicit list of files via http. 
+ +```erlang +info() -> + #{ + default => fun serve/4, + routes => #{ + % Default message viewer page: + <<"index">> => <<"index.html">>, + % HyperBEAM default homepage: + <<"dashboard">> => <<"dashboard.html">>, + % Interactive REPL: + <<"console">> => <<"console.html">>, + <<"graph">> => <<"graph.html">>, + % Styling and scripts: + <<"styles.css">> => <<"styles.css">>, + <<"metrics.js">> => <<"metrics.js">>, + <<"devices.js">> => <<"devices.js">>, + <<"utils.js">> => <<"utils.js">>, + <<"dashboard.js">> => <<"dashboard.js">>, + <<"graph.js">> => <<"graph.js">>, + <<"404.html">> => <<"404.html">> + }, + excludes => [<<"return_file">>] + }. +``` + +### metrics + +The main HTML page for the REPL device. + +```erlang +metrics(_, Req, Opts) -> + case hb_opts:get(prometheus, not hb_features:test(), Opts) of + true -> + {_, HeaderList, Body} = + prometheus_http_impl:reply( + #{path => true, + headers => + fun(Name, Default) -> + hb_ao:get(Name, Req, Default, Opts) + end, + registry => prometheus_registry:exists(<<"default">>), + standalone => false} + ), + RawHeaderMap = + hb_maps:from_list( + prometheus_cowboy:to_cowboy_headers(HeaderList) + ), + Headers = + hb_maps:map( + fun(_, Value) -> hb_util:bin(Value) end, + RawHeaderMap, + Opts + ), + {ok, Headers#{ <<"body">> => Body }}; + false -> + {ok, #{ <<"body">> => <<"Prometheus metrics disabled.">> }} + end. +``` + +### events + +Return the current event counters as a message. + +```erlang +events(_, _Req, _Opts) -> + {ok, hb_event:counters()}. +``` + +### format + +Employ HyperBEAM's internal pretty printer to format a message. 
+ +```erlang +format(Base, Req, Opts) -> + LoadedBase = hb_cache:ensure_all_loaded(Base, Opts), + LoadedReq = hb_cache:ensure_all_loaded(Req, Opts), + {ok, + #{ + <<"body">> => + hb_util:bin( + hb_format:message( + #{ + <<"base">> => + maps:without( + [<<"device">>], + hb_private:reset(LoadedBase)), + <<"request">> => + maps:without( + [<<"path">>], + hb_private:reset(LoadedReq) + ) + }, + Opts#{ + linkify_mode => discard, + cache_control => [<<"no-cache">>, <<"no-store">>] + } + ) + ) + } + }. +``` + +### throw + +Test key for validating the behavior of the `500` HTTP response. + +```erlang +throw(_Msg, _Req, Opts) -> + case hb_opts:get(mode, prod, Opts) of + prod -> {error, <<"Forced-throw unavailable in `prod` mode.">>}; + debug -> throw({intentional_error, Opts}) + end. +``` + +### serve + +Serve a file from the priv directory. Only serves files that are explicitly + +```erlang +serve(<<"keys">>, M1, _M2, Opts) -> dev_message:keys(M1, Opts); +``` + +### serve + +Serve a file from the priv directory. Only serves files that are explicitly + +```erlang +serve(<<"set">>, M1, M2, Opts) -> dev_message:set(M1, M2, Opts); +``` + +### serve + +Serve a file from the priv directory. Only serves files that are explicitly + +```erlang +serve(Key, _, _, Opts) -> + ?event({hyperbuddy_serving, Key}), + Routes = hb_maps:get(routes, info(), no_routes, Opts), + case hb_maps:get(Key, Routes, undefined, Opts) of + undefined -> {error, not_found}; + Filename -> return_file(Filename) + end. +``` + +### return_file + +Read a file from disk and serve it as a static HTML page. + +```erlang +return_file(Name) -> + return_file(<<"hyperbuddy@1.0">>, Name, #{}). +``` + +### return_file + +Read a file from disk and serve it as a static HTML page. + +```erlang +return_file(Device, Name) -> + return_file(Device, Name, #{}). 
+``` + +### return_file + +```erlang +return_file(Device, Name, Template) -> + Base = hb_util:bin(code:priv_dir(hb)), + Filename = <>, + ?event({hyperbuddy_serving, Filename}), + case file:read_file(Filename) of + {ok, RawBody} -> + Body = apply_template(RawBody, Template), + {ok, #{ + <<"body">> => Body, + <<"content-type">> => + case filename:extension(Filename) of + <<".html">> -> <<"text/html">>; + <<".js">> -> <<"text/javascript">>; + <<".css">> -> <<"text/css">>; + <<".png">> -> <<"image/png">>; + <<".ico">> -> <<"image/x-icon">> + end + } + }; + {error, _} -> + {error, not_found} + end. +``` + +### return_error + +Return an error page, with the `{{error}}` template variable replaced. + +```erlang +return_error(Error, Opts) when not is_map(Error) -> + return_error(#{ <<"body">> => Error }, Opts); +``` + +### return_error + +Return an error page, with the `{{error}}` template variable replaced. + +```erlang +return_error(ErrorMsg, Opts) -> + return_file( + <<"hyperbuddy@1.0">>, + <<"500.html">>, + #{ <<"error">> => hb_format:error(ErrorMsg, Opts) } + ). +``` + +### apply_template + +Apply a template to a body. + +```erlang +apply_template(Body, Template) when is_map(Template) -> + apply_template(Body, maps:to_list(Template)); +``` + +### apply_template + +Apply a template to a body. + +```erlang +apply_template(Body, []) -> + Body; +``` + +### apply_template + +Apply a template to a body. + +```erlang +apply_template(Body, [{Key, Value} | Rest]) -> + apply_template( + re:replace( + Body, + <<"\\{\\{", Key/binary, "\\}\\}">>, + hb_util:bin(Value), + [global, {return, binary}] + ), + Rest + ). 
+``` + +### return_templated_file_test + +```erlang +return_templated_file_test() -> + {ok, #{ <<"body">> := Body }} = + return_file( + <<"hyperbuddy@1.0">>, + <<"500.html">>, + #{ + <<"error">> => <<"This is an error message.">> + } + ), + ?assertNotEqual( + binary:match(Body, <<"This is an error message.">>), + nomatch +``` + +--- + +*Generated from [dev_hyperbuddy.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_hyperbuddy.erl)* diff --git a/docs/book/src/dev_json_iface.erl.md b/docs/book/src/dev_json_iface.erl.md new file mode 100644 index 000000000..c026f2389 --- /dev/null +++ b/docs/book/src/dev_json_iface.erl.md @@ -0,0 +1,685 @@ +# dev_json_iface + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_json_iface.erl) + +A device that provides a way for WASM execution to interact with +the HyperBEAM (and AO) systems, using JSON as a shared data representation. +The interface is easy to use. It works as follows: +1. The device is given a message that contains a process definition, WASM + environment, and a message that contains the data to be processed, + including the image to be used in part of `execute{pass=1}`. +2. The device is called with `execute{pass=2}`, which reads the result of + the process execution from the WASM environment and adds it to the + message. +The device has the following requirements and interface: +
+    M1/Computed when /Pass == 1 ->
+        Assumes:
+            M1/priv/wasm/instance
+            M1/Process
+            M2/Message
+            M2/Assignment/Block-Height
+        Generates:
+            /wasm/handler
+            /wasm/params
+        Side-effects:
+            Writes the process and message as JSON representations into the
+            WASM environment.
+    M1/Computed when M2/Pass == 2 ->
+        Assumes:
+            M1/priv/wasm/instance
+            M2/Results
+            M2/Process
+        Generates:
+            /Results/Outbox
+            /Results/Data
+ +--- + +## Exported Functions + +- `compute/3` +- `generate_aos_msg/2` +- `generate_stack/1` +- `generate_stack/2` +- `generate_stack/3` +- `init/3` +- `json_to_message/2` +- `message_to_json_struct/2` + +--- + +### init + +A device that provides a way for WASM execution to interact with +Initialize the device. +On first pass prepare the call, on second pass get the results. + +```erlang +init(M1, _M2, Opts) -> + {ok, hb_ao:set(M1, #{<<"function">> => <<"handle">>}, Opts)}. +``` + +### compute + +A device that provides a way for WASM execution to interact with +Initialize the device. +On first pass prepare the call, on second pass get the results. + +```erlang +compute(M1, M2, Opts) -> + case hb_ao:get(<<"pass">>, M1, Opts) of + 1 -> prep_call(M1, M2, Opts); + 2 -> results(M1, M2, Opts); + _ -> {ok, M1} + end. +``` + +### prep_call + +Prepare the WASM environment for execution by writing the process string and + +```erlang +prep_call(RawM1, RawM2, Opts) -> + M1 = hb_cache:ensure_all_loaded(RawM1, Opts), + M2 = hb_cache:ensure_all_loaded(RawM2, Opts), + ?event({prep_call, M1, M2, Opts}), + Process = hb_ao:get(<<"process">>, M1, Opts#{ hashpath => ignore }), + Message = hb_ao:get(<<"body">>, M2, Opts#{ hashpath => ignore }), + Image = hb_ao:get(<<"process/image">>, M1, Opts), + BlockHeight = hb_ao:get(<<"block-height">>, M2, Opts), + Props = message_to_json_struct(denormalize_message(Message, Opts), Opts), + MsgProps = + Props#{ + <<"Module">> => Image, + <<"Block-Height">> => BlockHeight + }, + MsgJson = hb_json:encode(MsgProps), + ProcessProps = + #{ + <<"Process">> => message_to_json_struct(Process, Opts) + }, + ProcessJson = hb_json:encode(ProcessProps), + env_write(ProcessJson, MsgJson, M1, M2, Opts). +``` + +### denormalize_message + +Normalize a message for AOS-compatibility. 
+ +```erlang +denormalize_message(Message, Opts) -> + NormOwnerMsg = + case hb_message:signers(Message, Opts) of + [] -> Message; + [PrimarySigner|_] -> + {ok, _, Commitment} = hb_message:commitment(PrimarySigner, Message, Opts), + Message#{ + <<"owner">> => hb_util:human_id(PrimarySigner), + <<"signature">> => + hb_ao:get(<<"signature">>, Commitment, <<>>, Opts) + } + end, + NormOwnerMsg#{ + <<"id">> => hb_message:id(Message, all, Opts) + }. +``` + +### message_to_json_struct + +```erlang +message_to_json_struct(RawMsg, Opts) -> + message_to_json_struct(RawMsg, [owner_as_address], Opts). +``` + +### message_to_json_struct + +```erlang +message_to_json_struct(RawMsg, Features, Opts) -> + TABM = + hb_message:convert( + hb_private:reset(RawMsg), + tabm, + Opts + ), + MsgWithoutCommitments = hb_maps:without([<<"commitments">>], TABM, Opts), + ID = hb_message:id(RawMsg, all), + ?event({encoding, {id, ID}, {msg, RawMsg}}), + {Owner, Signature} = + case hb_message:signers(RawMsg, Opts) of + [] -> {<<>>, <<>>}; + [Signer|_] -> + {ok, _, Commitment} = + hb_message:commitment(Signer, RawMsg, Opts), + CommitmentSignature = + hb_ao:get(<<"signature">>, Commitment, <<>>, Opts), + case lists:member(owner_as_address, Features) of + true -> + { + hb_util:native_id(Signer), + CommitmentSignature + }; + false -> + CommitmentOwner = + hb_ao:get_first( + [ + {Commitment, <<"key">>}, + {Commitment, <<"owner">>} + ], + no_signing_public_key_found_in_commitment, + Opts + ), + {CommitmentOwner, CommitmentSignature} + end + end, + Last = + hb_ao:get( + <<"anchor">>, + {as, <<"message@1.0">>, MsgWithoutCommitments}, + <<>>, + Opts + ), + Data = + hb_ao:get( + <<"data">>, + {as, <<"message@1.0">>, MsgWithoutCommitments}, + <<>>, + Opts + ), + Target = + hb_ao:get( + <<"target">>, + {as, <<"message@1.0">>, MsgWithoutCommitments}, + <<>>, + Opts + ), + % Set "From" if From-Process is Tag or set with "Owner" address + From = + hb_ao:get( + <<"from-process">>, + {as, <<"message@1.0">>, 
MsgWithoutCommitments}, + hb_util:encode(Owner), + Opts + ), + #{ + <<"Id">> => safe_to_id(ID), + % NOTE: In Arweave TXs, these are called "last_tx" + <<"Anchor">> => Last, + % NOTE: When sent to ao "Owner" is the wallet address + <<"Owner">> => hb_util:encode(Owner), + <<"From">> => case ?IS_ID(From) of true -> safe_to_id(From); false -> From end, + <<"Tags">> => prepare_tags(TABM, Opts), + <<"Target">> => safe_to_id(Target), + <<"Data">> => Data, + <<"Signature">> => + case byte_size(Signature) of + 0 -> <<>>; + 512 -> hb_util:encode(Signature); + _ -> Signature + end + }. +``` + +### prepare_tags + +Prepare the tags of a message as a key-value list, for use in the + +```erlang +prepare_tags(Msg, Opts) -> + % Prepare an ANS-104 message for JSON-Struct construction. +``` + +### prepare_header_case_tags + +Convert a message without an `original-tags` field into a list of + +```erlang +prepare_header_case_tags(TABM, Opts) -> + % Prepare a non-ANS-104 message for JSON-Struct construction. 
+``` + +### json_to_message + +Translates a compute result -- either from a WASM execution using the + +```erlang +json_to_message(JSON, Opts) when is_binary(JSON) -> + json_to_message(hb_json:decode(JSON), Opts); +``` + +### json_to_message + +Translates a compute result -- either from a WASM execution using the + +```erlang +json_to_message(Resp, Opts) when is_map(Resp) -> + {ok, Data, Messages, Patches} = normalize_results(Resp), + Output = + #{ + <<"outbox">> => + hb_maps:from_list( + [ + {MessageNum, preprocess_results(Msg, Opts)} + || + {MessageNum, Msg} <- + lists:zip( + lists:seq(1, length(Messages)), + Messages + ) + ] + ), + <<"patches">> => lists:map(fun(Patch) -> tags_to_map(Patch, Opts) end, Patches), + <<"data">> => Data + }, + {ok, Output}; +``` + +### json_to_message + +Translates a compute result -- either from a WASM execution using the + +```erlang +json_to_message(#{ <<"ok">> := false, <<"error">> := Error }, _Opts) -> + {error, Error}; +``` + +### json_to_message + +Translates a compute result -- either from a WASM execution using the + +```erlang +json_to_message(Other, _Opts) -> + {error, + #{ + <<"error">> => <<"Invalid JSON message input.">>, + <<"received">> => Other + } + }. +``` + +### safe_to_id + +```erlang +safe_to_id(<<>>) -> <<>>; +``` + +### safe_to_id + +```erlang +safe_to_id(ID) -> hb_util:human_id(ID). +``` + +### maybe_list_to_binary + +```erlang +maybe_list_to_binary(List) when is_list(List) -> + list_to_binary(List); +``` + +### maybe_list_to_binary + +```erlang +maybe_list_to_binary(Bin) -> + Bin. +``` + +### header_case_string + +```erlang +header_case_string(Key) -> + NormKey = hb_ao:normalize_key(Key), + Words = string:lexemes(NormKey, "-"), + TitleCaseWords = + lists:map( + fun binary_to_list/1, + lists:map( + fun string:titlecase/1, + Words + ) + ), + TitleCaseKey = list_to_binary(string:join(TitleCaseWords, "-")), + TitleCaseKey. 
+``` + +### results + +Read the computed results out of the WASM environment, assuming that + +```erlang +results(M1, M2, Opts) -> + Prefix = dev_stack:prefix(M1, M2, Opts), + Type = hb_ao:get(<<"results/", Prefix/binary, "/type">>, M1, Opts), + Proc = hb_ao:get(<<"process">>, M1, Opts), + case hb_ao:normalize_key(Type) of + <<"error">> -> + {error, + hb_ao:set( + M1, + #{ + <<"outbox">> => undefined, + <<"results">> => + #{ + <<"body">> => <<"WASM execution error.">> + } + }, + Opts + ) + }; + <<"ok">> -> + {ok, Str} = env_read(M1, M2, Opts), + try hb_json:decode(Str) of + #{<<"ok">> := true, <<"response">> := Resp} -> + {ok, ProcessedResults} = json_to_message(Resp, Opts), + PostProcessed = postprocess_outbox(ProcessedResults, Proc, Opts), + Out = hb_ao:set( + M1, + <<"results">>, + PostProcessed, + Opts + ), + ?event(debug_iface, {results, {processed, ProcessedResults}, {out, Out}}), + {ok, Out} + catch + _:_ -> + ?event(error, {json_error, Str}), + {error, + hb_ao:set( + M1, + #{ + <<"results/outbox">> => undefined, + <<"results/body">> => + <<"JSON error parsing result output.">> + }, + Opts + ) + } + end + end. +``` + +### env_read + +Read the results out of the execution environment. + +```erlang +env_read(M1, M2, Opts) -> + Prefix = dev_stack:prefix(M1, M2, Opts), + Output = hb_ao:get(<<"results/", Prefix/binary, "/output">>, M1, Opts), + case hb_private:get(<>, M1, Opts) of + not_found -> + {ok, Output}; + ReadFn -> + {ok, Read} = ReadFn(Output), + {ok, Read} + end. +``` + +### env_write + +Write the message and process into the execution environment. 
+ +```erlang +env_write(ProcessStr, MsgStr, Base, Req, Opts) -> + Prefix = dev_stack:prefix(Base, Req, Opts), + Params = + case hb_private:get(<>, Base, Opts) of + not_found -> + [MsgStr, ProcessStr]; + WriteFn -> + {ok, MsgJsonPtr} = WriteFn(MsgStr), + {ok, ProcessJsonPtr} = WriteFn(ProcessStr), + [MsgJsonPtr, ProcessJsonPtr] + end, + {ok, + hb_ao:set( + Base, + #{ + <<"function">> => <<"handle">>, + <<"parameters">> => Params + }, + Opts + ) + }. +``` + +### normalize_results + +Normalize the results of an evaluation. + +```erlang +normalize_results(#{ <<"Error">> := Error }) -> + {ok, Error, [], []}; +``` + +### normalize_results + +Normalize the results of an evaluation. + +```erlang +normalize_results(Msg) -> + try + Output = maps:get(<<"Output">>, Msg, #{}), + Data = maps:get(<<"data">>, Output, maps:get(<<"Data">>, Msg, <<>>)), + {ok, + Data, + maps:get(<<"Messages">>, Msg, []), + maps:get(<<"patches">>, Msg, []) + } + catch + _:_ -> + {ok, <<>>, [], []} + end. +``` + +### preprocess_results + +After the process returns messages from an evaluation, the + +```erlang +preprocess_results(Msg, Opts) -> + Tags = tags_to_map(Msg, Opts), + FilteredMsg = + hb_maps:without( + [<<"from-process">>, <<"from-image">>, <<"anchor">>, <<"tags">>], + Msg, + Opts + ), + hb_maps:merge( + hb_maps:from_list( + lists:map( + fun({Key, Value}) -> + {hb_ao:normalize_key(Key), Value} + end, + hb_maps:to_list(FilteredMsg, Opts) + ) + ), + Tags, + Opts + ). +``` + +### tags_to_map + +Convert a message with tags into a map of their key-value pairs. + +```erlang +tags_to_map(Msg, Opts) -> + NormMsg = hb_util:lower_case_key_map( + hb_ao:normalize_keys(Msg, Opts), + Opts), + RawTags = hb_maps:get(<<"tags">>, NormMsg, [], Opts), + TagList = + [ + {hb_maps:get(<<"name">>, Tag, Opts), hb_maps:get(<<"value">>, Tag, Opts)} + || + Tag <- RawTags + ], + hb_maps:from_list(TagList). 
+``` + +### postprocess_outbox + +Post-process messages in the outbox to add the correct `from-process` + +```erlang +postprocess_outbox(Msg, Proc, Opts) -> + AdjustedOutbox = + hb_maps:map( + fun(_Key, XMsg) -> + XMsg#{ + <<"from-process">> => hb_ao:get(id, Proc, Opts), + <<"from-image">> => hb_ao:get(<<"image">>, Proc, Opts) + } + end, + hb_ao:get(<<"outbox">>, Msg, #{}, Opts), + Opts + ), + hb_ao:set(Msg, <<"outbox">>, AdjustedOutbox, Opts). +%%% Tests +``` + +### normalize_test_opts + +Post-process messages in the outbox to add the correct `from-process` + +```erlang +normalize_test_opts(Opts) -> + Opts#{ + priv_wallet => hb_opts:get(priv_wallet, hb:wallet(), Opts) + }. +``` + +### test_init + +```erlang +test_init() -> + application:ensure_all_started(hb). +``` + +### generate_stack + +```erlang +generate_stack(File) -> + generate_stack(File, <<"WASM">>). +``` + +### generate_stack + +```erlang +generate_stack(File, Mode) -> + generate_stack(File, Mode, #{}). +``` + +### generate_stack + +```erlang +generate_stack(File, _Mode, RawOpts) -> + Opts = normalize_test_opts(RawOpts), + test_init(), + Msg0 = dev_wasm:cache_wasm_image(File, Opts), + Image = hb_ao:get(<<"image">>, Msg0, Opts), + Msg1 = Msg0#{ + <<"device">> => <<"stack@1.0">>, + <<"device-stack">> => + [ + <<"wasi@1.0">>, + <<"json-iface@1.0">>, + <<"wasm-64@1.0">>, + <<"multipass@1.0">> + ], + <<"input-prefix">> => <<"process">>, + <<"output-prefix">> => <<"wasm">>, + <<"passes">> => 2, + <<"stack-keys">> => [<<"init">>, <<"compute">>], + <<"process">> => + hb_message:commit(#{ + <<"type">> => <<"Process">>, + <<"image">> => Image, + <<"scheduler">> => hb:address(), + <<"authority">> => hb:address() + }, Opts) + }, + {ok, Msg2} = hb_ao:resolve(Msg1, <<"init">>, Opts), + Msg2. +``` + +### generate_aos_msg + +```erlang +generate_aos_msg(ProcID, Code) -> + generate_aos_msg(ProcID, Code, #{}). 
+``` + +### generate_aos_msg + +```erlang +generate_aos_msg(ProcID, Code, RawOpts) -> + Opts = normalize_test_opts(RawOpts), + hb_message:commit(#{ + <<"path">> => <<"compute">>, + <<"body">> => + hb_message:commit(#{ + <<"action">> => <<"Eval">>, + <<"data">> => Code, + <<"target">> => ProcID + }, Opts), + <<"block-height">> => 1 + }, Opts). +``` + +### basic_aos_call_test_ + +```erlang +basic_aos_call_test_() -> + {timeout, 20, fun() -> + Msg = generate_stack("test/aos-2-pure-xs.wasm"), + Proc = hb_ao:get(<<"process">>, Msg, #{ hashpath => ignore }), + ProcID = hb_message:id(Proc, all), + {ok, Msg3} = + hb_ao:resolve( + Msg, + generate_aos_msg(ProcID, <<"return 1+1">>), + #{} + ), + ?event({res, Msg3}), + Data = hb_ao:get(<<"results/data">>, Msg3, #{}), + ?assertEqual(<<"2">>, Data) + end}. +``` + +### aos_stack_benchmark_test_ + +```erlang +aos_stack_benchmark_test_() -> + {timeout, 20, fun() -> + BenchTime = 5, + Opts = #{ store => hb_test_utils:test_store() }, + RawWASMMsg = generate_stack("test/aos-2-pure-xs.wasm", <<"WASM">>, Opts), + Proc = hb_ao:get(<<"process">>, RawWASMMsg, Opts#{ hashpath => ignore }), + ProcID = hb_ao:get(id, Proc, Opts), + Msg = generate_aos_msg(ProcID, <<"return 1">>, Opts), + {ok, Initialized} = + hb_ao:resolve( + RawWASMMsg, + Msg, + Opts + ), + Msg2 = generate_aos_msg(ProcID, <<"return 1+1">>, Opts), + Iterations = + hb_test_utils:benchmark( + fun() -> hb_ao:resolve(Initialized, Msg2, Opts) end, + BenchTime + ), + hb_test_utils:benchmark_print( + <<"(Minimal AOS stack:) Evaluated">>, + <<"messages">>, + Iterations, + BenchTime + ), + ?assert(Iterations >= 10), + ok +``` + +--- + +*Generated from [dev_json_iface.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_json_iface.erl)* diff --git a/docs/book/src/dev_local_name.erl.md b/docs/book/src/dev_local_name.erl.md new file mode 100644 index 000000000..2efc2a199 --- /dev/null +++ b/docs/book/src/dev_local_name.erl.md @@ -0,0 +1,258 @@ +# dev_local_name + +[View source on 
GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_local_name.erl) + +A device for registering and looking up local names. This device uses +the node message to store a local cache of its known names, and the typical +non-volatile storage of the node message to store the names long-term. + +--- + +## Exported Functions + +- `direct_register/2` +- `info/1` +- `lookup/3` +- `register/3` + +--- + +### info + +A device for registering and looking up local names. This device uses +Export only the `lookup` and `register` functions. + +```erlang +info(_Opts) -> + #{ + excludes => [<<"direct_register">>, <<"keys">>, <<"set">>], + default => fun default_lookup/4 + }. +``` + +### lookup + +Takes a `key` argument and returns the value of the name, if it exists. + +```erlang +lookup(_, Req, Opts) -> + Key = hb_ao:get(<<"key">>, Req, no_key_specified, Opts), + ?event(local_name, {lookup, Key}), + hb_ao:resolve( + find_names(Opts), + Key, + Opts + ). +``` + +### default_lookup + +Handle all other requests by delegating to the lookup function. +Takes a `key` and `value` argument and registers the name. The caller + +```erlang +default_lookup(Key, _, Req, Opts) -> + lookup(Key, Req#{ <<"key">> => Key }, Opts). +``` + +### register + +Handle all other requests by delegating to the lookup function. +Takes a `key` and `value` argument and registers the name. The caller + +```erlang +register(_, Req, Opts) -> + case dev_meta:is(admin, Req, Opts) of + false -> + {error, + #{ + <<"status">> => 403, + <<"message">> => <<"Unauthorized.">> + } + }; + true -> + direct_register(Req, Opts) + end. +``` + +### direct_register + +Register a name without checking if the caller is an operator. 
Exported + +```erlang +direct_register(Req, Opts) -> + case hb_cache:write(hb_ao:get(<<"value">>, Req, Opts), Opts) of + {ok, MsgPath} -> + NormKey = hb_ao:normalize_key(hb_ao:get(<<"key">>, Req, Opts)), + hb_cache:link( + MsgPath, + LinkPath = << ?DEV_CACHE/binary, "/", NormKey/binary >>, + Opts + ), + load_names(Opts), + ?event( + local_name, + {registered, + {key, NormKey}, + {msg, MsgPath}, + {path, LinkPath} + } + ), + {ok, <<"Registered.">>}; + {error, _} -> + not_found + end. +``` + +### find_names + +Returns a message containing all known names. + +```erlang +find_names(Opts) -> + case hb_opts:get(local_names, not_found, Opts#{ only => local }) of + not_found -> + find_names(load_names(Opts)); + LocalNames -> + LocalNames + end. +``` + +### load_names + +Loads all known names from the cache and returns the new `node message` + +```erlang +load_names(Opts) -> + LocalNames = + maps:from_list(lists:map( + fun(Key) -> + NormKey = hb_ao:normalize_key(Key), + Path = << ?DEV_CACHE/binary, "/", NormKey/binary >>, + ?event(local_name, {loading, Path}), + case hb_cache:read(Path, Opts) of + {ok, Value} -> + {Key, Value}; + _ -> + {Key, not_found} + end + end, + hb_cache:list(?DEV_CACHE, Opts) + )), + ?event(local_name, {found_cache_keys, LocalNames}), + update_names(LocalNames, Opts). +``` + +### update_names + +Updates the node message with the new names. Further HTTP requests will + +```erlang +update_names(LocalNames, Opts) -> + hb_http_server:set_opts(NewOpts = Opts#{ local_names => LocalNames }), + NewOpts. +``` + +### generate_test_opts + +```erlang +generate_test_opts() -> + Opts = #{ + priv_wallet => ar_wallet:new() + }, + Opts. +``` + +### no_names_test + +```erlang +no_names_test() -> + ?assertEqual( + {error, not_found}, + lookup(#{}, #{ <<"key">> => <<"name1">> }, #{}) + ). 
+``` + +### lookup_opts_name_test + +```erlang +lookup_opts_name_test() -> + ?assertEqual( + {ok, <<"value1">>}, + lookup( + #{}, + #{ <<"key">> => <<"name1">> }, + #{ local_names => #{ <<"name1">> => <<"value1">>} } + ) + ). +``` + +### register_test + +```erlang +register_test() -> + TestName = <<"TEST-", (integer_to_binary(os:system_time(millisecond)))/binary>>, + Value = <<"TEST-VALUE-", (integer_to_binary(os:system_time(millisecond)))/binary>>, + Opts = generate_test_opts(), + ?assertEqual( + {ok, <<"Registered.">>}, + register( + #{}, + hb_message:commit( + #{ <<"key">> => TestName, <<"value">> => Value }, + Opts + ), + Opts + ) + ), + ?assertEqual( + {ok, Value}, + lookup(#{}, #{ <<"key">> => TestName, <<"load">> => false }, Opts) + ). +``` + +### unauthorized_test + +```erlang +unauthorized_test() -> + Opts = generate_test_opts(), + ?assertEqual( + {error, #{ <<"status">> => 403, <<"message">> => <<"Unauthorized.">> }}, + register( + #{}, + hb_message:commit( + #{ <<"key">> => <<"name1">>, <<"value">> => <<"value1">> }, + Opts#{ priv_wallet => ar_wallet:new() } + ), + Opts + ) + ). 
+``` + +### http_test + +```erlang +http_test() -> + Opts = generate_test_opts(), + Node = hb_http_server:start_node(Opts), + hb_http:post( + Node, + <<"/~local-name@1.0/register">>, + hb_message:commit( + #{ <<"key">> => <<"name1">>, <<"value">> => <<"value1">> }, + Opts + ), + Opts + ), + ?assertEqual( + {ok, <<"value1">>}, + hb_http:get( + Node, + <<"/~local-name@1.0/lookup?key=name1">>, + Opts + ) +``` + +--- + +*Generated from [dev_local_name.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_local_name.erl)* diff --git a/docs/book/src/dev_lookup.erl.md b/docs/book/src/dev_lookup.erl.md new file mode 100644 index 000000000..0feb4d247 --- /dev/null +++ b/docs/book/src/dev_lookup.erl.md @@ -0,0 +1,91 @@ +# dev_lookup + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lookup.erl) + +A device that looks up an ID from a local store and returns it, honoring +the `accept` key to return the correct format. + +--- + +## Exported Functions + +- `read/3` + +--- + +### read + +A device that looks up an ID from a local store and returns it, honoring +Fetch a resource from the cache using "target" ID extracted from the message + +```erlang +read(_M1, M2, Opts) -> + ID = hb_ao:get(<<"target">>, M2, Opts), + ?event({lookup, {id, ID}, {opts, Opts}}), + case hb_cache:read(ID, Opts) of + {ok, RawRes} -> + % We are sending the result over the wire, so make sure it is + % fully loaded, to save the recipient latency. +``` + +### binary_lookup_test + +```erlang +binary_lookup_test() -> + Bin = <<"Simple unsigned data item">>, + {ok, ID} = hb_cache:write(Bin, #{}), + {ok, RetrievedBin} = read(#{}, #{ <<"target">> => ID }, #{}), + ?assertEqual(Bin, RetrievedBin). 
+``` + +### message_lookup_test + +```erlang +message_lookup_test() -> + Msg = #{ <<"test-key">> => <<"test-value">>, <<"data">> => <<"test-data">> }, + {ok, ID} = hb_cache:write(Msg, #{}), + {ok, RetrievedMsg} = read(#{}, #{ <<"target">> => ID }, #{}), + ?assert(hb_message:match(Msg, RetrievedMsg)). +``` + +### aos2_message_lookup_test + +```erlang +aos2_message_lookup_test() -> + Msg = #{ <<"test-key">> => <<"test-value">>, <<"data">> => <<"test-data">> }, + {ok, ID} = hb_cache:write(Msg, #{}), + {ok, RetrievedMsg} = + read( + #{}, + #{ <<"target">> => ID, <<"accept">> => <<"application/aos-2">> }, + #{} + ), + {ok, Decoded} = dev_json_iface:json_to_message(hb_ao:get(<<"body">>, RetrievedMsg, #{}), #{}), + ?assertEqual(<<"test-data">>, hb_ao:get(<<"data">>, Decoded, #{})). +``` + +### http_lookup_test + +```erlang +http_lookup_test() -> + Store = #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-mainnet">> + }, + Opts = #{ store => [Store] }, + Msg = #{ <<"test-key">> => <<"test-value">>, <<"data">> => <<"test-data">> }, + {ok, ID} = hb_cache:write(Msg, Opts), + Node = hb_http_server:start_node(Opts), + Wallet = hb:wallet(), + Req = hb_message:commit(#{ + <<"path">> => <<"/~lookup@1.0/read?target=", ID/binary>>, + <<"device">> => <<"lookup@1.0">>, + <<"accept">> => <<"application/aos-2">> + }, Wallet), + {ok, Res} = hb_http:post(Node, Req, Opts), + {ok, Decoded} = dev_json_iface:json_to_message(hb_ao:get(<<"body">>, Res, Opts), Opts), +``` + +--- + +*Generated from [dev_lookup.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lookup.erl)* diff --git a/docs/book/src/dev_lua.erl.md b/docs/book/src/dev_lua.erl.md new file mode 100644 index 000000000..507879f99 --- /dev/null +++ b/docs/book/src/dev_lua.erl.md @@ -0,0 +1,977 @@ +# dev_lua + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua.erl) + +A device that calls a Lua module upon a request and returns the result. 
+ +--- + +## Exported Functions + +- `decode/2` +- `encode/2` +- `functions/3` +- `info/1` +- `init/3` +- `normalize/3` +- `pure_lua_process_benchmark/1` +- `snapshot/3` + +--- + +### info + +A device that calls a Lua module upon a request and returns the result. +All keys that are not directly available in the base message are + +```erlang +info(Base) -> + #{ + default => fun compute/4, + excludes => + [<<"keys">>, <<"set">>, <<"encode">>, <<"decode">>] + ++ maps:keys(Base) + }. +``` + +### init + +Initialize the device state, loading the script into memory if it is + +```erlang +init(Base, Req, Opts) -> + ensure_initialized(Base, Req, Opts). +``` + +### ensure_initialized + +Initialize the Lua VM if it is not already initialized. Optionally takes + +```erlang +ensure_initialized(Base, _Req, Opts) -> + case hb_private:from_message(Base) of + #{<<"state">> := _} -> + ?event(debug_lua, lua_state_already_initialized), + {ok, Base}; + _ -> + ?event(debug_lua, initializing_lua_state), + case find_modules(Base, Opts) of + {ok, Modules} -> + initialize(Base, Modules, Opts); + Error -> + Error + end + end. +``` + +### find_modules + +Find the script in the base message, either by ID or by string. + +```erlang +find_modules(Base, Opts) -> + case hb_ao:get(<<"module">>, {as, <<"message@1.0">>, Base}, Opts) of + not_found -> + {error, <<"no-modules-found">>}; + Module when is_binary(Module) -> + find_modules(Base#{ <<"module">> => [Module] }, Opts); + Module when is_map(Module) -> + % If the module is a map, check its content type to see if it is + % a literal Lua module, or a map of modules with content types. +``` + +### load_modules + +Load a list of modules for installation into the Lua VM. + +```erlang +load_modules(Modules, Opts) -> load_modules(Modules, Opts, []). +``` + +### load_modules + +Load a list of modules for installation into the Lua VM. 
+ +```erlang +load_modules([], _Opts, Acc) -> + {ok, lists:reverse(Acc)}; +``` + +### load_modules + +Load a list of modules for installation into the Lua VM. + +```erlang +load_modules([ModuleID | Rest], Opts, Acc) when ?IS_ID(ModuleID) -> + case hb_cache:read(ModuleID, Opts) of + {ok, Module} when is_binary(Module) -> + % The ID referred to a binary module item, so we add it to the list + % as-is. +``` + +### load_modules + +```erlang +load_modules([Module | Rest], Opts, Acc) when is_map(Module) -> + % We have found a message with a Lua module inside. Search for the binary + % of the program in the body and the data. +``` + +### initialize + +Initialize a new Lua state with a given base message and module. + +```erlang +initialize(Base, Modules, Opts) -> + State0 = luerl:init(), + % Load each script into the Lua state. +``` + +### functions + +Return a list of all functions in the Lua environment. + +```erlang +functions(Base, _Req, Opts) -> + case hb_private:get(<<"state">>, Base, Opts) of + not_found -> + {error, not_found}; + State -> + {ok, [Res], _S2} = + luerl:do_dec( + << + """ + local __tests = {} + for k, v in pairs(_G) do + if type(v) == "function" then + table.insert(__tests, k) + end + end + return __tests + """ + >>, + State + ), + {ok, hb_util:message_to_ordered_list(decode(Res, Opts))} + end. +``` + +### sandbox + +Sandbox (render inoperable) a set of Lua functions. Each function is + +```erlang +sandbox(State, Map, Opts) when is_map(Map) -> + sandbox(State, maps:to_list(Map), Opts); +``` + +### sandbox + +Sandbox (render inoperable) a set of Lua functions. Each function is + +```erlang +sandbox(State, [], _Opts) -> + State; +``` + +### sandbox + +Sandbox (render inoperable) a set of Lua functions. Each function is + +```erlang +sandbox(State, [{Path, Value} | Rest], Opts) -> + {ok, NextState} = luerl:set_table_keys_dec(Path, Value, State), + sandbox(NextState, Rest, Opts); +``` + +### sandbox + +Sandbox (render inoperable) a set of Lua functions. 
Each function is + +```erlang +sandbox(State, [Path | Rest], Opts) -> + {ok, NextState} = luerl:set_table_keys_dec(Path, <<"sandboxed">>, State), + sandbox(NextState, Rest, Opts). +``` + +### compute + +Call the Lua script with the given arguments. + +```erlang +compute(Key, RawBase, Req, Opts) -> + ?event(debug_lua, compute_called), + {ok, Base} = ensure_initialized(RawBase, Req, Opts), + ?event(debug_lua, ensure_initialized_done), + % Get the state from the base message's private element. +``` + +### process_response + +Process a response to a Luerl invocation. Returns the typical AO-Core + +```erlang +process_response({ok, [Result], NewState}, Priv, Opts) -> + process_response({ok, [<<"ok">>, Result], NewState}, Priv, Opts); +``` + +### process_response + +Process a response to a Luerl invocation. Returns the typical AO-Core + +```erlang +process_response({ok, [Status, MsgResult], NewState}, Priv, Opts) -> + % If the result is a HyperBEAM device return (`{Status, Msg}'), decode it + % and add the previous `priv' element back into the resulting message. +``` + +### process_response + +```erlang +process_response({lua_error, RawError, State}, _Priv, Opts) -> + % An error occurred while calling the Lua function. Parse the stack trace + % and return it. +``` + +### process_response + +```erlang +process_response({error, Reason, Trace}, _Priv, _Opts) -> + % An Erlang error occurred while calling the Lua function. Return it. +``` + +### snapshot + +Snapshot the Lua state from a live computation. Normalizes its `priv` + +```erlang +snapshot(Base, _Req, Opts) -> + case hb_private:get(<<"state">>, Base, Opts) of + not_found -> + {error, <<"Cannot snapshot Lua state: state not initialized.">>}; + State -> + {ok, #{ <<"body">> => term_to_binary(luerl:externalize(State)) }} + end. +``` + +### normalize + +Restore the Lua state from a snapshot, if it exists. 
+ +```erlang +normalize(Base, _Req, RawOpts) -> + Opts = RawOpts#{ hashpath => ignore }, + case hb_private:get(<<"state">>, Base, Opts) of + not_found -> + DeviceKey = + case hb_ao:get(<<"device-key">>, {as, <<"message@1.0">>, Base}, Opts) of + not_found -> []; + Key -> [Key] + end, + ?event(snapshot, + {attempting_to_restore_lua_state, + {msg1, Base}, {device_key, DeviceKey} + } + ), + SerializedState = + hb_ao:get( + [<<"snapshot">>] ++ DeviceKey ++ [<<"body">>], + {as, dev_message, Base}, + Opts + ), + case SerializedState of + not_found -> throw({error, no_lua_state_snapshot_found}); + State -> + ExternalizedState = binary_to_term(State), + InternalizedState = luerl:internalize(ExternalizedState), + ?event(snapshot, loaded_state_from_snapshot), + {ok, hb_private:set(Base, <<"state">>, InternalizedState, Opts)} + end; + _ -> + ?event(snapshot, state_already_initialized), + {ok, Base} + end. +``` + +### decode + +Decode a Lua result into a HyperBEAM `structured@1.0` message. + +```erlang +decode(EncMsg, _Opts) when is_list(EncMsg) andalso length(EncMsg) == 0 -> + % The value is an empty table, so we assume it is a message rather than + % a list. +``` + +### decode + +```erlang +decode(EncMsg = [{_K, _V} | _], Opts) when is_list(EncMsg) -> + decode( + maps:map( + fun(_, V) -> decode(V, Opts) end, + maps:from_list(EncMsg) + ), + Opts + ); +``` + +### decode + +```erlang +decode(Msg, Opts) when is_map(Msg) -> + % If the message is an ordered list encoded as a map, decode it to a list. +``` + +### decode + +```erlang +decode(Other, _Opts) -> + Other. +``` + +### encode + +Encode a HyperBEAM `structured@1.0` message into a Lua term. 
+ +```erlang +encode(Map, Opts) when is_map(Map) -> + hb_cache:ensure_all_loaded( + case hb_util:is_ordered_list(Map, Opts) of + true -> encode(hb_util:message_to_ordered_list(Map), Opts); + false -> maps:to_list(maps:map(fun(_, V) -> encode(V, Opts) end, Map)) + end, + Opts + ); +``` + +### encode + +Encode a HyperBEAM `structured@1.0` message into a Lua term. + +```erlang +encode(List, Opts) when is_list(List) -> + hb_cache:ensure_all_loaded( + lists:map(fun(V) -> encode(V, Opts) end, List), + Opts + ); +``` + +### encode + +Encode a HyperBEAM `structured@1.0` message into a Lua term. + +```erlang +encode(Atom, _Opts) when is_atom(Atom) and (Atom /= false) and (Atom /= true)-> + hb_util:bin(Atom); +``` + +### encode + +Encode a HyperBEAM `structured@1.0` message into a Lua term. + +```erlang +encode(Other, _Opts) -> + Other. +``` + +### decode_stacktrace + +Parse a Lua stack trace into a list of messages. + +```erlang +decode_stacktrace(StackTrace, State0, Opts) -> + decode_stacktrace(StackTrace, State0, [], Opts). 
+``` + +### decode_stacktrace + +```erlang +decode_stacktrace([], _State, Acc, _Opts) -> + lists:reverse(Acc); +``` + +### decode_stacktrace + +```erlang +decode_stacktrace([{FuncBin, ParamRefs, FileInfo} | Rest], State0, Acc, Opts) -> + %% Decode all the Lua table refs into Erlang terms + DecodedParams = decode_params(ParamRefs, State0, Opts), + %% Pull out the line number + Line = proplists:get_value(line, FileInfo), + File = proplists:get_value(file, FileInfo, undefined), + ?event(debug_lua_stack, {stack_file, FileInfo}), + %% Build our message‐map + Entry = #{ + <<"function">> => FuncBin, + <<"parameters">> => hb_util:list_to_numbered_message(DecodedParams) + }, + MaybeLine = + if is_binary(File) andalso is_integer(Line) -> + #{ + <<"line">> => + iolist_to_binary( + io_lib:format("~s:~p", [File, Line]) + ) + }; + is_integer(Line) -> + #{ <<"line">> => Line }; + true -> + #{} + end, + decode_stacktrace(Rest, State0, [maps:merge(Entry, MaybeLine)|Acc], Opts). +``` + +### decode_params + +Decode a list of Lua references, as found in a stack trace, into a + +```erlang +decode_params([], _State, _Opts) -> []; +``` + +### decode_params + +Decode a list of Lua references, as found in a stack trace, into a + +```erlang +decode_params([Tref|Rest], State, Opts) -> + Decoded = decode(luerl:decode(Tref, State), Opts), + [Decoded|decode_params(Rest, State, Opts)]. +``` + +### simple_invocation_test + +```erlang +simple_invocation_test() -> + {ok, Script} = file:read_file("test/test.lua"), + Base = #{ + <<"device">> => <<"lua@5.3a">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => Script + }, + <<"parameters">> => [] + }, + ?assertEqual(2, hb_ao:get(<<"assoctable/b">>, Base, #{})). +``` + +### load_modules_by_id_test_ + +```erlang +load_modules_by_id_test_() -> + {timeout, 30, fun load_modules_by_id/0}. +``` + +### load_modules_by_id + +```erlang +load_modules_by_id() -> + % Start a node to ensure the HTTP services are available. 
+``` + +### multiple_modules_test + +```erlang +multiple_modules_test() -> + {ok, Module} = file:read_file("test/test.lua"), + Module2 = + << + """ + function test_second_script() + return 4 + end + """ + >>, + Base = #{ + <<"device">> => <<"lua@5.3a">>, + <<"module">> => [ + #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => Module + }, + #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => Module2 + } + ], + <<"parameters">> => [] + }, + ?assertEqual(2, hb_ao:get(<<"assoctable/b">>, Base, #{})), + ?assertEqual(4, hb_ao:get(<<"test_second_script">>, Base, #{})). +``` + +### error_response_test + +```erlang +error_response_test() -> + {ok, Module} = file:read_file("test/test.lua"), + Base = #{ + <<"device">> => <<"lua@5.3a">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => Module + }, + <<"parameters">> => [] + }, + ?assertEqual( + {error, <<"Very bad, but Lua caught it.">>}, + hb_ao:resolve(Base, <<"error_response">>, #{}) + ). +``` + +### sandboxed_failure_test + +Run an AO-Core resolution from the Lua environment. + +```erlang +sandboxed_failure_test() -> + {ok, Module} = file:read_file("test/test.lua"), + Base = #{ + <<"device">> => <<"lua@5.3a">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => Module + }, + <<"parameters">> => [], + <<"sandbox">> => true + }, + ?assertMatch({error, _}, hb_ao:resolve(Base, <<"sandboxed_fail">>, #{})). +``` + +### ao_core_sandbox_test + +Run an AO-Core resolution from the Lua environment. +Run an AO-Core resolution from the Lua environment. 
+ +```erlang +ao_core_sandbox_test() -> + {ok, Module} = file:read_file("test/test.lua"), + Base = #{ + <<"device">> => <<"lua@5.3a">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => Module + }, + <<"parameters">> => [], + <<"device-sandbox">> => [<<"message@1.0">>] + }, + ?assertMatch({error, _}, hb_ao:resolve(Base, <<"ao_relay">>, #{})), + ?assertMatch({ok, _}, hb_ao:resolve(Base, <<"ao_resolve">>, #{})). +``` + +### ao_core_resolution_from_lua_test + +Run an AO-Core resolution from the Lua environment. +Run an AO-Core resolution from the Lua environment. +Benchmark the performance of Lua executions. + +```erlang +ao_core_resolution_from_lua_test() -> + {ok, Module} = file:read_file("test/test.lua"), + Base = #{ + <<"device">> => <<"lua@5.3a">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => Module + }, + <<"parameters">> => [] + }, + {ok, Res} = hb_ao:resolve(Base, <<"ao_resolve">>, #{}), + ?assertEqual(<<"Hello, AO world!">>, Res). +``` + +### direct_benchmark_test + +Run an AO-Core resolution from the Lua environment. +Run an AO-Core resolution from the Lua environment. +Benchmark the performance of Lua executions. + +```erlang +direct_benchmark_test() -> + BenchTime = 3, + {ok, Module} = file:read_file("test/test.lua"), + Base = #{ + <<"device">> => <<"lua@5.3a">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => Module + }, + <<"parameters">> => [] + }, + Iterations = hb_test_utils:benchmark( + fun(X) -> + {ok, _} = hb_ao:resolve(Base, <<"assoctable">>, #{}), + ?event({iteration, X}) + end, + BenchTime + ), + ?event({iterations, Iterations}), + hb_test_utils:benchmark_print( + <<"Direct Lua:">>, + <<"executions">>, + Iterations, + BenchTime + ), + ?assert(Iterations > 10). +``` + +### invoke_non_compute_key_test + +Call a non-compute key on a Lua device message and ensure that the +Use a Lua module as a hook on the HTTP server via `~meta@1.0`. 
+ +```erlang +invoke_non_compute_key_test() -> + {ok, Module} = file:read_file("test/test.lua"), + Base = #{ + <<"device">> => <<"lua@5.3a">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => Module + }, + <<"test-value">> => 42 + }, + {ok, Result1} = hb_ao:resolve(Base, <<"hello">>, #{}), + ?event({result1, Result1}), + ?assertEqual(42, hb_ao:get(<<"test-value">>, Result1, #{})), + ?assertEqual(<<"world">>, hb_ao:get(<<"hello">>, Result1, #{})), + {ok, Result2} = + hb_ao:resolve( + Base, + #{<<"path">> => <<"hello">>, <<"name">> => <<"Alice">>}, + #{} + ), + ?event({result2, Result2}), + ?assertEqual(<<"Alice">>, hb_ao:get(<<"hello">>, Result2, #{})). +``` + +### lua_http_hook_test + +Call a non-compute key on a Lua device message and ensure that the +Use a Lua module as a hook on the HTTP server via `~meta@1.0`. +Call a process whose `execution-device` is set to `lua@5.3a`. + +```erlang +lua_http_hook_test() -> + {ok, Module} = file:read_file("test/test.lua"), + Node = hb_http_server:start_node( + #{ + priv_wallet => ar_wallet:new(), + on => #{ + <<"request">> => + #{ + <<"device">> => <<"lua@5.3a">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => Module + } + } + } + }), + {ok, Res} = hb_http:get(Node, <<"/hello?hello=world">>, #{}), + ?assertMatch(#{ <<"body">> := <<"i like turtles">> }, Res). +``` + +### pure_lua_process_test + +Call a non-compute key on a Lua device message and ensure that the +Use a Lua module as a hook on the HTTP server via `~meta@1.0`. +Call a process whose `execution-device` is set to `lua@5.3a`. +Call a process whose `execution-device` is set to `lua@5.3a`. 
+ +```erlang +pure_lua_process_test() -> + Process = generate_lua_process("test/test.lua", #{}), + {ok, _} = hb_cache:write(Process, #{}), + Message = generate_test_message(Process, #{}), + {ok, _} = hb_ao:resolve(Process, Message, #{ hashpath => ignore }), + {ok, Results} = hb_ao:resolve(Process, <<"now">>, #{}), + ?assertEqual(42, hb_ao:get(<<"results/output/body">>, Results, #{})). +``` + +### pure_lua_restore_test + +Call a non-compute key on a Lua device message and ensure that the +Use a Lua module as a hook on the HTTP server via `~meta@1.0`. +Call a process whose `execution-device` is set to `lua@5.3a`. +Call a process whose `execution-device` is set to `lua@5.3a`. + +```erlang +pure_lua_restore_test() -> + Opts = #{ process_cache_frequency => 1 }, + Process = generate_lua_process("test/test.lua", Opts), + {ok, _} = hb_cache:write(Process, Opts), + Message = generate_test_message(Process, Opts, #{ <<"path">> => <<"inc">>}), + {ok, _} = hb_ao:resolve(Process, Message, Opts#{ hashpath => ignore }), + {ok, Count1} = hb_ao:resolve(Process, <<"now/count">>, Opts), + ?assertEqual(1, Count1), + hb_ao:resolve( + Process, + generate_test_message(Process, #{}, #{ <<"path">> => <<"inc">>}), + Opts + ), + {ok, Count2} = hb_ao:resolve(Process, <<"now/count">>, Opts), + ?assertEqual(2, Count2). +``` + +### pure_lua_process_benchmark_test_ + +```erlang +pure_lua_process_benchmark_test_() -> + {timeout, + 30, + fun() -> + pure_lua_process_benchmark(#{ + process_snapshot_slots => 50 + }) + end}. 
+``` + +### pure_lua_process_benchmark + +```erlang +pure_lua_process_benchmark(Opts) -> + BenchMsgs = 50, + hb:init(), + Process = generate_lua_process("test/test.lua", Opts), + {ok, _} = hb_cache:write(Process, Opts), + Message = generate_test_message(Process, Opts), + lists:foreach( + fun(X) -> + hb_ao:resolve(Process, Message, Opts#{ hashpath => ignore }), + ?event(debug_lua, {scheduled, X}) + end, + lists:seq(1, BenchMsgs) + ), + ?event(debug_lua, {executing, BenchMsgs}), + BeforeExec = os:system_time(millisecond), + {ok, _} = hb_ao:resolve(Process, <<"now">>, Opts), + AfterExec = os:system_time(millisecond), + hb_test_utils:benchmark_print( + <<"Pure Lua process: Computed">>, + <<"slots">>, + BenchMsgs, + (AfterExec - BeforeExec) / 1000 + ). +``` + +### invoke_aos_test + +```erlang +invoke_aos_test() -> + Opts = #{ priv_wallet => hb:wallet() }, + Process = generate_lua_process("test/hyper-aos.lua", Opts), + {ok, _Proc} = hb_cache:write(Process, Opts), + Message = generate_test_message(Process, Opts), + {ok, _Assignment} = hb_ao:resolve(Process, Message, Opts#{ hashpath => ignore }), + {ok, Results} = hb_ao:resolve(Process, <<"now/results/output">>, Opts), + ?assertEqual(<<"1">>, hb_ao:get(<<"data">>, Results, #{})), + ?assertEqual(<<"aos> ">>, hb_ao:get(<<"prompt">>, Results, #{})). +``` + +### aos_authority_not_trusted_test + +Benchmark the performance of Lua executions. 
+ +```erlang +aos_authority_not_trusted_test() -> + Opts = #{ priv_wallet => ar_wallet:new() }, + Process = generate_lua_process("test/hyper-aos.lua", Opts), + ProcID = hb_message:id(Process, all), + {ok, _} = hb_cache:write(Process, Opts), + Message = hb_message:commit( + #{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + hb_message:commit( + #{ + <<"target">> => ProcID, + <<"type">> => <<"Message">>, + <<"data">> => <<"1 + 1">>, + <<"random-seed">> => rand:uniform(1337), + <<"action">> => <<"Eval">>, + <<"from-process">> => <<"1234">> + }, + Opts + ) + }, + Opts + ), + ?event({message, Message}), + {ok, _} = hb_ao:resolve(Process, Message, Opts#{ hashpath => ignore }), + {ok, Results} = hb_ao:resolve(Process, <<"now/results/output/data">>, Opts), + ?assertEqual(<<"Message is not trusted.">>, Results). +``` + +### aos_process_benchmark_test_ + +Benchmark the performance of Lua executions. + +```erlang +aos_process_benchmark_test_() -> + {timeout, 30, fun() -> + BenchMsgs = 10, + Opts = #{ + process_async_cache => true, + hashpath => ignore, + process_snapshot_slots => 50 + }, + Process = generate_lua_process("test/hyper-aos.lua", Opts), + Message = generate_test_message(Process, Opts), + lists:foreach( + fun(X) -> + hb_ao:resolve(Process, Message, Opts), + ?event(debug_lua, {scheduled, X}) + end, + lists:seq(1, BenchMsgs) + ), + ?event(debug_lua, {executing, BenchMsgs}), + BeforeExec = os:system_time(millisecond), + {ok, _} = hb_ao:resolve( + Process, + <<"now">>, + Opts + ), + AfterExec = os:system_time(millisecond), + hb_test_utils:benchmark_print( + <<"HyperAOS process: Computed">>, + <<"slots">>, + BenchMsgs, + (AfterExec - BeforeExec) / 1000 + ) + end}. +``` + +### generate_lua_process + +Generate a Lua process message. 
+ +```erlang +generate_lua_process(File, Opts) -> + NormOpts = Opts#{ priv_wallet => hb_opts:get(priv_wallet, hb:wallet(), Opts) }, + Wallet = hb_opts:get(priv_wallet, hb:wallet(), NormOpts), + Address = hb_util:human_id(ar_wallet:to_address(Wallet)), + {ok, Module} = file:read_file(File), + hb_message:commit( + #{ + <<"device">> => <<"process@1.0">>, + <<"type">> => <<"Process">>, + <<"scheduler-device">> => <<"scheduler@1.0">>, + <<"execution-device">> => <<"lua@5.3a">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => Module + }, + <<"authority">> => [ + Address, + <<"E3FJ53E6xtAzcftBpaw2E1H4ZM9h6qy6xz9NXh5lhEQ">> + ], + <<"scheduler-location">> => + hb_util:human_id(ar_wallet:to_address(Wallet)), + <<"test-random-seed">> => rand:uniform(1337) + }, + NormOpts + ). +``` + +### generate_test_message + +Generate a test message for a Lua process. + +```erlang +generate_test_message(Process, Opts) -> + generate_test_message( + Process, + Opts, + <<""" + Count = 0 + function add() + Send({Target = 'Foo', Data = 'Bar' }); + Count = Count + 1 + end + add() + return Count + """>> + ). +``` + +### generate_test_message + +```erlang +generate_test_message(Process, Opts, ToEval) when is_binary(ToEval) -> + generate_test_message( + Process, + Opts, + #{ + <<"action">> => <<"Eval">>, + <<"body">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => hb_util:bin(ToEval) + } + } + ); +``` + +### generate_test_message + +```erlang +generate_test_message(Process, Opts, MsgBase) -> + ProcID = hb_message:id(Process, all), + NormOpts = Opts#{ priv_wallet => hb_opts:get(priv_wallet, hb:wallet(), Opts) }, + hb_message:commit(#{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + hb_message:commit( + MsgBase#{ + <<"target">> => ProcID, + <<"type">> => <<"Message">>, + <<"random-seed">> => rand:uniform(1337) + }, + NormOpts + ) + }, + NormOpts + ). 
+``` + +### generate_stack + +Generate a stack message for the Lua process. + +```erlang +generate_stack(File) -> + Wallet = hb:wallet(), + {ok, Module} = file:read_file(File), + Msg1 = #{ + <<"device">> => <<"stack@1.0">>, + <<"device-stack">> => + [ + <<"json-iface@1.0">>, + <<"lua@5.3a">>, + <<"multipass@1.0">> + ], + <<"function">> => <<"json_result">>, + <<"passes">> => 2, + <<"stack-keys">> => [<<"init">>, <<"compute">>], + <<"module">> => Module, + <<"process">> => + hb_message:commit(#{ + <<"type">> => <<"Process">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => Module + }, + <<"scheduler">> => hb:address(), + <<"authority">> => hb:address() + }, Wallet) + }, + {ok, Msg2} = hb_ao:resolve(Msg1, <<"init">>, #{}), + Msg2. +``` + +--- + +*Generated from [dev_lua.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua.erl)* diff --git a/docs/book/src/dev_lua_lib.erl.md b/docs/book/src/dev_lua_lib.erl.md new file mode 100644 index 000000000..7ebfcdb96 --- /dev/null +++ b/docs/book/src/dev_lua_lib.erl.md @@ -0,0 +1,199 @@ +# dev_lua_lib + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua_lib.erl) + +A module for providing AO library functions to the Lua environment. +This module contains the implementation of the functions, each by the name +that should be used in the `ao` table in the Lua environment. Every export +is imported into the Lua environment. +Each function adheres closely to the Luerl calling convention, adding the +appropriate node message as a third argument: + fun(Args, State, NodeMsg) -> {ResultTerms, NewState} +As Lua allows for multiple return values, each function returns a list of +terms to grant to the caller. Matching the tuple convention used by AO-Core, +the first term is typically the status, and the second term is the result. 
+ +--- + +## Exported Functions + +- `event/3` +- `get/3` +- `install/3` +- `resolve/3` +- `set/3` + +--- + +### install + +A module for providing AO library functions to the Lua environment. +Install the library into the given Lua environment. + +```erlang +install(Base, State, Opts) -> + % Calculate and set the new `preloaded_devices' option. +``` + +### return + +Helper function for returning a result from a Lua function. + +```erlang +return(Result, ExecState, Opts) -> + ?event(lua_import, {import_returning, {result, Result}}), + TableEncoded = dev_lua:encode(hb_cache:ensure_all_loaded(Result, Opts), Opts), + {ReturnParams, ResultingState} = + lists:foldr( + fun(LuaEncoded, {Params, StateIn}) -> + {NewParam, NewState} = luerl:encode(LuaEncoded, StateIn), + {[NewParam | Params], NewState} + end, + {[], ExecState}, + TableEncoded + ), + ?event({lua_encoded, ReturnParams}), + {ReturnParams, ResultingState}. +``` + +### resolve + +A wrapper function for performing AO-Core resolutions. Offers both the + +```erlang +resolve([SingletonMsg], ExecState, ExecOpts) -> + ?event({ao_core_resolver, {msg, SingletonMsg}}), + ParsedMsgs = hb_singleton:from(SingletonMsg, ExecOpts), + ?event({parsed_msgs_to_resolve, ParsedMsgs}), + resolve({many, ParsedMsgs}, ExecState, ExecOpts); +``` + +### resolve + +A wrapper function for performing AO-Core resolutions. Offers both the + +```erlang +resolve([Base, Path], ExecState, ExecOpts) when is_binary(Path) -> + PathParts = hb_path:term_to_path_parts(Path, ExecOpts), + resolve({many, [Base] ++ PathParts}, ExecState, ExecOpts); +``` + +### resolve + +A wrapper function for performing AO-Core resolutions. Offers both the + +```erlang +resolve(Msgs, ExecState, ExecOpts) when is_list(Msgs) -> + resolve({many, Msgs}, ExecState, ExecOpts); +``` + +### resolve + +A wrapper function for performing AO-Core resolutions. 
Offers both the singleton and multi-message (`{many, Msgs}`) forms of resolution.
+
+```erlang
+resolve({many, Msgs}, ExecState, ExecOpts) ->
+    MaybeAsMsgs = lists:map(fun convert_as/1, Msgs),
+    try hb_ao:resolve_many(MaybeAsMsgs, ExecOpts) of
+        {Status, Res} ->
+            ?event({resolved_msgs, {status, Status}, {res, Res}, {exec_opts, ExecOpts}}),
+            {[Status, Res], ExecState}
+    catch
+        Error ->
+            ?event(lua_error, {ao_core_resolver_error, Error}),
+            {[<<"error">>, Error], ExecState}
+    end.
+```
+
+### get
+
+A wrapper for `hb_ao`'s `get` functionality.
+
+```erlang
+get([Key, Base], ExecState, ExecOpts) ->
+    ?event({ao_core_get, {base, Base}, {key, Key}}),
+    NewRes = hb_ao:get(convert_as(Key), convert_as(Base), ExecOpts),
+    ?event({ao_core_get_result, {result, NewRes}}),
+    {[NewRes], ExecState}.
+```
+
+### convert_as
+
+Converts any `as` terms from Lua to their HyperBEAM equivalents.
+
+```erlang
+convert_as([<<"as">>, Device, RawMsg]) ->
+    {as, Device, RawMsg};
+```
+
+### convert_as
+
+Converts any `as` terms from Lua to their HyperBEAM equivalents.
+
+```erlang
+convert_as(Other) ->
+    Other.
+```
+
+### set
+
+Wrapper for `hb_ao`'s `set` functionality.
+
+```erlang
+set([Base, Key, Value], ExecState, ExecOpts) ->
+    ?event({ao_core_set, {base, Base}, {key, Key}, {value, Value}}),
+    NewRes = hb_ao:set(Base, Key, Value, ExecOpts),
+    ?event({ao_core_set_result, {result, NewRes}}),
+    {[NewRes], ExecState};
+```
+
+### set
+
+Wrapper for `hb_ao`'s `set` functionality.
+
+```erlang
+set([Base, NewValues], ExecState, ExecOpts) ->
+    ?event({ao_core_set, {base, Base}, {new_values, NewValues}}),
+    NewRes = hb_ao:set(Base, NewValues, ExecOpts),
+    ?event({ao_core_set_result, {result, NewRes}}),
+    {[NewRes], ExecState}.
+```
+
+### event
+
+Allows Lua scripts to signal events using the HyperBEAM host's internal event system.
+
+```erlang
+event([Event], ExecState, Opts) ->
+    ?event({recalling_event, Event}),
+    event([global, Event], ExecState, Opts);
+```
+
+### event
+
+Allows Lua scripts to signal events using the HyperBEAM host's internal event system.
+
+```erlang
+event([Group, Event], State, Opts) when is_list(Event) ->
+    event([Group, list_to_tuple(Event)], State, Opts);
+```
+
+### event
+
+Allows Lua scripts to signal events using the HyperBEAM host's internal event system.
+
+```erlang
+event([Group, Event], ExecState, Opts) ->
+    ?event(
+        lua_event,
+        {event,
+            {group, Group},
+            {event, Event}
+        }
+    ),
+    ?event(Group, Event),
+```
+
+---
+
+*Generated from [dev_lua_lib.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua_lib.erl)*
diff --git a/docs/book/src/dev_lua_test.erl.md b/docs/book/src/dev_lua_test.erl.md
new file mode 100644
index 000000000..acc6f7145
--- /dev/null
+++ b/docs/book/src/dev_lua_test.erl.md
@@ -0,0 +1,166 @@
+# dev_lua_test
+
+[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua_test.erl)
+
+A wrapper module for generating and executing EUnit tests for all Lua modules.
+When executed with `rebar3 lua-test`, this module will be invoked and scan the
+`scripts` directory for all Lua files, and generate an EUnit test suite for
+each one. By default, an individual test is generated for each function in
+the global `_G` table that ends in `_test`.
+In order to specify other tests to run instead, the user may employ the
+`LUA_TESTS` and `LUA_SCRIPTS` environment variables. The syntax for these
+variables is described in the function documentation for `parse_spec`.
+ +--- + +## Exported Functions + +- `parse_spec/1` + +--- + +### parse_spec + +Parse a string representation of test descriptions received from the + +```erlang +parse_spec(Str) when is_list(Str) -> + parse_spec(hb_util:bin(Str)); +``` + +### parse_spec + +Parse a string representation of test descriptions received from the + +```erlang +parse_spec(tests) -> + % The user has not given a test spec, so we default to running all tests in + % the `LUA_SCRIPTS' directory (defaulting to `scripts/'). +``` + +### parse_spec + +```erlang +parse_spec(Str) -> + lists:map( + fun(ModDef) -> + [ModName|TestDefs] = binary:split(ModDef, <<":">>, [global, trim_all]), + ScriptDir = hb_util:bin(hb_opts:get(lua_scripts)), + File = + case terminates_with(ModName, <<".lua">>) of + true -> ModName; + false -> << ScriptDir/binary, "/", ModName/binary, ".lua" >> + end, + Tests = + case TestDefs of + [] -> tests; + TestDefs -> TestDefs + end, + {File, Tests} + end, + binary:split(Str, <<",">>, [global, trim_all]) + ). +``` + +### exec_test_ + +Main entrypoint for Lua tests. + +```erlang +exec_test_() -> + ScriptDefs = hb_opts:get(lua_tests), + lists:map( + fun({File, Funcs}) -> suite(File, Funcs) end, + ScriptDefs + ). +``` + +### suite + +Generate an EUnit test suite for a given Lua script. If the `Funcs` is + +```erlang +suite(File, Funcs) -> + {ok, State} = new_state(File), + {foreach, + fun() -> ok end, + fun(_) -> ok end, + lists:map( + fun(FuncName) -> + { + hb_util:list(File) ++ ":" ++ hb_util:list(FuncName), + fun() -> exec_test(State, FuncName) end + } + end, + case Funcs of + tests -> + lists:filter( + fun(FuncName) -> + terminates_with(FuncName, <<"_test">>) + end, + hb_ao:get(<<"functions">>, State, #{}) + ); + FuncNames -> FuncNames + end + ) + }. +``` + +### new_state + +Create a new Lua environment for a given script. 
+ +```erlang +new_state(File) -> + ?event(debug_lua_test, {generating_state_for, File}), + {ok, Module} = file:read_file(hb_util:list(File)), + {ok, _} = + hb_ao:resolve( + #{ + <<"device">> => <<"lua@5.3a">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"name">> => File, + <<"body">> => Module + } + }, + <<"init">>, + #{} + ). +``` + +### exec_test + +Generate an EUnit test for a given function. + +```erlang +exec_test(State, Function) -> + {Status, Result} = + hb_ao:resolve( + State, + #{ <<"path">> => Function, <<"parameters">> => [] }, + #{} + ), + case Status of + ok -> ok; + error -> + hb_format:print(Result, <<"Lua">>, Function, 1), + ?assertEqual( + ok, + Status + ) + end. +``` + +### terminates_with + +Check if a string terminates with a given suffix. + +```erlang +terminates_with(String, Suffix) -> + binary:longest_common_suffix(lists:map(fun hb_util:bin/1, [String, Suffix])) +``` + +--- + +*Generated from [dev_lua_test.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua_test.erl)* diff --git a/docs/book/src/dev_lua_test_ledgers.erl.md b/docs/book/src/dev_lua_test_ledgers.erl.md new file mode 100644 index 000000000..795f864d8 --- /dev/null +++ b/docs/book/src/dev_lua_test_ledgers.erl.md @@ -0,0 +1,887 @@ +# dev_lua_test_ledgers + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua_test_ledgers.erl) + +A collection of Eunit tests for the `lua@5.3a` device, and the +`hyper-token.lua` script. These tests are designed to validate the +functionality of both of these components, and to provide examples +of how to use the `lua@5.3a` device. +The module is split into four components: +1. A simple ledger client library. +2. Assertion functions that verify specific invariants about the state + of ledgers in a test environment. +3. Utility functions for normalizing the state of a test environment. +4. Test cases that generate and manipulate ledger networks in test + environments. 
+Many client and utility functions in this module handle the conversion of +wallet IDs to human-readable addresses when found in transfers, balances, +and other fields. This is done to make the test cases more readable and +easier to understand -- be careful if following their patterns in other +contexts to either mimic a similar pattern, or to ensure you pass addresses +in these contexts rather that full wallet objects. + +--- + +### ledger + +Generate a Lua process definition message. + +```erlang +ledger(Script, Opts) -> + ledger(Script, #{}, Opts). +``` + +### ledger + +```erlang +ledger(Script, Extra, Opts) -> + % If the `balance' key is set in the `Extra' map, ensure that any wallets + % given as keys in the message are converted to human-readable addresses. +``` + +### lua_script + +Generate a Lua `script` key from a file or list of files. + +```erlang +lua_script(Files) when is_list(Files) -> + [ + #{ + <<"content-type">> => <<"application/lua">>, + <<"module">> => File, + <<"body">> => + hb_util:ok( + file:read_file( + if is_binary(File) -> binary_to_list(File); + true -> File + end + ) + ) + } + || + File <- Files + ]; +``` + +### lua_script + +Generate a Lua `script` key from a file or list of files. + +```erlang +lua_script(File) when is_binary(File) -> + hd(lua_script([File])). +``` + +### subledger + +Generate a test sub-ledger process definition message. + +```erlang +subledger(Root, Opts) -> + subledger(Root, #{}, Opts). +``` + +### subledger + +```erlang +subledger(Root, Extra, Opts) -> + BareRoot = + maps:without( + [<<"token">>, <<"balance">>], + hb_message:uncommitted(Root) + ), + Proc = + hb_message:commit( + maps:merge( + BareRoot#{ + <<"token">> => hb_message:id(Root, all) + }, + Extra + ), + hb_opts:get(priv_wallet, hb:wallet(), Opts) + ), + hb_cache:write(Proc, Opts), + Proc. +``` + +### transfer + +Generate a test transfer message. 
+ +```erlang +transfer(ProcMsg, Sender, Recipient, Quantity, Opts) -> + transfer(ProcMsg, Sender, Recipient, Quantity, undefined, Opts). +``` + +### transfer + +```erlang +transfer(ProcMsg, Sender, Recipient, Quantity, Route, Opts) -> + MaybeRoute = + if Route == undefined -> #{}; + true -> + #{ + <<"route">> => + if is_map(Route) -> hb_message:id(Route, all); + true -> Route + end + } + end, + Xfer = + hb_message:commit(#{ + <<"path">> => <<"push">>, + <<"body">> => + hb_message:commit(MaybeRoute#{ + <<"action">> => <<"Transfer">>, + <<"target">> => hb_message:id(ProcMsg, all), + <<"recipient">> => hb_util:human_id(Recipient), + <<"quantity">> => Quantity + }, + Sender + ) + }, + Sender + ), + hb_ao:resolve( + ProcMsg, + Xfer, + Opts#{ priv_wallet => hb_opts:get(priv_wallet, hb:wallet(), Opts) } + ). +``` + +### register + +Request that a peer register with a without sub-ledger. + +```erlang +register(ProcMsg, Peer, Opts) when is_map(Peer) -> + register(ProcMsg, hb_message:id(Peer, all), Opts); +``` + +### register + +Request that a peer register with a without sub-ledger. + +```erlang +register(ProcMsg, PeerID, RawOpts) -> + Opts = + RawOpts#{ + priv_wallet => hb_opts:get(priv_wallet, hb:wallet(), RawOpts) + }, + Reg = + hb_message:commit( + #{ + <<"path">> => <<"push">>, + <<"body">> => + hb_message:commit( + #{ + <<"action">> => <<"register-remote">>, + <<"target">> => hb_message:id(ProcMsg, all), + <<"peer">> => PeerID + }, + Opts + ) + }, + Opts + ), + hb_ao:resolve( + ProcMsg, + Reg, + Opts + ). +``` + +### balance + +Retreive a single balance from the ledger. + +```erlang +balance(ProcMsg, User, Opts) when not ?IS_ID(User) -> + balance(ProcMsg, hb_util:human_id(ar_wallet:to_address(User)), Opts); +``` + +### balance + +Retreive a single balance from the ledger. +Get the total balance for an ID across all ledgers in a set. + +```erlang +balance(ProcMsg, ID, Opts) -> + hb_ao:get(<<"now/balance/", ID/binary>>, ProcMsg, 0, Opts). 
+```
+
+### balance_total
+
+Retrieve the balance of an ID on each of the given ledger processes.
+Get the total balance for an ID across all ledgers in a set.
+
+```erlang
+balance_total(Procs, ID, Opts) ->
+    lists:sum(
+        lists:map(
+            fun(Proc) -> balance(Proc, ID, Opts) end,
+            maps:values(normalize_env(Procs))
+        )
+    ).
+```
+
+### balances
+
+Get the balances of a ledger.
+
+```erlang
+balances(ProcMsg, Opts) ->
+    balances(now, ProcMsg, Opts).
+```
+
+### balances
+
+```erlang
+balances(initial, ProcMsg, Opts) ->
+    balances(<<"">>, ProcMsg, Opts);
+```
+
+### balances
+
+```erlang
+balances(Mode, ProcMsg, Opts) when is_atom(Mode) ->
+    balances(hb_util:bin(Mode), ProcMsg, Opts);
+```
+
+### balances
+
+```erlang
+balances(Prefix, ProcMsg, Opts) ->
+    Balances = hb_ao:get(<>, ProcMsg, #{}, Opts),
+    hb_private:reset(hb_cache:ensure_all_loaded(Balances, Opts)).
+```
+
+### supply
+
+Get the supply of a ledger, either `now` or `initial`.
+
+```erlang
+supply(ProcMsg, Opts) ->
+    supply(now, ProcMsg, Opts).
+```
+
+### supply
+
+```erlang
+supply(Mode, ProcMsg, Opts) ->
+    lists:sum(maps:values(balances(Mode, ProcMsg, Opts))).
+```
+
+### subledger_supply
+
+Calculate the supply of tokens in all sub-ledgers, from the balances of the root ledger.
+
+```erlang
+subledger_supply(RootProc, AllProcs, Opts) ->
+    supply(now, RootProc, Opts) - user_supply(RootProc, AllProcs, Opts).
+```
+
+### user_supply
+
+Calculate the supply of tokens held by users on a ledger, excluding balances held by sub-ledger processes.
+
+```erlang
+user_supply(Proc, AllProcs, Opts) ->
+    NormProcs = normalize_without_root(Proc, AllProcs),
+    SubledgerIDs = maps:keys(NormProcs),
+    lists:sum(
+        maps:values(
+            maps:without(
+                SubledgerIDs,
+                balances(now, Proc, Opts)
+            )
+        )
+    ).
+```
+
+### ledgers
+
+Get the local expectation of a ledger's balances with peer ledgers.
+
+```erlang
+ledgers(ProcMsg, Opts) ->
+    case hb_cache:ensure_all_loaded(
+        hb_ao:get(<<"now/ledgers">>, ProcMsg, #{}, Opts),
+        Opts
+    ) of
+        Msg when is_map(Msg) -> hb_private:reset(Msg);
+        [] -> #{}
+    end.
+``` + +### map + +Generate a complete overview of the test environment's balances and + +```erlang +map(Procs, Opts) -> + NormProcs = normalize_env(Procs), + maps:merge_with( + fun(Key, Balances, Ledgers) -> + MaybeRoot = + case maps:get(Key, NormProcs, #{}) of + #{ <<"token">> := _ } -> #{}; + _ -> #{ root => true } + end, + MaybeRoot#{ + balances => Balances, + ledgers => Ledgers + } + end, + maps:map(fun(_, Proc) -> balances(Proc, Opts) end, NormProcs), + maps:map(fun(_, Proc) -> ledgers(Proc, Opts) end, NormProcs) + ). +``` + +### map + +```erlang +map(Procs, EnvNames, Opts) -> + apply_names(map(Procs, Opts), EnvNames, Opts). +``` + +### apply_names + +Apply a map of environment names to elements in either a map or list. + +```erlang +apply_names(Map, EnvNames, Opts) -> + IDs = + maps:from_list( + lists:filtermap( + fun({Key, V}) -> + try {true, {hb_util:human_id(Key), V}} + catch _:_ -> + try {true, {hb_message:id(Key, all), V}} + catch _:_ -> false + end + end + end, + maps:to_list(EnvNames) + ) + ), + do_apply_names(Map, maps:merge(IDs, EnvNames), Opts). +``` + +### do_apply_names + +```erlang +do_apply_names(Map, EnvNames, Opts) when is_map(Map) -> + maps:from_list( + lists:map( + fun({Key, Proc}) -> + { + apply_names(Key, EnvNames, Opts), + apply_names(Proc, EnvNames, Opts) + } + end, + maps:to_list(Map) + ) + ); +``` + +### do_apply_names + +```erlang +do_apply_names(List, EnvNames, Opts) when is_list(List) -> + lists:map( + fun(Proc) -> + apply_names(Proc, EnvNames, Opts) + end, + List + ); +``` + +### do_apply_names + +```erlang +do_apply_names(Item, Names, _Opts) when is_map_key(Item, Names) -> + maps:get(Item, Names); +``` + +### do_apply_names + +```erlang +do_apply_names(Item, Names, _Opts) -> + try maps:get(hb_util:human_id(Item), Names, Item) + catch _:_ -> Item + end. +``` + +### verify_net + +Execute all invariant checks for a pair of root ledger and sub-ledgers. 
+ +```erlang +verify_net(RootProc, AllProcs, Opts) -> + verify_net_supply(RootProc, AllProcs, Opts), + verify_net_peer_balances(AllProcs, Opts). +``` + +### verify_root_supply + +Verify that the initial supply of tokens on the root ledger is the same + +```erlang +verify_root_supply(RootProc, Opts) -> + ?assert( + supply(initial, RootProc, Opts) == + supply(now, RootProc, Opts) + + lists:sum(maps:values(ledgers(RootProc, Opts))) + ). +``` + +### verify_net_supply + +Verify that the sum of all spendable balances held by ledgers in a + +```erlang +verify_net_supply(RootProc, AllProcs, Opts) -> + verify_root_supply(RootProc, Opts), + StartingRootSupply = supply(initial, RootProc, Opts), + NormProcsWithoutRoot = normalize_without_root(RootProc, AllProcs), + SubledgerIDs = maps:keys(NormProcsWithoutRoot), + RootUserSupply = user_supply(RootProc, NormProcsWithoutRoot, Opts), + SubledgerSupply = subledger_supply(RootProc, AllProcs, Opts), + ?event({verify_net_supply, {root, RootUserSupply}, {subledger, SubledgerSupply}}), + ?assert( + StartingRootSupply == + RootUserSupply + SubledgerSupply + ). +``` + +### verify_net_peer_balances + +Verify the consistency of all expected ledger balances with their peer + +```erlang +verify_net_peer_balances(AllProcs, Opts) -> + NormProcs = normalize_env(AllProcs), + maps:map( + fun(ValidateProc, _) -> + verify_peer_balances(ValidateProc, NormProcs, Opts) + end, + NormProcs + ). +``` + +### verify_peer_balances + +Verify that a ledger's expectation of its balances with peer ledgers + +```erlang +verify_peer_balances(ValidateProc, AllProcs, Opts) -> + Ledgers = ledgers(ValidateProc, Opts), + NormProcs = normalize_env(AllProcs), + maps:map( + fun(PeerID, ExpectedBalance) -> + ?assertEqual( + ExpectedBalance, + balance(ValidateProc, + maps:get(PeerID, NormProcs), + Opts + ) + ) + end, + Ledgers + ). 
+``` + +### normalize_env + +Normalize a set of processes, representing ledgers in a test environment, + +```erlang +normalize_env(Procs) when is_map(Procs) -> + normalize_env(maps:values(Procs)); +``` + +### normalize_env + +Normalize a set of processes, representing ledgers in a test environment, + +```erlang +normalize_env(Procs) when is_list(Procs) -> + maps:from_list( + lists:map( + fun(Proc) -> + {hb_message:id(Proc, all), Proc} + end, + Procs + ) + ). +``` + +### normalize_without_root + +Return the normalized environment without the root ledger. + +```erlang +normalize_without_root(RootProc, Procs) -> + maps:without([hb_message:id(RootProc, all)], normalize_env(Procs)). +``` + +### test_opts + +Create a node message for the test that avoids looking up unknown + +```erlang +test_opts() -> + hb:init(), + #{}. +``` + +### transfer_test_ + +Test the `transfer` function. + +```erlang +transfer_test_() -> {timeout, 30, fun transfer/0}. +``` + +### transfer + +Test the `transfer` function. + +```erlang +transfer() -> + Opts = test_opts(), + Alice = ar_wallet:new(), + Bob = ar_wallet:new(), + Proc = + ledger( + <<"scripts/hyper-token.lua">>, + #{ <<"balance">> => #{ Alice => 100 } }, + Opts + ), + ?assertEqual(100, supply(Proc, Opts)), + transfer(Proc, Alice, Bob, 1, Opts), + ?assertEqual(99, balance(Proc, Alice, Opts)), + ?assertEqual(1, balance(Proc, Bob, Opts)), + ?assertEqual(100, supply(Proc, Opts)). +``` + +### transfer_unauthorized_test_ + +User's must not be able to send tokens they do not own. We test three + +```erlang +transfer_unauthorized_test_() -> {timeout, 30, fun transfer_unauthorized/0}. +``` + +### transfer_unauthorized + +User's must not be able to send tokens they do not own. We test three + +```erlang +transfer_unauthorized() -> + Opts = test_opts(), + Alice = ar_wallet:new(), + Bob = ar_wallet:new(), + Proc = + ledger( + <<"scripts/hyper-token.lua">>, + #{ <<"balance">> => #{ Alice => 100 } }, + Opts + ), + % 1. 
Transferring a token when the sender has no tokens. +``` + +### subledger_deposit_test_ + +Verify that a user can deposit tokens into a sub-ledger. + +```erlang +subledger_deposit_test_() -> {timeout, 30, fun subledger_deposit/0}. +``` + +### subledger_deposit + +Verify that a user can deposit tokens into a sub-ledger. + +```erlang +subledger_deposit() -> + Opts = test_opts(), + Alice = ar_wallet:new(), + Proc = + ledger( + <<"scripts/hyper-token.lua">>, + #{ <<"balance">> => #{ Alice => 100 } }, + Opts + ), + SubLedger = subledger(Proc, Opts), + % 1. Alice has tokens on the root ledger. +``` + +### subledger_transfer_test_ + +Simulate inter-ledger payments between users on a single sub-ledger: + +```erlang +subledger_transfer_test_() -> {timeout, 10, fun subledger_transfer/0}. +``` + +### subledger_transfer + +Simulate inter-ledger payments between users on a single sub-ledger: + +```erlang +subledger_transfer() -> + Opts = test_opts(), + Alice = ar_wallet:new(), + Bob = ar_wallet:new(), + RootLedger = + ledger( + <<"scripts/hyper-token.lua">>, + #{ <<"balance">> => #{ Alice => 100 } }, + Opts + ), + SubLedger = subledger(RootLedger, Opts), + EnvNames = #{ + Alice => alice, + Bob => bob, + RootLedger => root, + SubLedger => subledger + }, + % 1. Alice has tokens on the root ledger. +``` + +### subledger_registration_test_disabled + +Verify that peer ledgers on the same token are able to register mutually + +```erlang +subledger_registration_test_disabled() -> + Opts = test_opts(), + Alice = ar_wallet:new(), + RootLedger = + ledger( + <<"scripts/hyper-token.lua">>, + #{ <<"balance">> => #{ Alice => 100 } }, + Opts + ), + SubLedger1 = subledger(RootLedger, Opts), + SubLedger2 = subledger(RootLedger, Opts), + Names = #{ + SubLedger1 => subledger1, + SubLedger2 => subledger2 + }, + ?event(debug, + {subledger, + {sl1, hb_message:id(SubLedger1, none)}, + {sl2, hb_message:id(SubLedger2, none)} + } + ), + % There are no registered peers on either sub-ledger. 
+``` + +### single_subledger_to_subledger_test_ + +```erlang +single_subledger_to_subledger_test_() -> {timeout, 30, fun single_subledger_to_subledger/0}. +``` + +### single_subledger_to_subledger + +```erlang +single_subledger_to_subledger() -> + Opts = test_opts(), + Alice = ar_wallet:new(), + Bob = ar_wallet:new(), + RootLedger = + ledger( + <<"scripts/hyper-token.lua">>, + #{ <<"balance">> => #{ Alice => 100 } }, + Opts + ), + SubLedger1 = subledger(RootLedger, Opts), + SL1ID = hb_message:id(SubLedger1, signed, Opts), + ?event({sl1ID, SL1ID}), + SubLedger2 = subledger(RootLedger, Opts), + SL2ID = hb_message:id(SubLedger2, signed, Opts), + ?event({sl2ID, SL2ID}), + Names = #{ + Alice => alice, + Bob => bob, + RootLedger => root, + SubLedger1 => subledger1, + SubLedger2 => subledger2 + }, + ?event({root_ledger, RootLedger}), + ?event({sl1, SubLedger1}), + ?event({sl2, SubLedger2}), + ?assertEqual(100, balance(RootLedger, Alice, Opts)), + % 2. Alice sends 90 tokens to herself on SubLedger1. +``` + +### subledger_to_subledger_test_ + +Verify that registered sub-ledgers are able to send tokens to each other + +```erlang +subledger_to_subledger_test_() -> {timeout, 30, fun subledger_to_subledger/0}. +``` + +### subledger_to_subledger + +Verify that registered sub-ledgers are able to send tokens to each other + +```erlang +subledger_to_subledger() -> + Opts = test_opts(), + Alice = ar_wallet:new(), + Bob = ar_wallet:new(), + RootLedger = + ledger( + <<"scripts/hyper-token.lua">>, + #{ <<"balance">> => #{ Alice => 100 } }, + Opts + ), + SubLedger1 = subledger(RootLedger, Opts), + SubLedger2 = subledger(RootLedger, Opts), + Names = #{ + Alice => alice, + Bob => bob, + RootLedger => root, + SubLedger1 => subledger1, + SubLedger2 => subledger2 + }, + % 1. Alice has tokens on the root ledger. 
+``` + +### unregistered_peer_transfer_test_ + +Verify that a ledger can send tokens to a peer ledger that is not + +```erlang +unregistered_peer_transfer_test_() -> {timeout, 30, fun unregistered_peer_transfer/0}. +``` + +### unregistered_peer_transfer + +Verify that a ledger can send tokens to a peer ledger that is not + +```erlang +unregistered_peer_transfer() -> + Opts = #{}, + Alice = ar_wallet:new(), + Bob = ar_wallet:new(), + RootLedger = + ledger( + <<"scripts/hyper-token.lua">>, + #{ <<"balance">> => #{ Alice => 100 } }, + Opts + ), + SubLedgers = [ subledger(RootLedger, Opts) || _ <- lists:seq(1, 3) ], + SubLedger1 = lists:nth(1, SubLedgers), + SubLedger2 = lists:nth(2, SubLedgers), + SubLedger3 = lists:nth(3, SubLedgers), + Names = #{ + Alice => alice, + Bob => bob, + RootLedger => root, + SubLedger1 => subledger1, + SubLedger2 => subledger2, + SubLedger3 => subledger3 + }, + % 1. Alice has tokens on the root ledger. +``` + +### multischeduler_test_disabled + +Verify that sub-ledgers can request and enforce multiple scheduler + +```erlang +multischeduler_test_disabled() -> {timeout, 30, fun multischeduler/0}. +``` + +### multischeduler + +Verify that sub-ledgers can request and enforce multiple scheduler + +```erlang +multischeduler() -> + BaseOpts = test_opts(), + NodeWallet = ar_wallet:new(), + Scheduler2 = ar_wallet:new(), + Scheduler3 = ar_wallet:new(), + Opts = BaseOpts#{ + priv_wallet => NodeWallet, + identities => #{ + <<"extra-scheduler">> => #{ + priv_wallet => Scheduler2 + } + } + }, + Alice = ar_wallet:new(), + Bob = ar_wallet:new(), + RootLedger = + ledger( + <<"scripts/hyper-token.lua">>, + ProcExtra = + #{ + <<"balance">> => #{ Alice => 100 }, + <<"scheduler">> => + [ + hb_util:human_id(NodeWallet), + hb_util:human_id(Scheduler2) + ], + <<"scheduler-required">> => + [ + hb_util:human_id(NodeWallet) + ] + }, + Opts + ), + % Alice has tokens on the root ledger. She moves them to Bob. 
+``` + +### comma_separated_scheduler_list_test + +Ensure that the `hyper-token.lua` script can parse comma-separated + +```erlang +comma_separated_scheduler_list_test() -> + NodeWallet = hb:wallet(), + Scheduler2 = ar_wallet:new(), + Alice = ar_wallet:new(), + Bob = ar_wallet:new(), + Opts = (test_opts())#{ priv_wallet => NodeWallet, identities => #{ + <<"extra-scheduler">> => #{ + priv_wallet => Scheduler2 + } + } }, + Ledger = + ledger( + <<"scripts/hyper-token.lua">>, + ProcExtra = + #{ + <<"balance">> => #{ Alice => 100 }, + <<"scheduler">> => + iolist_to_binary( + [ + <<"\"">>, + hb_util:human_id(NodeWallet), + <<"\",\"">>, + hb_util:human_id(Scheduler2), + <<"\"">> + ] + ), + <<"scheduler-required">> => + [ + hb_util:human_id(NodeWallet) + ] + }, + Opts + ), + % Alice has tokens on the root ledger. She moves them to Bob. +``` + +--- + +*Generated from [dev_lua_test_ledgers.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua_test_ledgers.erl)* diff --git a/docs/book/src/dev_manifest.erl.md b/docs/book/src/dev_manifest.erl.md new file mode 100644 index 000000000..0f89076b1 --- /dev/null +++ b/docs/book/src/dev_manifest.erl.md @@ -0,0 +1,188 @@ +# dev_manifest + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_manifest.erl) + +An Arweave path manifest resolution device. Follows the v1 schema: +https://specs.ar.io/?tx=lXLd0OPwo-dJLB_Amz5jgIeDhiOkjXuM3-r0H_aiNj0 + +--- + +## Exported Functions + +- `index/3` +- `info/0` + +--- + +### info + +An Arweave path manifest resolution device. Follows the v1 schema: +Use the `route/4` function as the handler for all requests, aside + +```erlang +info() -> + #{ + default => fun route/4, + excludes => [keys, set, committers] + }. +``` + +### index + +Return the fallback index page when the manifest itself is requested. 
+ +```erlang +index(M1, M2, Opts) -> + ?event({manifest_index_request, M1, M2}), + case route(<<"index">>, M1, M2, Opts) of + {ok, Index} -> + ?event({manifest_index_returned, Index}), + {ok, Index}; + {error, not_found} -> + {error, not_found} + end. +``` + +### route + +Route a request to the associated data via its manifest. + +```erlang +route(<<"index">>, M1, M2, Opts) -> + ?event({manifest_index, M1, M2}), + case manifest(M1, M2, Opts) of + {ok, JSONStruct} -> + ?event({manifest_json_struct, JSONStruct}), + % Get the path to the index page from the manifest. We make + % sure to use `hb_maps:get/4' to ensure that we do not recurse + % on the `index' key with an `ao' resolve. +``` + +### route + +```erlang +route(Key, M1, M2, Opts) -> + ?event({manifest_lookup, Key}), + {ok, Manifest} = manifest(M1, M2, Opts), + {ok, + hb_ao:get( + <<"paths/", Key/binary>>, + {as, <<"message@1.0">>, Manifest}, + Opts + ) + }. +``` + +### manifest + +Find and deserialize a manifest from the given base. + +```erlang +manifest(Base, _Req, Opts) -> + JSON = + hb_ao:get_first( + [ + {{as, <<"message@1.0">>, Base}, [<<"data">>]}, + {{as, <<"message@1.0">>, Base}, [<<"body">>]} + ], + Opts + ), + ?event({manifest_json, JSON}), + Structured = + hb_cache:ensure_all_loaded( + hb_message:convert(JSON, <<"structured@1.0">>, <<"json@1.0">>, Opts), + Opts + ), + ?event({manifest_structured, {explicit, Structured}}), + Linkified = linkify(Structured, Opts), + ?event({manifest_linkified, {explicit, Linkified}}), + {ok, Linkified}. 
+``` + +### linkify + +Generate a nested message of links to content from a parsed (and + +```erlang +linkify(#{ <<"id">> := ID }, Opts) -> + LinkOptsBase = (maps:with([store], Opts))#{ scope => [local, remote]}, + {link, ID, LinkOptsBase#{ <<"type">> => <<"link">>, <<"lazy">> => false }}; +``` + +### linkify + +Generate a nested message of links to content from a parsed (and + +```erlang +linkify(Manifest, Opts) when is_map(Manifest) -> + hb_maps:map( + fun(_Key, Val) -> linkify(Val, Opts) end, + Manifest, + Opts + ); +``` + +### linkify + +Generate a nested message of links to content from a parsed (and + +```erlang +linkify(Manifest, Opts) when is_list(Manifest) -> + lists:map( + fun(Item) -> linkify(Item, Opts) end, + Manifest + ); +``` + +### linkify + +Generate a nested message of links to content from a parsed (and + +```erlang +linkify(Manifest, _Opts) -> + Manifest. +``` + +### resolve_test + +```erlang +resolve_test() -> + Opts = #{ store => hb_opts:get(store, no_viable_store, #{}) }, + IndexPage = #{ + <<"content-type">> => <<"text/html">>, + <<"body">> => <<"Page 1">> + }, + {ok, IndexID} = hb_cache:write(IndexPage, Opts), + Page2 = #{ + <<"content-type">> => <<"text/html">>, + <<"body">> => <<"Page 2">> + }, + {ok, Page2ID} = hb_cache:write(Page2, Opts), + Manifest = #{ + <<"paths">> => #{ + <<"nested">> => #{ <<"page2">> => #{ <<"id">> => Page2ID } }, + <<"page1">> => #{ <<"id">> => IndexID } + }, + <<"index">> => #{ <<"path">> => <<"page1">> } + }, + JSON = hb_message:convert(Manifest, <<"json@1.0">>, <<"structured@1.0">>, Opts), + ManifestMsg = + #{ + <<"device">> => <<"manifest@1.0">>, + <<"body">> => JSON + }, + {ok, ManifestID} = hb_cache:write(ManifestMsg, Opts), + ?event({manifest_id, ManifestID}), + Node = hb_http_server:start_node(Opts), + ?assertMatch( + {ok, #{ <<"body">> := <<"Page 1">> }}, + hb_http:get(Node, << ManifestID/binary, "/index" >>, Opts) + ), + {ok, Res} = hb_http:get(Node, << ManifestID/binary, "/nested/page2" >>, Opts), + 
?event({manifest_resolve_test, Res}), + ?assertEqual(<<"Page 2">>, hb_maps:get(<<"body">>, Res, <<"NO BODY">>, Opts)), +``` + +--- + +*Generated from [dev_manifest.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_manifest.erl)* diff --git a/docs/book/src/dev_message.erl.md b/docs/book/src/dev_message.erl.md new file mode 100644 index 000000000..94e4ec92e --- /dev/null +++ b/docs/book/src/dev_message.erl.md @@ -0,0 +1,625 @@ +# dev_message + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_message.erl) + +The identity device: For non-reserved keys, it simply returns a key +from the message as it is found in the message's underlying Erlang map. +Private keys (`priv[.*]`) are not included. +Reserved keys are: `id`, `commitments`, `committers`, `keys`, `path`, +`set`, `remove`, `get`, and `verify`. Their function comments describe the +behaviour of the device when these keys are set. + +--- + +## Exported Functions + +- `commit/3` +- `committed/3` +- `committers/1` +- `committers/2` +- `committers/3` +- `get/3` +- `get/4` +- `id/1` +- `id/2` +- `id/3` +- `index/3` +- `info/0` +- `keys/1` +- `keys/2` +- `remove/2` +- `remove/3` +- `set_path/3` +- `set/3` +- `verify/3` + +--- + +### info + +The identity device: For non-reserved keys, it simply returns a key +Return the info for the identity device. + +```erlang +info() -> + #{ + default => fun dev_message:get/4 + }. 
+``` + +### index + +Generate an index page for a message, in the event that the `body` and + +```erlang +index(Msg, Req, Opts) -> + case hb_opts:get(default_index, not_found, Opts) of + not_found -> + {error, <<"No default index message set.">>}; + DefaultIndex -> + hb_ao:resolve( + case is_map(DefaultIndex) of + true -> maps:merge(Msg, DefaultIndex); + false -> {as, DefaultIndex, Msg} + end, + Req#{ + <<"path">> => + case hb_maps:find(<<"path">>, DefaultIndex, Opts) of + {ok, Path} -> Path; + _ -> + hb_opts:get(default_index_path, <<"index">>, Opts) + end + }, + Opts + ) + end. +``` + +### id + +Return the ID of a message, using the `committers` list if it exists. + +```erlang +id(Base) -> id(Base, #{}). +``` + +### id + +Return the ID of a message, using the `committers` list if it exists. + +```erlang +id(Base, Req) -> id(Base, Req, #{}). +``` + +### id + +Return the ID of a message, using the `committers` list if it exists. + +```erlang +id(Base, _, NodeOpts) when is_binary(Base) -> + % Return the hashpath of the message in native format, to match the native + % format of the message ID return. +``` + +### id + +```erlang +id(RawBase, Req, NodeOpts) -> + % Ensure that the base message is a normalized before proceeding. +``` + +### calculate_id + +```erlang +calculate_id(Base, Req, NodeOpts) -> + % Find the ID device for the message. +``` + +### id_device + +Locate the ID device of a message. The ID device is determined the + +```erlang +id_device(#{ <<"commitments">> := Commitments }, Opts) -> + % Get the device from the first commitment. +``` + +### id_device + +```erlang +id_device(_, _) -> + {ok, ?DEFAULT_ID_DEVICE}. +``` + +### committers + +Return the committers of a message that are present in the given request. + +```erlang +committers(Base) -> committers(Base, #{}). +``` + +### committers + +Return the committers of a message that are present in the given request. + +```erlang +committers(Base, Req) -> committers(Base, Req, #{}). 
+``` + +### committers + +Return the committers of a message that are present in the given request. + +```erlang +committers(#{ <<"commitments">> := Commitments }, _, NodeOpts) -> + ?event(debug_commitments, {calculating_committers, {commitments, Commitments}}), + {ok, + hb_maps:values( + hb_maps:filtermap( + fun(_ID, Commitment) -> + Committer = maps:get(<<"committer">>, Commitment, undefined), + ?event(debug_commitments, {committers, {committer, Committer}}), + case Committer of + undefined -> false; + Committer -> {true, Committer} + end + end, + Commitments, + NodeOpts + ), + NodeOpts + ) + }; +``` + +### committers + +Return the committers of a message that are present in the given request. + +```erlang +committers(_, _, _) -> + {ok, []}. +``` + +### commit + +Commit to a message, using the `commitment-device` key to specify the + +```erlang +commit(Self, Req, Opts) -> + {ok, Base} = hb_message:find_target(Self, Req, Opts), + AttDev = + case hb_maps:get(<<"commitment-device">>, Req, not_specified, Opts) of + not_specified -> + hb_opts:get(commitment_device, no_viable_commitment_device, Opts); + Dev -> Dev + end, + % We _do not_ set the `device' key in the message, as the device will be + % part of the commitment. Instead, we find the device module's `commit' + % function and apply it. +``` + +### verify + +Verify a message. By default, all commitments are verified. The + +```erlang +verify(Self, Req, Opts) -> + % Get the target message of the verification request. 
+``` + +### verify_commitment + +Execute a function for a single commitment in the context of its + +```erlang +verify_commitment(Base, Commitment, Opts) -> + ?event(verify, {verifying_commitment, {commitment, Commitment}, {msg, Base}}), + AttDev = + hb_maps:get( + <<"commitment-device">>, + Commitment, + ?DEFAULT_ATT_DEVICE, + Opts + ), + AttMod = + hb_ao:message_to_device( + #{ <<"device">> => AttDev }, + Opts + ), + {ok, AttFun} = + hb_ao:find_exported_function( + Base, + AttMod, + verify, + 3, + Opts + ), + apply(AttFun, [Base, Commitment, Opts]). +``` + +### committed + +Return the list of committed keys from a message. + +```erlang +committed(Self, Req, Opts) -> + % Get the target message of the verification request and ensure its + % commitments are loaded. +``` + +### with_relevant_commitments + +Return a message with only the relevant commitments for a given request. +Implements a standardized form of specifying commitment IDs for a + +```erlang +with_relevant_commitments(Base, Req, Opts) -> + Commitments = maps:get(<<"commitments">>, Base, #{}), + CommitmentIDs = commitment_ids_from_request(Base, Req, Opts), + Base#{ <<"commitments">> => maps:with(CommitmentIDs, Commitments) }. +``` + +### commitment_ids_from_request + +Return a message with only the relevant commitments for a given request. 
+Implements a standardized form of specifying commitment IDs for a + +```erlang +commitment_ids_from_request(Base, Req, Opts) -> + Commitments = maps:get(<<"commitments">>, Base, #{}), + ReqCommitters = + case maps:get(<<"committers">>, Req, <<"none">>) of + X when is_list(X) -> X; + CommitterDescriptor -> hb_ao:normalize_key(CommitterDescriptor) + end, + RawReqCommitments = maps:get(<<"commitments">>, Req, <<"none">>), + ReqCommitments = + case RawReqCommitments of + X2 when is_list(X2) -> X2; + CommitmentDescriptor -> hb_ao:normalize_key(CommitmentDescriptor) + end, + ?event(debug_commitments, + {commitment_ids_from_request, + {req_commitments, ReqCommitments}, + {req_committers, ReqCommitters}} + ), + % Get the commitments to verify. +``` + +### ensure_commitments_loaded + +Ensure that the `commitments` submessage of a base message is fully + +```erlang +ensure_commitments_loaded(NonRelevant, _Opts) when not is_map(NonRelevant) -> + NonRelevant; +``` + +### ensure_commitments_loaded + +Ensure that the `commitments` submessage of a base message is fully + +```erlang +ensure_commitments_loaded(M = #{ <<"commitments">> := Link}, Opts) when ?IS_LINK(Link) -> + M#{ + <<"commitments">> => hb_cache:ensure_all_loaded(Link, Opts) + }; +``` + +### ensure_commitments_loaded + +Ensure that the `commitments` submessage of a base message is fully + +```erlang +ensure_commitments_loaded(M, _Opts) -> + M. +``` + +### commitment_ids_from_committers + +Returns a list of commitment IDs in a commitments map that are relevant + +```erlang +commitment_ids_from_committers(CommitterAddrs, Commitments, Opts) -> + % Get the IDs of all commitments for each committer. +``` + +### set + +Deep merge keys in a message. Takes a map of key-value pairs and sets + +```erlang +set(Message1, NewValuesMsg, Opts) -> + OriginalPriv = hb_private:from_message(Message1), + % Filter keys that are in the default device (this one). +``` + +### set_path + +Special case of `set/3` for setting the `path` key. 
This cannot be set + +```erlang +set_path(Base, #{ <<"value">> := Value }, Opts) -> + set_path(Base, Value, Opts); +``` + +### set_path + +Special case of `set/3` for setting the `path` key. This cannot be set + +```erlang +set_path(Base, Value, Opts) when not is_map(Value) -> + % Determine whether the `path' key is committed. If it is, we remove the + % commitment if the new value is different. We try to minimize work by + % doing the `hb_maps:get` first, as it is far cheaper than calculating + % the committed keys. +``` + +### remove + +Remove a key or keys from a message. + +```erlang +remove(Message1, Key) -> + remove(Message1, Key, #{}). +``` + +### remove + +```erlang +remove(Message1, #{ <<"item">> := Key }, Opts) -> + remove(Message1, #{ <<"items">> => [Key] }, Opts); +``` + +### remove + +```erlang +remove(Message1, #{ <<"items">> := Keys }, Opts) -> + { ok, hb_maps:without(Keys, Message1, Opts) }. +``` + +### keys + +Get the public keys of a message. + +```erlang +keys(Msg) -> + keys(Msg, #{}). +``` + +### keys + +```erlang +keys(Msg, Opts) when not is_map(Msg) -> + case hb_ao:normalize_keys(Msg, Opts) of + NormMsg when is_map(NormMsg) -> keys(NormMsg, Opts); + _ -> throw(badarg) + end; +``` + +### keys + +```erlang +keys(Msg, Opts) -> + { + ok, + lists:filter( + fun(Key) -> not hb_private:is_private(Key) end, + hb_maps:keys(Msg, Opts) + ) + }. +``` + +### get + +Return the value associated with the key as it exists in the message's + +```erlang +get(Key, Msg, Opts) -> get(Key, Msg, #{ <<"path">> => <<"get">> }, Opts). +``` + +### get + +Return the value associated with the key as it exists in the message's + +```erlang +get(Key, Msg, _Msg2, Opts) -> + case hb_private:is_private(Key) of + true -> {error, not_found}; + false -> + case hb_maps:get(Key, Msg, not_found, Opts) of + not_found -> case_insensitive_get(Key, Msg, Opts); + Value -> {ok, Value} + end + end. 
+``` + +### case_insensitive_get + +Key matching should be case insensitive, following RFC-9110, so we + +```erlang +case_insensitive_get(Key, Msg, Opts) -> + NormKey = hb_util:to_lower(hb_util:bin(Key)), + NormMsg = hb_ao:normalize_keys(Msg, Opts), + case hb_maps:get(NormKey, NormMsg, not_found, Opts) of + not_found -> {error, not_found}; + Value -> {ok, Value} + end. +``` + +### get_keys_mod_test + +```erlang +get_keys_mod_test() -> + ?assertEqual([a], hb_maps:keys(#{a => 1}, #{})). +``` + +### is_private_mod_test + +```erlang +is_private_mod_test() -> + ?assertEqual(true, hb_private:is_private(<<"private">>)), + ?assertEqual(true, hb_private:is_private(<<"private.foo">>)), + ?assertEqual(false, hb_private:is_private(<<"a">>)). +%%% Device functionality tests: +``` + +### keys_from_device_test + +```erlang +keys_from_device_test() -> + ?assertEqual({ok, [<<"a">>]}, hb_ao:resolve(#{ <<"a">> => 1 }, keys, #{})). +``` + +### case_insensitive_get_test + +```erlang +case_insensitive_get_test() -> + ?assertEqual({ok, 1}, case_insensitive_get(<<"a">>, #{ <<"a">> => 1 }, #{})), +% ?assertEqual({ok, 1}, case_insensitive_get(<<"a">>, #{ <<"A">> => 1 }, #{})), + ?assertEqual({ok, 1}, case_insensitive_get(<<"A">>, #{ <<"a">> => 1 }, #{})). + %?assertEqual({ok, 1}, case_insensitive_get(<<"A">>, #{ <<"A">> => 1 }, #{})). +``` + +### private_keys_are_filtered_test + +```erlang +private_keys_are_filtered_test() -> + ?assertEqual( + {ok, [<<"a">>]}, + hb_ao:resolve(#{ <<"a">> => 1, <<"private">> => 2 }, keys, #{}) + ), + ?assertEqual( + {ok, [<<"a">>]}, + hb_ao:resolve(#{ <<"a">> => 1, <<"priv_foo">> => 4 }, keys, #{}) + ). +``` + +### cannot_get_private_keys_test + +```erlang +cannot_get_private_keys_test() -> + ?assertEqual( + {error, not_found}, + hb_ao:resolve( + #{ <<"a">> => 1, <<"private_key">> => 2 }, + <<"private_key">>, + #{ hashpath => ignore } + ) + ). 
+``` + +### key_from_device_test + +```erlang +key_from_device_test() -> + ?assertEqual({ok, 1}, hb_ao:resolve(#{ <<"a">> => 1 }, <<"a">>, #{})). +``` + +### remove_test + +```erlang +remove_test() -> + Msg = #{ <<"key1">> => <<"Value1">>, <<"key2">> => <<"Value2">> }, + ?assertMatch({ok, #{ <<"key2">> := <<"Value2">> }}, + hb_ao:resolve( + Msg, + #{ <<"path">> => <<"remove">>, <<"item">> => <<"key1">> }, + #{ hashpath => ignore } + ) + ), + ?assertMatch({ok, #{}}, + hb_ao:resolve( + Msg, + #{ <<"path">> => <<"remove">>, <<"items">> => [<<"key1">>, <<"key2">>] }, + #{ hashpath => ignore } + ) + ). +``` + +### set_conflicting_keys_test + +```erlang +set_conflicting_keys_test() -> + Msg1 = #{ <<"dangerous">> => <<"Value1">> }, + Msg2 = #{ <<"path">> => <<"set">>, <<"dangerous">> => <<"Value2">> }, + ?assertMatch({ok, #{ <<"dangerous">> := <<"Value2">> }}, + hb_ao:resolve(Msg1, Msg2, #{})). +``` + +### unset_with_set_test + +```erlang +unset_with_set_test() -> + Msg1 = #{ <<"dangerous">> => <<"Value1">> }, + Msg2 = #{ <<"path">> => <<"set">>, <<"dangerous">> => unset }, + ?assertMatch({ok, Msg3} when ?IS_EMPTY_MESSAGE(Msg3), + hb_ao:resolve(Msg1, Msg2, #{ hashpath => ignore })). +``` + +### deep_unset_test + +```erlang +deep_unset_test() -> + Opts = #{ hashpath => ignore }, + Msg1 = #{ + <<"test-key1">> => <<"Value1">>, + <<"deep">> => #{ + <<"test-key2">> => <<"Value2">>, + <<"test-key3">> => <<"Value3">> + } + }, + Msg2 = hb_ao:set(Msg1, #{ <<"deep/test-key2">> => unset }, Opts), + ?assertEqual(#{ + <<"test-key1">> => <<"Value1">>, + <<"deep">> => #{ <<"test-key3">> => <<"Value3">> } + }, + Msg2 + ), + Msg3 = hb_ao:set(Msg2, <<"deep/test-key3">>, unset, Opts), + ?assertEqual(#{ + <<"test-key1">> => <<"Value1">>, + <<"deep">> => #{} + }, + Msg3 + ), + Msg4 = hb_ao:set(Msg3, #{ <<"deep">> => unset }, Opts), + ?assertEqual(#{ <<"test-key1">> => <<"Value1">> }, Msg4). 
+``` + +### set_ignore_undefined_test + +```erlang +set_ignore_undefined_test() -> + Msg1 = #{ <<"test-key">> => <<"Value1">> }, + Msg2 = #{ <<"path">> => <<"set">>, <<"test-key">> => undefined }, + ?assertEqual(#{ <<"test-key">> => <<"Value1">> }, + hb_private:reset(hb_util:ok(set(Msg1, Msg2, #{ hashpath => ignore })))). +``` + +### verify_test + +```erlang +verify_test() -> + Unsigned = #{ <<"a">> => <<"b">> }, + Signed = hb_message:commit(Unsigned, hb:wallet()), + ?event({signed, Signed}), + BadSigned = Signed#{ <<"a">> => <<"c">> }, + ?event({bad_signed, BadSigned}), + ?assertEqual(false, hb_message:verify(BadSigned)), + ?assertEqual({ok, true}, + hb_ao:resolve( + #{ <<"device">> => <<"message@1.0">> }, + #{ <<"path">> => <<"verify">>, <<"body">> => Signed }, + #{ hashpath => ignore } + ) + ), + % Test that we can verify a message without specifying the device explicitly. +``` + +--- + +*Generated from [dev_message.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_message.erl)* diff --git a/docs/book/src/dev_meta.erl.md b/docs/book/src/dev_meta.erl.md new file mode 100644 index 000000000..98475b034 --- /dev/null +++ b/docs/book/src/dev_meta.erl.md @@ -0,0 +1,981 @@ +# dev_meta + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_meta.erl) + +The hyperbeam meta device, which is the default entry point +for all messages processed by the machine. This device executes a +AO-Core singleton request, after first applying the node's +pre-processor, if set. The pre-processor can halt the request by +returning an error, or return a modified version if it deems necessary -- +the result of the pre-processor is used as the request for the AO-Core +resolver. Additionally, a post-processor can be set, which is executed after +the AO-Core resolver has returned a result. 
+ +--- + +## Exported Functions + +- `adopt_node_message/2` +- `build/3` +- `handle/2` +- `info/1` +- `info/3` +- `is_operator/2` +- `is/2` +- `is/3` + +--- + +### info + +The hyperbeam meta device, which is the default entry point +Ensure that the helper function `adopt_node_message/2` is not exported. +Utility function for determining if a request is from the `operator` of + +```erlang +info(_) -> #{ exports => [info, build] }. +``` + +### is_operator + +The hyperbeam meta device, which is the default entry point +Ensure that the helper function `adopt_node_message/2` is not exported. +Utility function for determining if a request is from the `operator` of + +```erlang +is_operator(Request, NodeMsg) -> + RequestSigners = hb_message:signers(Request, NodeMsg), + Operator = + hb_opts:get( + operator, + case hb_opts:get(priv_wallet, no_viable_wallet, NodeMsg) of + no_viable_wallet -> unclaimed; + Wallet -> ar_wallet:to_address(Wallet) + end, + NodeMsg + ), + EncOperator = + case Operator of + unclaimed -> unclaimed; + NativeAddress -> hb_util:human_id(NativeAddress) + end, + EncOperator == unclaimed orelse lists:member(EncOperator, RequestSigners). +``` + +### build + +Emits the version number and commit hash of the HyperBEAM node source, + +```erlang +build(_, _, _NodeMsg) -> + {ok, + #{ + <<"node">> => <<"HyperBEAM">>, + <<"version">> => ?HYPERBEAM_VERSION, + <<"source">> => ?HB_BUILD_SOURCE, + <<"source-short">> => ?HB_BUILD_SOURCE_SHORT, + <<"build-time">> => ?HB_BUILD_TIME + } + }. +``` + +### handle + +Normalize and route messages downstream based on their path. 
Messages + +```erlang +handle(NodeMsg, RawRequest) -> + ?event({singleton_tabm_request, RawRequest}), + NormRequest = hb_singleton:from(RawRequest, NodeMsg), + ?event( + http, + {request, + hb_cache:ensure_all_loaded( + hb_ao:normalize_keys(NormRequest, NodeMsg), + NodeMsg + ) + } + ), + case hb_opts:get(initialized, false, NodeMsg) of + false -> + Res = + embed_status( + hb_ao:force_message( + handle_initialize(NormRequest, NodeMsg), + NodeMsg + ), + NodeMsg + ), + Res; + _ -> handle_resolve(RawRequest, NormRequest, NodeMsg) + end. +``` + +### handle_initialize + +```erlang +handle_initialize([Base = #{ <<"device">> := Dev}, Req = #{ <<"path">> := Path }|_], NodeMsg) -> + ?event({got, {device, Dev}, {path, Path}}), + case {Dev, Path} of + {<<"meta@1.0">>, <<"info">>} -> info(Base, Req, NodeMsg); + _ -> {error, <<"Node must be initialized before use.">>} + end; +``` + +### handle_initialize + +```erlang +handle_initialize([{as, <<"meta@1.0">>, _}|Rest], NodeMsg) -> + handle_initialize([#{ <<"device">> => <<"meta@1.0">>}|Rest], NodeMsg); +``` + +### handle_initialize + +```erlang +handle_initialize([_|Rest], NodeMsg) -> + handle_initialize(Rest, NodeMsg); +``` + +### handle_initialize + +Get/set the node message. If the request is a `POST`, we check that the + +```erlang +handle_initialize([], _NodeMsg) -> + {error, <<"Node must be initialized before use.">>}. +``` + +### info + +Get/set the node message. If the request is a `POST`, we check that the + +```erlang +info(_, Request, NodeMsg) -> + case hb_ao:get(<<"method">>, Request, NodeMsg) of + <<"POST">> -> + case hb_ao:get(<<"initialized">>, NodeMsg, not_found, NodeMsg) of + permanent -> + embed_status( + {error, + <<"The node message of this machine is already " + "permanent. 
It cannot be changed.">> + }, + NodeMsg + ); + _ -> + update_node_message(Request, NodeMsg) + end; + _ -> + ?event({get_config_req, Request, NodeMsg}), + DynamicKeys = add_dynamic_keys(NodeMsg), + embed_status({ok, filter_node_msg(DynamicKeys, NodeMsg)}, NodeMsg) + end. +``` + +### filter_node_msg + +Remove items from the node message that are not encodable into a + +```erlang +filter_node_msg(Msg, NodeMsg) when is_map(Msg) -> + hb_maps:map(fun(_, Value) -> filter_node_msg(Value, NodeMsg) end, hb_private:reset(Msg), NodeMsg); +``` + +### filter_node_msg + +Remove items from the node message that are not encodable into a + +```erlang +filter_node_msg(Msg, NodeMsg) when is_list(Msg) -> + lists:map(fun(Item) -> filter_node_msg(Item, NodeMsg) end, Msg); +``` + +### filter_node_msg + +Remove items from the node message that are not encodable into a + +```erlang +filter_node_msg(Tuple, _NodeMsg) when is_tuple(Tuple) -> + <<"Unencodable value.">>; +``` + +### filter_node_msg + +Remove items from the node message that are not encodable into a + +```erlang +filter_node_msg(Other, _NodeMsg) -> + Other. +``` + +### add_dynamic_keys + +Add dynamic keys to the node message. + +```erlang +add_dynamic_keys(NodeMsg) -> + UpdatedNodeMsg = + case hb_opts:get(priv_wallet, no_viable_wallet, NodeMsg) of + no_viable_wallet -> + NodeMsg; + Wallet -> + %% Create a new map with address and merge it (overwriting existing) + Address = hb_util:id(ar_wallet:to_address(Wallet)), + NodeMsg#{ address => Address, <<"address">> => Address } + end, + add_identity_addresses(UpdatedNodeMsg). 
+``` + +### add_identity_addresses + +Validate that the request is signed by the operator of the node, then + +```erlang +add_identity_addresses(NodeMsg) -> + Identities = hb_opts:get(identities, #{}, NodeMsg), + NewIdentities = maps:map(fun(_, Identity) -> + Identity#{ + <<"address">> => hb_util:human_id( + hb_opts:get(priv_wallet, hb:wallet(), Identity) + ) + } + end, Identities), + NodeMsg#{ <<"identities">> => NewIdentities }. +``` + +### update_node_message + +Validate that the request is signed by the operator of the node, then + +```erlang +update_node_message(Request, NodeMsg) -> + case is(admin, Request, NodeMsg) of + false -> + ?event({set_node_message_fail, Request}), + embed_status({error, <<"Unauthorized">>}, NodeMsg); + true -> + case adopt_node_message(Request, NodeMsg) of + {ok, NewNodeMsg} -> + NewH = hb_opts:get(node_history, [], NewNodeMsg), + embed_status( + {ok, + #{ + <<"body">> => + iolist_to_binary( + io_lib:format( + "Node message updated. History: ~p" + "updates.", + [length(NewH)] + ) + ), + <<"history-length">> => length(NewH) + } + }, + NodeMsg + ); + {error, Reason} -> + ?event({set_node_message_fail, Request, Reason}), + embed_status({error, Reason}, NodeMsg) + end + end. +``` + +### adopt_node_message + +Attempt to adopt changes to a node message. + +```erlang +adopt_node_message(Request, NodeMsg) -> + ?event({set_node_message_success, Request}), + % Ensure that the node history is updated and the http_server ID is + % not overridden. +``` + +### handle_resolve + +Handle an AO-Core request, which is a list of messages. We apply + +```erlang +handle_resolve(Req, Msgs, NodeMsg) -> + TracePID = hb_opts:get(trace, no_tracer_set, NodeMsg), + % Apply the pre-processor to the request. +``` + +### resolve_hook + +Execute a hook from the node message upon the user's request. 
The + +```erlang +resolve_hook(HookName, InitiatingRequest, Body, NodeMsg) -> + HookReq = + #{ + <<"request">> => InitiatingRequest, + <<"body">> => Body + }, + ?event(hook, {resolve_hook, HookName, HookReq}), + case dev_hook:on(HookName, HookReq, NodeMsg) of + {ok, #{ <<"body">> := ResponseBody }} -> + ?event(hook, + {resolve_hook_success, + {name, HookName}, + {response_body, ResponseBody} + } + ), + {ok, ResponseBody}; + {error, _} = Error -> + ?event(hook, + {resolve_hook_error, + {name, HookName}, + {error, Error} + } + ), + Error; + Other -> + {error, Other} + end. +``` + +### embed_status + +Wrap the result of a device call in a status. + +```erlang +embed_status({ErlStatus, Res}, NodeMsg) when is_map(Res) -> + case lists:member(<<"status">>, hb_message:committed(Res, all, NodeMsg)) of + false -> + HTTPCode = status_code({ErlStatus, Res}, NodeMsg), + {ok, Res#{ <<"status">> => HTTPCode }}; + true -> + {ok, Res} + end; +``` + +### embed_status + +Wrap the result of a device call in a status. +Calculate the appropriate HTTP status code for an AO-Core result. + +```erlang +embed_status({ErlStatus, Res}, NodeMsg) -> + HTTPCode = status_code({ErlStatus, Res}, NodeMsg), + {ok, #{ <<"status">> => HTTPCode, <<"body">> => Res }}. +``` + +### status_code + +Wrap the result of a device call in a status. +Calculate the appropriate HTTP status code for an AO-Core result. + +```erlang +status_code({ErlStatus, Msg}, NodeMsg) -> + case message_to_status(Msg, NodeMsg) of + default -> status_code(ErlStatus, NodeMsg); + RawStatus -> RawStatus + end; +``` + +### status_code + +Wrap the result of a device call in a status. +Calculate the appropriate HTTP status code for an AO-Core result. + +```erlang +status_code(ok, _NodeMsg) -> 200; +``` + +### status_code + +Wrap the result of a device call in a status. +Calculate the appropriate HTTP status code for an AO-Core result. 
+ +```erlang +status_code(error, _NodeMsg) -> 400; +``` + +### status_code + +Wrap the result of a device call in a status. +Calculate the appropriate HTTP status code for an AO-Core result. + +```erlang +status_code(created, _NodeMsg) -> 201; +``` + +### status_code + +Wrap the result of a device call in a status. +Calculate the appropriate HTTP status code for an AO-Core result. + +```erlang +status_code(not_found, _NodeMsg) -> 404; +``` + +### status_code + +Wrap the result of a device call in a status. +Calculate the appropriate HTTP status code for an AO-Core result. + +```erlang +status_code(failure, _NodeMsg) -> 500; +``` + +### status_code + +Wrap the result of a device call in a status. +Calculate the appropriate HTTP status code for an AO-Core result. + +```erlang +status_code(unavailable, _NodeMsg) -> 503; +``` + +### status_code + +Wrap the result of a device call in a status. +Calculate the appropriate HTTP status code for an AO-Core result. + +```erlang +status_code(unauthorized, _NodeMsg) -> 401; +``` + +### status_code + +Wrap the result of a device call in a status. +Calculate the appropriate HTTP status code for an AO-Core result. + +```erlang +status_code(forbidden, _NodeMsg) -> 403; +``` + +### status_code + +Wrap the result of a device call in a status. +Calculate the appropriate HTTP status code for an AO-Core result. +Get the HTTP status code from a transaction (if it exists). + +```erlang +status_code(_, _NodeMsg) -> 200. +``` + +### message_to_status + +Wrap the result of a device call in a status. +Calculate the appropriate HTTP status code for an AO-Core result. +Get the HTTP status code from a transaction (if it exists). + +```erlang +message_to_status(#{ <<"body">> := Status }, NodeMsg) when is_atom(Status) -> + status_code(Status, NodeMsg); +``` + +### message_to_status + +Wrap the result of a device call in a status. +Calculate the appropriate HTTP status code for an AO-Core result. 
+Get the HTTP status code from a transaction (if it exists). + +```erlang +message_to_status(Item, NodeMsg) when is_map(Item) -> + % Note: We use `dev_message' directly here, such that we do not cause + % additional AO-Core calls for every request. This is particularly important + % if a remote server is being used for all AO-Core requests by a node. +``` + +### message_to_status + +```erlang +message_to_status(Item, NodeMsg) when is_atom(Item) -> + status_code(Item, NodeMsg); +``` + +### message_to_status + +```erlang +message_to_status(_Item, _NodeMsg) -> + default. +``` + +### maybe_sign + +Sign the result of a device call if the node is configured to do so. + +```erlang +maybe_sign({Status, Res}, NodeMsg) -> + {Status, maybe_sign(Res, NodeMsg)}; +``` + +### maybe_sign + +Sign the result of a device call if the node is configured to do so. + +```erlang +maybe_sign(Res, NodeMsg) -> + ?event({maybe_sign, Res}), + case hb_opts:get(force_signed, false, NodeMsg) of + true -> + case hb_message:signers(Res, NodeMsg) of + [] -> hb_message:commit(Res, NodeMsg); + _ -> Res + end; + false -> Res + end. +``` + +### is + +Check if the request in question is signed by a given `role` on the node. + +```erlang +is(Request, NodeMsg) -> + is(operator, Request, NodeMsg). +``` + +### is + +```erlang +is(admin, Request, NodeMsg) -> + % Does the caller have the right to change the node message? 
+ RequestSigners = hb_message:signers(Request, NodeMsg), + ValidOperator = + hb_util:bin( + hb_opts:get( + operator, + case hb_opts:get(priv_wallet, no_viable_wallet, NodeMsg) of + no_viable_wallet -> unclaimed; + Wallet -> ar_wallet:to_address(Wallet) + end, + NodeMsg + ) + ), + EncOperator = + case ValidOperator of + <<"unclaimed">> -> unclaimed; + NativeAddress -> hb_util:human_id(NativeAddress) + end, + ?event({is, + {operator, + {valid_operator, ValidOperator}, + {encoded_operator, EncOperator}, + {request_signers, RequestSigners} + } + }), + EncOperator == unclaimed orelse lists:member(EncOperator, RequestSigners); +``` + +### is + +```erlang +is(operator, Req, NodeMsg) -> + % Is the caller explicitly set to be the operator? + % Get the operator from the node message + Operator = hb_opts:get(operator, unclaimed, NodeMsg), + % Get the request signers + RequestSigners = hb_message:signers(Req, NodeMsg), + % Ensure the operator is present in the request + lists:member(Operator, RequestSigners); +``` + +### is + +```erlang +is(initiator, Request, NodeMsg) -> + % Is the caller the first identity that configured the node message? + NodeHistory = hb_opts:get(node_history, [], NodeMsg), + % Check if node_history exists and is not empty + case NodeHistory of + [] -> + ?event(green_zone, {init, node_history, empty}), + false; + [InitializationRequest | _] -> + % Extract signature from first entry + InitializationRequestSigners = hb_message:signers(InitializationRequest, NodeMsg), + % Get request signers + RequestSigners = hb_message:signers(Request, NodeMsg), + % Ensure all signers of the initalization request are present in the + % request. +``` + +### config_test + +Test that we can get the node message. +Test that we can't get the node message if the requested key is private. 
+ +```erlang +config_test() -> + StoreOpts = #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST">> + }, + Node = hb_http_server:start_node(Opts = #{ test_config_item => <<"test">>, store => StoreOpts }), + {ok, Res} = hb_http:get(Node, <<"/~meta@1.0/info">>, Opts), + ?assertEqual(<<"test">>, hb_ao:get(<<"test_config_item">>, Res, Opts)). +``` + +### priv_inaccessible_test + +Test that we can get the node message. +Test that we can't get the node message if the requested key is private. +Test that we can't set the node message if the request is not signed by + +```erlang +priv_inaccessible_test() -> + Node = hb_http_server:start_node( + #{ + test_config_item => <<"test">>, + priv_key => <<"BAD">> + } + ), + {ok, Res} = hb_http:get(Node, <<"/~meta@1.0/info">>, #{}), + ?event({res, Res}), + ?assertEqual(<<"test">>, hb_ao:get(<<"test_config_item">>, Res, #{})), + ?assertEqual(not_found, hb_ao:get(<<"priv_key">>, Res, #{})). +``` + +### unauthorized_set_node_msg_fails_test + +Test that we can get the node message. +Test that we can't get the node message if the requested key is private. +Test that we can't set the node message if the request is not signed by +Test that we can set the node message if the request is signed by the + +```erlang +unauthorized_set_node_msg_fails_test() -> + StoreOpts = #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST">> + }, + Node = hb_http_server:start_node(Opts = #{ store => StoreOpts, priv_wallet => ar_wallet:new() }), + {error, _} = + hb_http:post( + Node, + hb_message:commit( + #{ + <<"path">> => <<"/~meta@1.0/info">>, + <<"evil_config_item">> => <<"BAD">> + }, + Opts#{ priv_wallet => ar_wallet:new() } + ), + #{} + ), + {ok, Res} = hb_http:get(Node, <<"/~meta@1.0/info">>, Opts), + ?assertEqual(not_found, hb_ao:get(<<"evil_config_item">>, Res, Opts)), + ?assertEqual(0, length(hb_ao:get(<<"node_history">>, Res, [], Opts))). 
+``` + +### authorized_set_node_msg_succeeds_test + +Test that we can get the node message. +Test that we can't get the node message if the requested key is private. +Test that we can't set the node message if the request is not signed by +Test that we can set the node message if the request is signed by the +Test that an uninitialized node will not run computation. + +```erlang +authorized_set_node_msg_succeeds_test() -> + StoreOpts = #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST">> + }, + Owner = ar_wallet:new(), + Node = hb_http_server:start_node( + Opts = #{ + operator => hb_util:human_id(ar_wallet:to_address(Owner)), + test_config_item => <<"test">>, + store => StoreOpts + } + ), + {ok, SetRes} = + hb_http:post( + Node, + hb_message:commit( + #{ + <<"path">> => <<"/~meta@1.0/info">>, + <<"test_config_item">> => <<"test2">> + }, + Opts#{ priv_wallet => Owner } + ), + Opts + ), + ?event({res, SetRes}), + {ok, Res} = hb_http:get(Node, <<"/~meta@1.0/info">>, Opts), + ?event({res, Res}), + ?assertEqual(<<"test2">>, hb_ao:get(<<"test_config_item">>, Res, Opts)), + ?assertEqual(1, length(hb_ao:get(<<"node_history">>, Res, [], Opts))). +``` + +### uninitialized_node_test + +Test that we can get the node message. +Test that we can't get the node message if the requested key is private. +Test that we can't set the node message if the request is not signed by +Test that we can set the node message if the request is signed by the +Test that an uninitialized node will not run computation. +Test that a permanent node message cannot be changed. + +```erlang +uninitialized_node_test() -> + Node = hb_http_server:start_node(#{ initialized => false }), + {error, Res} = hb_http:get(Node, <<"/key1?1.key1=value1">>, #{}), + ?event({res, Res}), + ?assertEqual(<<"Node must be initialized before use.">>, Res). +``` + +### permanent_node_message_test + +Test that we can get the node message. 
+Test that we can't get the node message if the requested key is private. +Test that we can't set the node message if the request is not signed by +Test that we can set the node message if the request is signed by the +Test that an uninitialized node will not run computation. +Test that a permanent node message cannot be changed. +Test that we can claim the node correctly and set the node message after. + +```erlang +permanent_node_message_test() -> + StoreOpts = #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST">> + }, + Owner = ar_wallet:new(), + Node = hb_http_server:start_node( + Opts =#{ + operator => <<"unclaimed">>, + initialized => false, + test_config_item => <<"test">>, + store => StoreOpts + } + ), + {ok, SetRes1} = + hb_http:post( + Node, + hb_message:commit( + #{ + <<"path">> => <<"/~meta@1.0/info">>, + <<"test_config_item">> => <<"test2">>, + initialized => <<"permanent">> + }, + Opts#{ priv_wallet => Owner } + ), + Opts + ), + ?event({set_res, SetRes1}), + {ok, Res} = hb_http:get(Node, #{ <<"path">> => <<"/~meta@1.0/info">> }, Opts), + ?event({get_res, Res}), + ?assertEqual(<<"test2">>, hb_ao:get(<<"test_config_item">>, Res, Opts)), + {error, SetRes2} = + hb_http:post( + Node, + hb_message:commit( + #{ + <<"path">> => <<"/~meta@1.0/info">>, + <<"test_config_item">> => <<"bad_value">> + }, + Opts#{ priv_wallet => Owner } + ), + Opts + ), + ?event({set_res, SetRes2}), + {ok, Res2} = hb_http:get(Node, #{ <<"path">> => <<"/~meta@1.0/info">> }, Opts), + ?event({get_res, Res2}), + ?assertEqual(<<"test2">>, hb_ao:get(<<"test_config_item">>, Res2, Opts)), + ?assertEqual(1, length(hb_ao:get(<<"node_history">>, Res2, [], Opts))). +``` + +### claim_node_test + +Test that we can get the node message. +Test that we can't get the node message if the requested key is private. 
+Test that we can't set the node message if the request is not signed by +Test that we can set the node message if the request is signed by the +Test that an uninitialized node will not run computation. +Test that a permanent node message cannot be changed. +Test that we can claim the node correctly and set the node message after. + +```erlang +claim_node_test() -> + StoreOpts = #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST">> + }, + Owner = ar_wallet:new(), + Address = ar_wallet:to_address(Owner), + Node = hb_http_server:start_node( + Opts = #{ + operator => unclaimed, + test_config_item => <<"test">>, + store => StoreOpts + } + ), + {ok, SetRes} = + hb_http:post( + Node, + hb_message:commit( + #{ + <<"path">> => <<"/~meta@1.0/info">>, + <<"operator">> => hb_util:human_id(Address) + }, + Opts#{ priv_wallet => Owner} + ), + Opts + ), + ?event({res, SetRes}), + {ok, Res} = hb_http:get(Node, <<"/~meta@1.0/info">>, Opts), + ?event({res, Res}), + ?assertEqual(hb_util:human_id(Address), hb_ao:get(<<"operator">>, Res, Opts)), + {ok, SetRes2} = + hb_http:post( + Node, + hb_message:commit( + #{ + <<"path">> => <<"/~meta@1.0/info">>, + <<"test_config_item">> => <<"test2">> + }, + Opts#{ priv_wallet => Owner } + ), + Opts + ), + ?event({res, SetRes2}), + {ok, Res2} = hb_http:get(Node, <<"/~meta@1.0/info">>, Opts), + ?event({res, Res2}), + ?assertEqual(<<"test2">>, hb_ao:get(<<"test_config_item">>, Res2, Opts)), + ?assertEqual(2, length(hb_ao:get(<<"node_history">>, Res2, [], Opts))). +%% Test that we can use a hook upon a request. +``` + +### request_response_hooks_test + +```erlang +request_response_hooks_test() -> + Parent = self(), + Node = hb_http_server:start_node( + #{ + on => + #{ + <<"request">> => + #{ + <<"device">> => #{ + <<"request">> => + fun(_, #{ <<"body">> := Msgs }, _) -> + Parent ! 
{hook, request}, + {ok, #{ <<"body">> => Msgs} } + end + } + }, + <<"response">> => + #{ + <<"device">> => #{ + <<"response">> => + fun(_, #{ <<"body">> := Msgs }, _) -> + Parent ! {hook, response}, + {ok, #{ <<"body">> => Msgs} } + end + } + } + }, + http_extra_opts => #{ + <<"cache-control">> => [<<"no-store">>, <<"no-cache">>] + } + }), + hb_http:get(Node, <<"/~meta@1.0/info">>, #{}), + % Receive both of the responses from the hooks, if possible. +``` + +### halt_request_test + +Test that we can halt a request if the hook returns an error. +Test that a hook can modify a request. + +```erlang +halt_request_test() -> + Node = hb_http_server:start_node( + #{ + on => + #{ + <<"request">> => + #{ + <<"device">> => #{ + <<"request">> => + fun(_, _, _) -> + {error, <<"Bad">>} + end + } + } + } + }), + {error, Res} = hb_http:get(Node, <<"/~meta@1.0/info">>, #{}), + ?assertEqual(<<"Bad">>, Res). +``` + +### modify_request_test + +Test that we can halt a request if the hook returns an error. +Test that a hook can modify a request. +Test that version information is available and returned correctly. + +```erlang +modify_request_test() -> + Node = hb_http_server:start_node( + #{ + on => + #{ + <<"request">> => + #{ + <<"device">> => #{ + <<"request">> => + fun(_, #{ <<"body">> := [M|Ms] }, _) -> + { + ok, + #{ + <<"body">> => + [ + M#{ + <<"added">> => + <<"value">> + } + | + Ms + ] + } + } + end + } + } + } + }), + {ok, Res} = hb_http:get(Node, <<"/added">>, #{}), + ?assertEqual(<<"value">>, Res). +``` + +### buildinfo_test + +Test that we can halt a request if the hook returns an error. +Test that a hook can modify a request. +Test that version information is available and returned correctly. 
+ +```erlang +buildinfo_test() -> + Node = hb_http_server:start_node(#{}), + ?assertEqual( + {ok, <<"HyperBEAM">>}, + hb_http:get(Node, <<"/~meta@1.0/build/node">>, #{}) + ), + ?assertEqual( + {ok, ?HYPERBEAM_VERSION}, + hb_http:get(Node, <<"/~meta@1.0/build/version">>, #{}) + ), + ?assertEqual( + {ok, ?HB_BUILD_SOURCE}, + hb_http:get(Node, <<"/~meta@1.0/build/source">>, #{}) + ), + ?assertEqual( + {ok, ?HB_BUILD_SOURCE_SHORT}, + hb_http:get(Node, <<"/~meta@1.0/build/source-short">>, #{}) + ), + ?assertEqual( + {ok, ?HB_BUILD_TIME}, + hb_http:get(Node, <<"/~meta@1.0/build/build-time">>, #{}) + ). +``` + +--- + +*Generated from [dev_meta.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_meta.erl)* diff --git a/docs/book/src/dev_monitor.erl.md b/docs/book/src/dev_monitor.erl.md new file mode 100644 index 000000000..72bfdb2ab --- /dev/null +++ b/docs/book/src/dev_monitor.erl.md @@ -0,0 +1,75 @@ +# dev_monitor + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_monitor.erl) + +A simple device that allows flexible monitoring of a process execution. +Adding a dev_monitor device to a process will cause the listed functions +to be called with the current process state during each pass. The monitor +functions must not mutate state. + +--- + +## Exported Functions + +- `add_monitor/2` +- `end_of_schedule/1` +- `execute/2` +- `init/3` +- `uses/0` + +--- + +### init + +```erlang +init(State, _, InitState) -> + {ok, State#{ <<"monitors">> => InitState }}. +``` + +### execute + +```erlang +execute(Message, State = #{ <<"pass">> := Pass, <<"passes">> := Passes }) when Pass == Passes -> + signal(State, {message, Message}); +``` + +### execute + +```erlang +execute(_, S) -> {ok, S}. +``` + +### add_monitor + +```erlang +add_monitor(Mon, State = #{ <<"monitors">> := Monitors }) -> + {ok, State#{ <<"monitors">> => [Mon | Monitors] }}. +``` + +### end_of_schedule + +```erlang +end_of_schedule(State) -> signal(State, end_of_schedule). 
+``` + +### signal + +```erlang +signal(State = #{ <<"monitors">> := StartingMonitors }, Signal) -> + RemainingMonitors = + lists:filter( + fun(Mon) -> + case Mon(State, Signal) of + done -> false; + _ -> true + end + end, + StartingMonitors + ), + ?event({remaining_monitors, length(RemainingMonitors)}), + {ok, State#{ <<"monitors">> := RemainingMonitors }}. +``` + +--- + +*Generated from [dev_monitor.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_monitor.erl)* diff --git a/docs/book/src/dev_multipass.erl.md b/docs/book/src/dev_multipass.erl.md new file mode 100644 index 000000000..9c6278798 --- /dev/null +++ b/docs/book/src/dev_multipass.erl.md @@ -0,0 +1,77 @@ +# dev_multipass + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_multipass.erl) + +A device that triggers repass events until a certain counter has been +reached. This is useful for certain types of stacks that need various +execution passes to be completed in sequence across devices. + +--- + +## Exported Functions + +- `info/1` + +--- + +### info + +A device that triggers repass events until a certain counter has been + +```erlang +info(_M1) -> + #{ + handler => fun handle/4 + }. +``` + +### handle + +Forward the keys function to the message device, handle all others + +```erlang +handle(<<"keys">>, M1, _M2, Opts) -> + dev_message:keys(M1, Opts); +``` + +### handle + +Forward the keys function to the message device, handle all others + +```erlang +handle(<<"set">>, M1, M2, Opts) -> + dev_message:set(M1, M2, Opts); +``` + +### handle + +Forward the keys function to the message device, handle all others + +```erlang +handle(_Key, M1, _M2, Opts) -> + Passes = hb_ao:get(<<"passes">>, {as, dev_message, M1}, 1, Opts), + Pass = hb_ao:get(<<"pass">>, {as, dev_message, M1}, 1, Opts), + case Pass < Passes of + true -> {pass, M1}; + false -> {ok, M1} + end. 
+``` + +### basic_multipass_test + +```erlang +basic_multipass_test() -> + Msg1 = + #{ + <<"device">> => <<"multipass@1.0">>, + <<"passes">> => 2, + <<"pass">> => 1 + }, + Msg2 = Msg1#{ <<"pass">> => 2 }, + ?assertMatch({pass, _}, hb_ao:resolve(Msg1, <<"Compute">>, #{})), + ?event(alive), +``` + +--- + +*Generated from [dev_multipass.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_multipass.erl)* diff --git a/docs/book/src/dev_name.erl.md b/docs/book/src/dev_name.erl.md new file mode 100644 index 000000000..5d540157e --- /dev/null +++ b/docs/book/src/dev_name.erl.md @@ -0,0 +1,200 @@ +# dev_name + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_name.erl) + +A device for resolving names to their corresponding values, through the +use of a `resolver` interface. Each `resolver` is a message that can be +given a `key` and returns an associated value. The device will attempt to +match the key against each resolver in turn, and return the value of the +first resolver that matches. + +--- + +## Exported Functions + +- `info/1` + +--- + +### info + +A device for resolving names to their corresponding values, through the +Configure the `default` key to proxy to the `resolver/4` function. + +```erlang +info(_) -> + #{ + default => fun resolve/4, + excludes => [<<"keys">>, <<"set">>] + }. +``` + +### resolve + +Resolve a name to its corresponding value. The name is given by the key + +```erlang +resolve(Key, _, Req, Opts) -> + Resolvers = hb_opts:get(name_resolvers, [], Opts), + ?event({resolvers, Resolvers}), + case match_resolver(Key, Resolvers, Opts) of + {ok, Resolved} -> + case hb_util:atom(hb_ao:get(<<"load">>, Req, true, Opts)) of + false -> + {ok, Resolved}; + true -> + hb_cache:read(Resolved, Opts) + end; + not_found -> + not_found + end. +``` + +### match_resolver + +Find the first resolver that matches the key and return its value. 
+ +```erlang +match_resolver(_Key, [], _Opts) -> + not_found; +``` + +### match_resolver + +Find the first resolver that matches the key and return its value. + +```erlang +match_resolver(Key, [Resolver | Resolvers], Opts) -> + case execute_resolver(Key, Resolver, Opts) of + {ok, Value} -> + ?event({resolver_found, {key, Key}, {value, Value}}), + {ok, Value}; + _ -> + match_resolver(Key, Resolvers, Opts) + end. +``` + +### execute_resolver + +Execute a resolver with the given key and return its value. + +```erlang +execute_resolver(Key, Resolver, Opts) -> + ?event({executing, {key, Key}, {resolver, Resolver}}), + hb_ao:resolve( + Resolver, + #{ <<"path">> => <<"lookup">>, <<"key">> => Key }, + Opts + ). +``` + +### no_resolvers_test + +```erlang +no_resolvers_test() -> + ?assertEqual( + not_found, + resolve(<<"hello">>, #{}, #{}, #{ only => local }) + ). +``` + +### message_lookup_device_resolver + +```erlang +message_lookup_device_resolver(Msg) -> + #{ + <<"device">> => #{ + <<"lookup">> => fun(_, Req, Opts) -> + Key = hb_ao:get(<<"key">>, Req, Opts), + ?event({test_resolver_executing, {key, Key}, {req, Req}, {msg, Msg}}), + case maps:get(Key, Msg, not_found) of + not_found -> + ?event({test_resolver_not_found, {key, Key}, {msg, Msg}}), + {error, not_found}; + Value -> + ?event({test_resolver_found, {key, Key}, {value, Value}}), + {ok, Value} + end + end + } + }. +``` + +### single_resolver_test + +```erlang +single_resolver_test() -> + ?assertEqual( + {ok, <<"world">>}, + resolve( + <<"hello">>, + #{}, + #{ <<"load">> => false }, + #{ + name_resolvers => [ + message_lookup_device_resolver( + #{<<"hello">> => <<"world">>} + ) + ] + } + ) + ). 
+``` + +### multiple_resolvers_test + +```erlang +multiple_resolvers_test() -> + ?assertEqual( + {ok, <<"bigger-world">>}, + resolve( + <<"hello">>, + #{}, + #{ <<"load">> => false }, + #{ + name_resolvers => [ + message_lookup_device_resolver( + #{<<"irrelevant">> => <<"world">>} + ), + message_lookup_device_resolver( + #{<<"hello">> => <<"bigger-world">>} + ) + ] + } + ) + ). +``` + +### load_and_execute_test + +Test that we can resolve messages from a name loaded with the device. + +```erlang +load_and_execute_test() -> + TestKey = <<"test-key", (hb_util:bin(erlang:system_time(millisecond)))/binary>>, + {ok, ID} = hb_cache:write( + #{ + <<"deep">> => <<"PING">> + }, + #{} + ), + ?assertEqual( + {ok, <<"PING">>}, + hb_ao:resolve_many( + [ + #{ <<"device">> => <<"name@1.0">> }, + #{ <<"path">> => TestKey }, + #{ <<"path">> => <<"deep">> } + ], + #{ + name_resolvers => [ + message_lookup_device_resolver(#{ <<"irrelevant">> => ID }), + message_lookup_device_resolver(#{ TestKey => ID }) + ] + } + ) +``` + +--- + +*Generated from [dev_name.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_name.erl)* diff --git a/docs/book/src/dev_node_process.erl.md b/docs/book/src/dev_node_process.erl.md new file mode 100644 index 000000000..34382b371 --- /dev/null +++ b/docs/book/src/dev_node_process.erl.md @@ -0,0 +1,204 @@ +# dev_node_process + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_node_process.erl) + +A device that implements the singleton pattern for processes specific +to an individual node. This device uses the `local-name@1.0` device to +register processes with names locally, persistenting them across reboots. +Definitions of singleton processes are expected to be found with their +names in the `node_processes` section of the node message. + +--- + +## Exported Functions + +- `info/1` + +--- + +### info + +A device that implements the singleton pattern for processes specific +Register a default handler for the device. 
Inherits `keys` and `set` + +```erlang +info(_Opts) -> + #{ + default => fun lookup/4, + excludes => [<<"set">>, <<"keys">>] + }. +``` + +### lookup + +Lookup a process by name. + +```erlang +lookup(Name, _Base, Req, Opts) -> + ?event(node_process, {lookup, {name, Name}}), + LookupRes = + hb_ao:resolve( + #{ <<"device">> => <<"local-name@1.0">> }, + #{ <<"path">> => <<"lookup">>, <<"key">> => Name, <<"load">> => true }, + Opts + ), + case LookupRes of + {ok, ProcessID} -> + hb_cache:read(ProcessID, Opts); + {error, not_found} -> + case hb_ao:get(<<"spawn">>, Req, true, Opts) of + true -> + spawn_register(Name, Opts); + false -> + {error, not_found} + end + end. +``` + +### spawn_register + +Spawn a new process according to the process definition found in the + +```erlang +spawn_register(Name, Opts) -> + case hb_opts:get(node_processes, #{}, Opts) of + #{ Name := BaseDef } -> + % We have found the base process definition. Augment it with the + % node's address as necessary, then commit to the result. +``` + +### augment_definition + +Augment the given process definition with the node's address. + +```erlang +augment_definition(BaseDef, Opts) -> + Address = + hb_util:human_id( + ar_wallet:to_address( + hb_opts:get(priv_wallet, no_viable_wallet, Opts) + ) + ), + SchedulersFromBase = + hb_util:binary_to_addresses( + hb_ao:get(<<"scheduler">>, BaseDef, <<>>, Opts) + ), + AuthoritiesFromBase = + hb_util:binary_to_addresses( + hb_ao:get(<<"authority">>, BaseDef, <<>>, Opts) + ), + Schedulers = (SchedulersFromBase -- [Address]) ++ [Address], + Authorities = (AuthoritiesFromBase -- [Address]) ++ [Address], + % Normalize the scheduler and authority lists to binary strings. +``` + +### generate_test_opts + +Helper function to generate a test environment and its options. 
+ +```erlang +generate_test_opts() -> + {ok, Module} = file:read_file(<<"test/test.lua">>), + generate_test_opts(#{ + ?TEST_NAME => #{ + <<"device">> => <<"process@1.0">>, + <<"execution-device">> => <<"lua@5.3a">>, + <<"scheduler-device">> => <<"scheduler@1.0">>, + <<"module">> => #{ + <<"content-type">> => <<"text/x-lua">>, + <<"body">> => Module + } + } + }). +``` + +### generate_test_opts + +```erlang +generate_test_opts(Defs) -> + #{ + node_processes => Defs, + priv_wallet => ar_wallet:new() + }. +``` + +### lookup_no_spawn_test + +```erlang +lookup_no_spawn_test() -> + Opts = generate_test_opts(), + ?assertEqual( + {error, not_found}, + lookup(<<"name1">>, #{}, #{}, Opts) + ). +``` + +### lookup_spawn_test + +```erlang +lookup_spawn_test() -> + Opts = generate_test_opts(), + Res1 = {_, Process1} = + hb_ao:resolve( + #{ <<"device">> => <<"node-process@1.0">> }, + ?TEST_NAME, + Opts + ), + ?assertMatch( + {ok, #{ <<"device">> := <<"process@1.0">> }}, + Res1 + ), + {ok, Process2} = hb_ao:resolve( + #{ <<"device">> => <<"node-process@1.0">> }, + ?TEST_NAME, + Opts + ), + ?assertEqual( + hb_cache:ensure_all_loaded(Process1, Opts), + hb_cache:ensure_all_loaded(Process2, Opts) + ). +``` + +### lookup_execute_test + +Test that a process can be spawned, executed upon, and its result retrieved. 
+ +```erlang +lookup_execute_test() -> + Opts = generate_test_opts(), + Res1 = + hb_ao:resolve_many( + [ + #{ <<"device">> => <<"node-process@1.0">> }, + ?TEST_NAME, + #{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + hb_message:commit( + #{ + <<"path">> => <<"compute">>, + <<"test-key">> => <<"test-value">> + }, + Opts + ) + } + ], + Opts + ), + ?assertMatch( + {ok, #{ <<"slot">> := 1 }}, + Res1 + ), + ?assertMatch( + 42, + hb_ao:get( + << ?TEST_NAME/binary, "/now/results/output/body" >>, + #{ <<"device">> => <<"node-process@1.0">> }, + Opts + ) +``` + +--- + +*Generated from [dev_node_process.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_node_process.erl)* diff --git a/docs/book/src/dev_p4.erl.md b/docs/book/src/dev_p4.erl.md new file mode 100644 index 000000000..6964fac5a --- /dev/null +++ b/docs/book/src/dev_p4.erl.md @@ -0,0 +1,308 @@ +# dev_p4 + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_p4.erl) + +The HyperBEAM core payment ledger. This module allows the operator to +specify another device that can act as a pricing mechanism for transactions +on the node, as well as orchestrating a payment ledger to calculate whether +the node should fulfil services for users. +The device requires the following node message settings in order to function: +- `p4_pricing-device`: The device that will estimate the cost of a request. +- `p4_ledger-device`: The device that will act as a payment ledger. +The pricing device should implement the following keys: +
+            `GET /estimate?type=pre|post&body=[...]&request=RequestMessage`
+            `GET /price?type=pre|post&body=[...]&request=RequestMessage`
+
+The `body` key is used to pass either the request or response messages to the +device. The `type` key is used to specify whether the inquiry is for a request +(pre) or a response (post) object. Requests carry lists of messages that will +be executed, while responses carry the results of the execution. The `price` +key may return `infinity` if the node will not serve a user under any +circumstances. Else, the value returned by the `price` key will be passed to +the ledger device as the `amount` key. +A ledger device should implement the following keys: +
+            `POST /credit?message=PaymentMessage&request=RequestMessage`
+            `POST /charge?amount=PriceMessage&request=RequestMessage`
+            `GET /balance?request=RequestMessage`
+
+The `type` key is optional and defaults to `pre`. If `type` is set to `post`, +the charge must be applied to the ledger, whereas the `pre` type is used to +check whether the charge would succeed before execution. + +--- + +## Exported Functions + +- `balance/3` +- `request/3` +- `response/3` + +--- + +### request + +The HyperBEAM core payment ledger. This module allows the operator to +Estimate the cost of a transaction and decide whether to proceed with + +```erlang +request(State, Raw, NodeMsg) -> + PricingDevice = hb_ao:get(<<"pricing-device">>, State, false, NodeMsg), + LedgerDevice = hb_ao:get(<<"ledger-device">>, State, false, NodeMsg), + Messages = hb_ao:get(<<"body">>, Raw, NodeMsg#{ hashpath => ignore }), + Request = hb_ao:get(<<"request">>, Raw, NodeMsg), + IsChargable = is_chargable_req(Request, NodeMsg), + ?event(payment, + {preprocess_with_devices, + PricingDevice, + LedgerDevice, + {chargable, IsChargable} + } + ), + case {IsChargable, (PricingDevice =/= false) and (LedgerDevice =/= false)} of + {false, _} -> + ?event(payment, non_chargable_route), + {ok, #{ <<"body">> => Messages }}; + {true, false} -> + ?event(payment, {p4_pre_pricing_response, {error, <<"infinity">>}}), + {ok, #{ <<"body">> => Messages }}; + {true, true} -> + PricingMsg = State#{ <<"device">> => PricingDevice }, + LedgerMsg = State#{ <<"device">> => LedgerDevice }, + PricingReq = #{ + <<"path">> => <<"estimate">>, + <<"request">> => Request, + <<"body">> => Messages + }, + ?event({p4_pricing_request, {devmsg, PricingMsg}, {req, PricingReq}}), + case hb_ao:resolve(PricingMsg, PricingReq, NodeMsg) of + {ok, <<"infinity">>} -> + % The device states that under no circumstances should we + % proceed with the request. +``` + +### response + +Postprocess the request after it has been fulfilled. 
+ +```erlang +response(State, RawResponse, NodeMsg) -> + PricingDevice = hb_ao:get(<<"pricing-device">>, State, false, NodeMsg), + LedgerDevice = hb_ao:get(<<"ledger-device">>, State, false, NodeMsg), + Response = + hb_ao:get( + <<"body">>, + RawResponse, + NodeMsg#{ hashpath => ignore } + ), + Request = hb_ao:get(<<"request">>, RawResponse, NodeMsg), + ?event(payment, {post_processing_with_devices, PricingDevice, LedgerDevice}), + ?event({response_hook, {request, Request}, {response, Response}}), + case ((PricingDevice =/= false) and (LedgerDevice =/= false)) andalso + is_chargable_req(Request, NodeMsg) of + false -> + {ok, #{ <<"body">> => Response }}; + true -> + PricingMsg = State#{ <<"device">> => PricingDevice }, + LedgerMsg = State#{ <<"device">> => LedgerDevice }, + PricingReq = #{ + <<"path">> => <<"price">>, + <<"request">> => Request, + <<"body">> => Response + }, + ?event({post_pricing_request, PricingReq}), + PricingRes = + case hb_ao:resolve(PricingMsg, PricingReq, NodeMsg) of + {error, _Error} -> + % The pricing device is unable to give us a cost for + % the request, so we try to estimate it instead. +``` + +### balance + +Get the balance of a user in the ledger. + +```erlang +balance(_, Req, NodeMsg) -> + case dev_hook:find(<<"request">>, NodeMsg) of + [] -> + {error, <<"No request hook found.">>}; + [Handler] -> + LedgerDevice = + hb_ao:get(<<"ledger-device">>, Handler, false, NodeMsg), + LedgerMsg = Handler#{ <<"device">> => LedgerDevice }, + LedgerReq = #{ + <<"path">> => <<"balance">>, + <<"request">> => Req + }, + ?event({ledger_message, {ledger_msg, LedgerMsg}}), + case hb_ao:resolve(LedgerMsg, LedgerReq, NodeMsg) of + {ok, Balance} -> + {ok, Balance}; + {error, Error} -> + {error, Error} + end + end. 
+``` + +### is_chargable_req + +The node operator may elect to make certain routes non-chargable, using + +```erlang +is_chargable_req(Req, NodeMsg) -> + NonChargableRoutes = + hb_opts:get( + p4_non_chargable_routes, + ?DEFAULT_NON_CHARGABLE_ROUTES, + NodeMsg + ), + Matches = + dev_router:match( + #{ <<"routes">> => NonChargableRoutes }, + Req, + NodeMsg + ), + ?event( + { + is_chargable, + {non_chargable_routes, NonChargableRoutes}, + {req, Req}, + {matches, Matches} + } + ), + case Matches of + {error, no_matching_route} -> true; + _ -> false + end. +``` + +### test_opts + +```erlang +test_opts(Opts) -> + test_opts(Opts, <<"faff@1.0">>). +``` + +### test_opts + +```erlang +test_opts(Opts, PricingDev) -> + test_opts(Opts, PricingDev, <<"faff@1.0">>). +``` + +### test_opts + +```erlang +test_opts(Opts, PricingDev, LedgerDev) -> + ProcessorMsg = + #{ + <<"device">> => <<"p4@1.0">>, + <<"pricing-device">> => PricingDev, + <<"ledger-device">> => LedgerDev + }, + Opts#{ + on => #{ + <<"request">> => ProcessorMsg, + <<"response">> => ProcessorMsg + } + }. +``` + +### faff_test + +Simple test of p4's capabilities with the `faff@1.0` device. + +```erlang +faff_test() -> + GoodWallet = ar_wallet:new(), + BadWallet = ar_wallet:new(), + Node = hb_http_server:start_node( + test_opts( + #{ + faff_allow_list => + [hb_util:human_id(ar_wallet:to_address(GoodWallet))] + } + ) + ), + Req = #{ + <<"path">> => <<"/greeting">>, + <<"greeting">> => <<"Hello, world!">> + }, + GoodSignedReq = hb_message:commit(Req, GoodWallet), + ?event({req, GoodSignedReq}), + BadSignedReq = hb_message:commit(Req, BadWallet), + ?event({req, BadSignedReq}), + {ok, Res} = hb_http:get(Node, GoodSignedReq, #{}), + ?event(payment, {res, Res}), + ?assertEqual(<<"Hello, world!">>, Res), + ?assertMatch({error, _}, hb_http:get(Node, BadSignedReq, #{})). +``` + +### non_chargable_route_test + +Test that a non-chargable route is not charged for. 
+ +```erlang +non_chargable_route_test() -> + Wallet = ar_wallet:new(), + Processor = + #{ + <<"device">> => <<"p4@1.0">>, + <<"ledger-device">> => <<"simple-pay@1.0">>, + <<"pricing-device">> => <<"simple-pay@1.0">> + }, + Node = hb_http_server:start_node( + #{ + p4_non_chargable_routes => + [ + #{ <<"template">> => <<"/~p4@1.0/balance">> }, + #{ <<"template">> => <<"/~meta@1.0/*/*">> } + ], + on => #{ + <<"request">> => Processor, + <<"response">> => Processor + }, + operator => hb:address() + } + ), + Req = #{ + <<"path">> => <<"/~p4@1.0/balance">> + }, + GoodSignedReq = hb_message:commit(Req, Wallet), + Res = hb_http:get(Node, GoodSignedReq, #{}), + ?event({res1, Res}), + ?assertMatch({ok, 0}, Res), + Req2 = #{ <<"path">> => <<"/~meta@1.0/info/operator">> }, + GoodSignedReq2 = hb_message:commit(Req2, Wallet), + Res2 = hb_http:get(Node, GoodSignedReq2, #{}), + ?event({res2, Res2}), + OperatorAddress = hb_util:human_id(hb:address()), + ?assertEqual({ok, OperatorAddress}, Res2), + Req3 = #{ <<"path">> => <<"/~scheduler@1.0">> }, + BadSignedReq3 = hb_message:commit(Req3, Wallet), + Res3 = hb_http:get(Node, BadSignedReq3, #{}), + ?event({res3, Res3}), + ?assertMatch({error, _}, Res3). +``` + +### hyper_token_ledger_test_ + +Ensure that Lua scripts can be used as pricing and ledger devices. Our + +```erlang +hyper_token_ledger_test_() -> + {timeout, 60, fun hyper_token_ledger/0}. +``` + +### hyper_token_ledger + +```erlang +hyper_token_ledger() -> + % Create the wallets necessary and read the files containing the scripts. 
+```
+
+---
+
+*Generated from [dev_p4.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_p4.erl)*
diff --git a/docs/book/src/dev_patch.erl.md b/docs/book/src/dev_patch.erl.md
new file mode 100644
index 000000000..7fc36cc2e
--- /dev/null
+++ b/docs/book/src/dev_patch.erl.md
@@ -0,0 +1,288 @@
+# dev_patch
+
+[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_patch.erl)
+
+A device that can be used to reorganize a message: Moving data from
+one path inside it to another. This device's function runs in two modes:
+1. When using `all` to move all data at the path given in `from` to the
+   path given in `to`.
+2. When using `patches` to move all submessages in the source to the target,
+   _if_ they have a `method` key of `PATCH` or a `device` key of `patch@1.0`.
+Source and destination paths may be prepended by `base:` or `req:` keys to
+indicate that they are relative to either of the messages that the
+computation is being performed on.
+The search order for finding the source and destination keys is as follows,
+where `X` is either `from` or `to`:
+1. The `patch-X` key of the execution message.
+2. The `X` key of the execution message.
+3. The `patch-X` key of the request message.
+4. The `X` key of the request message.
+Additionally, this device implements the standard computation device keys,
+allowing it to be used as an element of an execution stack pipeline, etc.
+
+---
+
+## Exported Functions
+
+- `all/3`
+- `compute/3`
+- `init/3`
+- `normalize/3`
+- `patches/3`
+- `snapshot/3`
+
+---
+
+### init
+
+A device that can be used to reorganize a message: Moving data from
+Necessary hooks for compliance with the `execution-device` standard.
+
+```erlang
+init(Msg1, _Msg2, _Opts) -> {ok, Msg1}.
+```
+
+### normalize
+
+A device that can be used to reorganize a message: Moving data from
+Necessary hooks for compliance with the `execution-device` standard.
+
+```erlang
+normalize(Msg1, _Msg2, _Opts) -> {ok, Msg1}.
+``` + +### snapshot + +A device that can be used to reorganize a message: Moving data from +Necessary hooks for compliance with the `execution-device` standard. + +```erlang +snapshot(Msg1, _Msg2, _Opts) -> {ok, Msg1}. +``` + +### compute + +A device that can be used to reorganize a message: Moving data from +Necessary hooks for compliance with the `execution-device` standard. +Get the value found at the `patch-from` key of the message, or the + +```erlang +compute(Msg1, Msg2, Opts) -> patches(Msg1, Msg2, Opts). +``` + +### all + +A device that can be used to reorganize a message: Moving data from +Necessary hooks for compliance with the `execution-device` standard. +Get the value found at the `patch-from` key of the message, or the + +```erlang +all(Msg1, Msg2, Opts) -> + move(all, Msg1, Msg2, Opts). +``` + +### patches + +Find relevant `PATCH` messages in the given source key of the execution + +```erlang +patches(Msg1, Msg2, Opts) -> + move(patches, Msg1, Msg2, Opts). +``` + +### move + +Unified executor for the `all` and `patches` modes. + +```erlang +move(Mode, Msg1, Msg2, Opts) -> + maybe + % Find the input paths. +``` + +### uninitialized_patch_test + +```erlang +uninitialized_patch_test() -> + InitState = #{ + <<"device">> => <<"patch@1.0">>, + <<"results">> => #{ + <<"outbox">> => #{ + <<"1">> => #{ + <<"method">> => <<"PATCH">>, + <<"prices">> => #{ + <<"apple">> => 100, + <<"banana">> => 200 + } + }, + <<"2">> => #{ + <<"method">> => <<"GET">>, + <<"prices">> => #{ + <<"apple">> => 1000 + } + } + } + }, + <<"other-message">> => <<"other-value">>, + <<"patch-to">> => <<"/">>, + <<"patch-from">> => <<"/results/outbox">> + }, + {ok, ResolvedState} = + hb_ao:resolve( + InitState, + <<"compute">>, + #{} + ), + ?event({resolved_state, ResolvedState}), + ?assertEqual( + 100, + hb_ao:get(<<"prices/apple">>, ResolvedState, #{}) + ), + ?assertMatch( + not_found, + hb_ao:get(<<"results/outbox/1">>, ResolvedState, #{}) + ). 
+``` + +### patch_to_submessage_test + +```erlang +patch_to_submessage_test() -> + InitState = #{ + <<"device">> => <<"patch@1.0">>, + <<"results">> => #{ + <<"outbox">> => #{ + <<"1">> => + hb_message:commit(#{ + <<"method">> => <<"PATCH">>, + <<"prices">> => #{ + <<"apple">> => 100, + <<"banana">> => 200 + } + }, + hb:wallet() + ) + } + }, + <<"state">> => #{ + <<"prices">> => #{ + <<"apple">> => 1000 + } + }, + <<"other-message">> => <<"other-value">>, + <<"patch-to">> => <<"/state">>, + <<"patch-from">> => <<"/results/outbox">> + }, + {ok, ResolvedState} = + hb_ao:resolve( + InitState, + <<"compute">>, + #{} + ), + ?event({resolved_state, ResolvedState}), + ?assertEqual( + 100, + hb_ao:get(<<"state/prices/apple">>, ResolvedState, #{}) + ). +``` + +### all_mode_test + +```erlang +all_mode_test() -> + InitState = #{ + <<"device">> => <<"patch@1.0">>, + <<"input">> => #{ + <<"zones">> => #{ + <<"1">> => #{ + <<"method">> => <<"PATCH">>, + <<"prices">> => #{ + <<"apple">> => 100, + <<"banana">> => 200 + } + }, + <<"2">> => #{ + <<"method">> => <<"GET">>, + <<"prices">> => #{ + <<"orange">> => 300 + } + } + } + }, + <<"state">> => #{ + <<"prices">> => #{ + <<"apple">> => 1000 + } + } + }, + {ok, ResolvedState} = + hb_ao:resolve( + InitState, + #{ + <<"path">> => <<"all">>, + <<"patch-to">> => <<"/state">>, + <<"patch-from">> => <<"/input/zones">> + }, + #{} + ), + ?event({resolved_state, ResolvedState}), + ?assertEqual( + 100, + hb_ao:get(<<"state/1/prices/apple">>, ResolvedState, #{}) + ), + ?assertEqual( + 300, + hb_ao:get(<<"state/2/prices/orange">>, ResolvedState, #{}) + ), + ?assertEqual( + not_found, + hb_ao:get(<<"input/zones">>, ResolvedState, #{}) + ). 
+``` + +### req_prefix_test + +```erlang +req_prefix_test() -> + BaseMsg = #{ + <<"device">> => <<"patch@1.0">>, + <<"state">> => #{ + <<"prices">> => #{ + <<"apple">> => 1000 + } + } + }, + ReqMsg = #{ + <<"path">> => <<"all">>, + <<"patch-from">> => <<"req:/results/outbox/1">>, + <<"patch-to">> => <<"/state">>, + <<"results">> => #{ + <<"outbox">> => #{ + <<"1">> => #{ + <<"method">> => <<"PATCH">>, + <<"prices">> => #{ + <<"apple">> => 100, + <<"banana">> => 200 + } + } + } + } + }, + {ok, ResolvedState} = hb_ao:resolve(BaseMsg, ReqMsg, #{}), + ?event({resolved_state, ResolvedState}), + ?assertEqual( + 100, + hb_ao:get(<<"state/prices/apple">>, ResolvedState, #{}) + ), + ?assertEqual( + 200, + hb_ao:get(<<"state/prices/banana">>, ResolvedState, #{}) + ), + ?assertEqual( + not_found, + hb_ao:get(<<"results/outbox/1">>, ResolvedState, #{}) +``` + +--- + +*Generated from [dev_patch.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_patch.erl)* diff --git a/docs/book/src/dev_poda.erl.md b/docs/book/src/dev_poda.erl.md new file mode 100644 index 000000000..27df7947a --- /dev/null +++ b/docs/book/src/dev_poda.erl.md @@ -0,0 +1,298 @@ +# dev_poda + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_poda.erl) + +A simple exemplar decentralized proof of authority consensus algorithm +A simple exemplar decentralized proof of authority consensus algorithm +for AO processes. This device is split into two flows, spanning three +actions. +Execution flow: +1. Initialization. +2. Validation of incoming messages before execution. +Commitment flow: +1. Adding commitments to results, either on a CU or MU. + +--- + +## Exported Functions + +- `execute/3` +- `init/2` +- `is_user_signed/1` +- `push/3` + +--- + +### init + +A simple exemplar decentralized proof of authority consensus algorithm + +```erlang +init(S, Params) -> + {ok, S, extract_opts(Params)}. 
+``` + +### extract_opts + +```erlang +extract_opts(Params) -> + Authorities = + lists:filtermap( + fun({<<"authority">>, Addr}) -> {true, Addr}; + (_) -> false end, + Params + ), + {_, RawQuorum} = lists:keyfind(<<"quorum">>, 1, Params), + Quorum = binary_to_integer(RawQuorum), + ?event({poda_authorities, Authorities}), + #{ + authorities => Authorities, + quorum => Quorum + }. +``` + +### execute + +```erlang +execute(Outer = #tx { data = #{ <<"body">> := Msg } }, S = #{ <<"pass">> := 1 }, Opts) -> + case is_user_signed(Msg) of + true -> + {ok, S}; + false -> + case validate(Msg, Opts) of + true -> + ?event({poda_validated, ok}), + % Add the validations to the VFS. +``` + +### execute + +```erlang +execute(_M, S = #{ <<"pass">> := 3, <<"results">> := _Results }, _Opts) -> + {ok, S}; +``` + +### execute + +```erlang +execute(_M, S, _Opts) -> + {ok, S}. +``` + +### validate + +```erlang +validate(Msg, Opts) -> + validate_stage(1, Msg, Opts). +``` + +### validate_stage + +```erlang +validate_stage(1, Msg, Opts) when is_record(Msg, tx) -> + validate_stage(1, Msg#tx.data, Opts); +``` + +### validate_stage + +```erlang +validate_stage(1, #{ <<"commitments">> := Commitments, <<"body">> := Content }, Opts) -> + validate_stage(2, Commitments, Content, Opts); +``` + +### validate_stage + +```erlang +validate_stage(1, _M, _Opts) -> {false, <<"Required PoDA messages missing">>}. +``` + +### validate_stage + +```erlang +validate_stage(2, #tx { data = Commitments }, Content, Opts) -> + validate_stage(2, Commitments, Content, Opts); +``` + +### validate_stage + +```erlang +validate_stage(2, Commitments, Content, Opts) -> + % Ensure that all commitments are valid and signed by a + % trusted authority. 
+``` + +### validate_stage + +```erlang +validate_stage(3, Content, Commitments, Opts = #{ <<"quorum">> := Quorum }) -> + Validations = + lists:filter( + fun({_, Comm}) -> validate_commitment(Content, Comm, Opts) end, + hb_maps:to_list(Commitments, Opts) + ), + ?event({poda_validations, length(Validations)}), + case length(Validations) >= Quorum of + true -> + ?event({poda_quorum_reached, length(Validations)}), + true; + false -> {false, <<"Not enough validations">>} + end. +``` + +### validate_commitment + +```erlang +validate_commitment(Msg, Comm, Opts) -> + MsgID = hb_util:encode(ar_bundles:id(Msg, unsigned)), + AttSigner = hb_util:encode(ar_bundles:signer(Comm)), + ?event({poda_commitment, {signer, AttSigner, hb_maps:get(authorities, Opts, undefined, Opts)}, {msg_id, MsgID}}), + ValidSigner = lists:member(AttSigner, hb_maps:get(authorities, Opts, undefined, Opts)), + ValidSignature = ar_bundles:verify_item(Comm), + RelevantMsg = ar_bundles:id(Comm, unsigned) == MsgID orelse + (lists:keyfind(<<"commitment-for">>, 1, Comm#tx.tags) + == {<<"commitment-for">>, MsgID}) orelse + ar_bundles:member(ar_bundles:id(Msg, unsigned), Comm), + case ValidSigner and ValidSignature and RelevantMsg of + false -> + ?event({poda_commitment_invalid, + {commitment, ar_bundles:id(Comm, signed)}, + {signer, AttSigner}, + {valid_signer, ValidSigner}, + {valid_signature, ValidSignature}, + {relevant_msg, RelevantMsg}} + ), + false; + true -> true + end. +``` + +### return_error + +```erlang +return_error(S = #{ <<"wallet">> := Wallet }, Reason) -> + ?event({poda_return_error, Reason}), + ?debug_wait(10000), + {skip, S#{ + results => #{ + <<"/outbox">> => + ar_bundles:sign_item( + #tx{ + data = Reason, + tags = [{<<"error">>, <<"PoDA">>}] + }, + Wallet + ) + } + }}. 
+``` + +### is_user_signed + +Determines if a user committed + +```erlang +is_user_signed(#tx { data = #{ <<"body">> := Msg } }) -> + ?no_prod(use_real_commitment_detection), + lists:keyfind(<<"from-process">>, 1, Msg#tx.tags) == false; +``` + +### is_user_signed + +Determines if a user committed + +```erlang +is_user_signed(_) -> true. +%%% Commitment flow: Adding commitments to results. +``` + +### push + +Hook used by the MU pathway (currently) to add commitments to an + +```erlang +push(_Item, S = #{ <<"results">> := ResultsMsg }, Opts) -> + NewRes = commit_to_results(ResultsMsg, S, Opts), + {ok, S#{ <<"results">> => NewRes }}. +``` + +### commit_to_results + +Hook used by the MU pathway (currently) to add commitments to an + +```erlang +commit_to_results(Msg, S, Opts) -> + case is_map(Msg#tx.data) of + true -> + % Add commitments to the outbox and spawn items. +``` + +### add_commitments + +```erlang +add_commitments(NewMsg, S = #{ <<"assignment">> := Assignment, <<"store">> := _Store, <<"logger">> := _Logger, <<"wallet">> := Wallet }, Opts) -> + Process = find_process(NewMsg, S), + case is_record(Process, tx) andalso lists:member({<<"device">>, <<"PODA">>}, Process#tx.tags) of + true -> + #{ <<"authorities">> := InitAuthorities, <<"quorum">> := Quorum } = + extract_opts(Process#tx.tags), + ?event({poda_push, InitAuthorities, Quorum}), + % Aggregate validations from other nodes. +``` + +### pfiltermap + +Helper function for parallel execution of commitment + +```erlang +pfiltermap(Pred, List) -> + Parent = self(), + Pids = lists:map(fun(X) -> + spawn_monitor(fun() -> + Result = {X, Pred(X)}, + ?event({pfiltermap, sending_result, self()}), + Parent ! 
{self(), Result} + end) + end, List), + ?event({pfiltermap, waiting_for_results, Pids}), + [ + Res + || + {true, Res} <- + lists:map(fun({Pid, Ref}) -> + receive + {Pid, {_Item, Result}} -> + ?event({pfiltermap, received_result, Pid}), + Result; + % Handle crashes as filterable events + {'DOWN', Ref, process, Pid, _Reason} -> + ?event({pfiltermap, crashed, Pid}), + false; + Other -> + ?event({pfiltermap, unexpected_message, Other}), + false + end + end, Pids) + ]. +``` + +### find_process + +Find the process that this message is targeting, in order to + +```erlang +find_process(Item, #{ <<"logger">> := _Logger, <<"store">> := Store }) -> + case Item#tx.target of + X when X =/= <<>> -> + ?event({poda_find_process, hb_util:id(Item#tx.target)}), + {ok, Proc} = hb_cache:read(Store, hb_util:id(Item#tx.target)), + Proc; + _ -> + case lists:keyfind(<<"type">>, 1, Item#tx.tags) of + {<<"type">>, <<"process">>} -> Item; + _ -> process_not_specified + end +``` + +--- + +*Generated from [dev_poda.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_poda.erl)* diff --git a/docs/book/src/dev_process.erl.md b/docs/book/src/dev_process.erl.md new file mode 100644 index 000000000..76b2f0283 --- /dev/null +++ b/docs/book/src/dev_process.erl.md @@ -0,0 +1,1221 @@ +# dev_process + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_process.erl) + +This module contains the device implementation of AO processes +in AO-Core. The core functionality of the module is in 'routing' requests +for different functionality (scheduling, computing, and pushing messages) +to the appropriate device. This is achieved by swapping out the device +of the process message with the necessary component in order to run the +execution, then swapping it back before returning. Computation is supported +as a stack of devices, customizable by the user, while the scheduling +device is (by default) a single device. +This allows the devices to share state as needed. 
Additionally, after each +computation step the device caches the result at a path relative to the +process definition itself, such that the process message's ID can act as an +immutable reference to the process's growing list of interactions. See +`dev_process_cache` for details. +The external API of the device is as follows: +
+GET /ID/Schedule:                Returns the messages in the schedule
+POST /ID/Schedule:               Adds a message to the schedule
+GET /ID/Compute/[IDorSlotNum]:   Returns the state of the process after 
+                                 applying a message
+GET /ID/Now:                     Returns the `/Results` key of the latest 
+                                 computed message
+
+An example process definition will look like this: +
+    Device: Process/1.0
+    Scheduler-Device: Scheduler/1.0
+    Execution-Device: Stack/1.0
+    Execution-Stack: "Scheduler/1.0", "Cron/1.0", "WASM/1.0", "PoDA/1.0"
+    Cron-Frequency: 10-Minutes
+    WASM-Image: WASMImageID
+    PoDA:
+        Device: PoDA/1.0
+        Authority: A
+        Authority: B
+        Authority: C
+        Quorum: 2
+
+Runtime options: + Cache-Frequency: The number of assignments that will be computed + before the full (restorable) state should be cached. + Cache-Keys: A list of the keys that should be cached for all + assignments, in addition to `/Results`. + +--- + +## Exported Functions + +- `as_process/2` +- `as/3` +- `compute/3` +- `dev_test_process/0` +- `do_test_restore/0` +- `ensure_process_key/2` +- `info/1` +- `init/0` +- `now/3` +- `process_id/3` +- `push/3` +- `schedule_aos_call/2` +- `schedule_aos_call/3` +- `schedule/3` +- `slot/3` +- `snapshot/3` +- `test_aos_process/0` +- `test_aos_process/1` +- `test_wasm_process/1` + +--- + +### info + +This module contains the device implementation of AO processes +When the info key is called, we should return the process exports. + +```erlang +info(_Msg1) -> + #{ + worker => fun dev_process_worker:server/3, + grouper => fun dev_process_worker:group/3, + await => fun dev_process_worker:await/5, + excludes => [ + <<"test">>, + <<"init">>, + <<"ping_ping_script">>, + <<"schedule_aos_call">>, + <<"test_aos_process">>, + <<"dev_test_process">>, + <<"test_wasm_process">> + ] + }. 
+``` + +### as + +Return the process state with the device swapped out for the device + +```erlang +as(RawMsg1, Msg2, Opts) -> + {ok, Msg1} = ensure_loaded(RawMsg1, Msg2, Opts), + Key = + hb_ao:get_first( + [ + {{as, <<"message@1.0">>, Msg2}, <<"as">>}, + {{as, <<"message@1.0">>, Msg2}, <<"as-device">>} + ], + <<"execution">>, + Opts + ), + {ok, + hb_util:deep_merge( + ensure_process_key(Msg1, Opts), + #{ + <<"device">> => + hb_maps:get( + << Key/binary, "-device">>, + Msg1, + default_device(Msg1, Key, Opts), + Opts + ), + % Configure input prefix for proper message routing within the + % device + <<"input-prefix">> => + case hb_maps:get(<<"input-prefix">>, Msg1, not_found, Opts) of + not_found -> <<"process">>; + Prefix -> Prefix + end, + % Configure output prefixes for result organization + <<"output-prefixes">> => + hb_maps:get( + <>, + Msg1, + undefined, % Undefined in set will be ignored. +``` + +### default_device + +Returns the default device for a given piece of functionality. Expects + +```erlang +default_device(Msg1, Key, Opts) -> + NormKey = hb_ao:normalize_key(Key), + case {NormKey, hb_util:deep_get(<<"process/variant">>, Msg1, Opts)} of + {<<"execution">>, <<"ao.TN.1">>} -> <<"genesis-wasm@1.0">>; + _ -> default_device_index(NormKey) + end. +``` + +### default_device_index + +```erlang +default_device_index(<<"scheduler">>) -> <<"scheduler@1.0">>; +``` + +### default_device_index + +```erlang +default_device_index(<<"execution">>) -> <<"genesis-wasm@1.0">>; +``` + +### default_device_index + +Wraps functions in the Scheduler device. + +```erlang +default_device_index(<<"push">>) -> <<"push@1.0">>. +``` + +### schedule + +Wraps functions in the Scheduler device. + +```erlang +schedule(Msg1, Msg2, Opts) -> + run_as(<<"scheduler">>, Msg1, Msg2, Opts). +``` + +### slot + +Wraps functions in the Scheduler device. + +```erlang +slot(Msg1, Msg2, Opts) -> + ?event({slot_called, {msg1, Msg1}, {msg2, Msg2}}), + run_as(<<"scheduler">>, Msg1, Msg2, Opts). 
+``` + +### next + +Wraps functions in the Scheduler device. + +```erlang +next(Msg1, _Msg2, Opts) -> + run_as(<<"scheduler">>, Msg1, next, Opts). +``` + +### snapshot + +Wraps functions in the Scheduler device. + +```erlang +snapshot(RawMsg1, _Msg2, Opts) -> + Msg1 = ensure_process_key(RawMsg1, Opts), + {ok, SnapshotMsg} = run_as( + <<"execution">>, + Msg1, + #{ <<"path">> => <<"snapshot">>, <<"mode">> => <<"Map">> }, + Opts#{ + cache_control => [<<"no-cache">>, <<"no-store">>], + hashpath => ignore + } + ), + ProcID = hb_message:id(Msg1, all, Opts), + Slot = hb_ao:get(<<"at-slot">>, {as, <<"message@1.0">>, Msg1}, Opts), + {ok, + hb_private:set( + SnapshotMsg#{ <<"cache-control">> => [<<"store">>] }, + #{ <<"priv/additional-hashpaths">> => + [ + hb_path:to_binary([ProcID, <<"snapshot">>, Slot]) + ] + }, + Opts + ) + }. +``` + +### process_id + +Returns the process ID of the current process. + +```erlang +process_id(Msg1, Msg2, Opts) -> + case hb_ao:get(<<"process">>, Msg1, Opts#{ hashpath => ignore }) of + not_found -> + process_id(ensure_process_key(Msg1, Opts), Msg2, Opts); + Process -> + hb_message:id( + Process, + hb_util:atom(maps:get(<<"commitments">>, Msg2, <<"all">>)), + Opts + ) + end. +``` + +### init + +Before computation begins, a boot phase is required. This phase + +```erlang +init(Msg1, Msg2, Opts) -> + ?event({init_called, {msg1, Msg1}, {msg2, Msg2}}), + {ok, Initialized} = + run_as(<<"execution">>, Msg1, #{ <<"path">> => init }, Opts), + { + ok, + hb_ao:set( + Initialized, + #{ + <<"initialized">> => <<"true">>, + <<"at-slot">> => -1 + }, + Opts + ) + }. +``` + +### compute + +Compute the result of an assignment applied to the process state. 
+ +```erlang +compute(Msg1, Msg2, Opts) -> + ProcBase = ensure_process_key(Msg1, Opts), + ProcID = process_id(ProcBase, #{}, Opts), + TargetSlot = + hb_ao:get_first( + [ + {{as, <<"message@1.0">>, Msg2}, <<"compute">>}, + {{as, <<"message@1.0">>, Msg2}, <<"slot">>} + ], + Opts + ), + case TargetSlot of + not_found -> + % The slot is not set, so we need to serve the latest known state. +``` + +### compute_to_slot + +Continually get and apply the next assignment from the scheduler until + +```erlang +compute_to_slot(ProcID, Msg1, Msg2, TargetSlot, Opts) -> + CurrentSlot = hb_ao:get(<<"at-slot">>, Msg1, Opts#{ hashpath => ignore }), + ?event(compute_short, + {starting_compute, + {proc_id, ProcID}, + {current, CurrentSlot}, + {target, TargetSlot} + } + ), + case CurrentSlot of + CurrentSlot when CurrentSlot > TargetSlot -> + % The cache should already have the result, so we should never end up + % here. Depending on the type of process, 'rewinding' may require + % re-computing from a significantly earlier checkpoint, so for now + % we throw an error. +``` + +### compute_slot + +Compute a single slot for a process, given an initialized state. + +```erlang +compute_slot(ProcID, State, RawInputMsg, ReqMsg, Opts) -> + % Ensure that the next slot is the slot that we are expecting, just + % in case there is a scheduler device error. +``` + +### store_result + +Store the resulting state in the cache, potentially with the snapshot + +```erlang +store_result(ForceSnapshot, ProcID, Slot, Msg3, Msg2, Opts) -> + % Cache the `Snapshot' key as frequently as the node is configured to. +``` + +### should_snapshot + +Should we snapshot a new full state result? First, we check if the + +```erlang +should_snapshot(Slot, Msg3, Opts) -> + should_snapshot_slots(Slot, Opts) + orelse should_snapshot_time(Msg3, Opts). +``` + +### should_snapshot_slots + +Calculate if we should snapshot based on the number of slots. 
+ +```erlang +should_snapshot_slots(Slot, Opts) -> + case hb_opts:get(process_snapshot_slots, ?DEFAULT_SNAPSHOT_SLOTS, Opts) of + Undef when (Undef == undefined) or (Undef == <<"false">>) -> + false; + RawSnapshotSlots -> + SnapshotSlots = hb_util:int(RawSnapshotSlots), + Slot rem SnapshotSlots == 0 + end. +``` + +### should_snapshot_time + +Calculate if we should snapshot based on the elapsed time since the last + +```erlang +should_snapshot_time(Msg3, Opts) -> + case hb_opts:get(process_snapshot_time, ?DEFAULT_SNAPSHOT_TIME, Opts) of + Undef when (Undef == undefined) or (Undef == <<"false">>) -> + false; + RawSecs -> + Secs = hb_util:int(RawSecs), + case hb_private:get(<<"last-snapshot">>, Msg3, undefined, Opts) of + undefined -> + ?event( + debug_interval, + {no_last_snapshot, + {interval, Secs}, + {msg, Msg3} + } + ), + true; + OldTimestamp -> + ?event( + debug_interval, + {calculating, + {secs, Secs}, + {timestamp, OldTimestamp}, + {now, os:system_time(second)} + } + ), + os:system_time(second) > OldTimestamp + hb_util:int(Secs) + end + end. +``` + +### now + +Returns the known state of the process at either the current slot, or + +```erlang +now(RawMsg1, Msg2, Opts) -> + Msg1 = ensure_process_key(RawMsg1, Opts), + ProcessID = process_id(Msg1, #{}, Opts), + case hb_opts:get(process_now_from_cache, false, Opts) of + false -> + {ok, CurrentSlot} = + hb_ao:resolve( + Msg1, + #{ <<"path">> => <<"slot/current">> }, + Opts + ), + ?event({now_called, {process, ProcessID}, {slot, CurrentSlot}}), + hb_ao:resolve( + Msg1, + #{ <<"path">> => <<"compute">>, <<"slot">> => CurrentSlot }, + Opts + ); + CacheParam -> + % We are serving the latest known state from the cache, rather + % than computing it. 
+``` + +### push + +Recursively push messages to the scheduler until we find a message +Ensure that the process message we have in memory is live and + +```erlang +push(Msg1, Msg2, Opts) -> + ProcBase = ensure_process_key(Msg1, Opts), + run_as(<<"push">>, ProcBase, Msg2, Opts). +``` + +### ensure_loaded + +Recursively push messages to the scheduler until we find a message +Ensure that the process message we have in memory is live and + +```erlang +ensure_loaded(Msg1, Msg2, Opts) -> + % Get the nonce we are currently on and the inbound nonce. +``` + +### without_snapshot + +Remove the `snapshot` key from a message and return it. +Run a message against Msg1, with the device being swapped out for + +```erlang +without_snapshot(Msg, Opts) -> + hb_maps:remove(<<"snapshot">>, Msg, Opts). +``` + +### run_as + +Remove the `snapshot` key from a message and return it. +Run a message against Msg1, with the device being swapped out for + +```erlang +run_as(Key, Msg1, Path, Opts) when not is_map(Path) -> + run_as(Key, Msg1, #{ <<"path">> => Path }, Opts); +``` + +### run_as + +Remove the `snapshot` key from a message and return it. +Run a message against Msg1, with the device being swapped out for + +```erlang +run_as(Key, Msg1, Msg2, Opts) -> + % Store the original device so we can restore it after execution + BaseDevice = hb_maps:get(<<"device">>, Msg1, not_found, Opts), + ?event({running_as, {key, {explicit, Key}}, {req, Msg2}}), + % Prepare the message with the specialized device configuration. +``` + +### as_process + +Change the message to for that has the device set as this module. + +```erlang +as_process(Msg1, Opts) -> + {ok, Proc} = dev_message:set(Msg1, #{ <<"device">> => <<"process@1.0">> }, Opts), + Proc. +``` + +### ensure_process_key + +Helper function to store a copy of the `process` key in the message. 
+ +```erlang +ensure_process_key(Msg1, Opts) -> + case hb_maps:get(<<"process">>, Msg1, not_found, Opts) of + not_found -> + % If the message has lost its signers, we need to re-read it from + % the cache. This can happen if the message was 'cast' to a different + % device, leading the signers to be unset. +``` + +### init + +```erlang +init() -> + application:ensure_all_started(hb), + ok. +``` + +### test_base_process + +Generate a process message with a random number, and no + +```erlang +test_base_process() -> + test_base_process(#{}). +``` + +### test_base_process + +```erlang +test_base_process(Opts) -> + Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), + Address = hb_util:human_id(ar_wallet:to_address(Wallet)), + hb_message:commit(#{ + <<"device">> => <<"process@1.0">>, + <<"scheduler-device">> => <<"scheduler@1.0">>, + <<"scheduler-location">> => hb_opts:get(scheduler, Address, Opts), + <<"type">> => <<"Process">>, + <<"test-random-seed">> => rand:uniform(1337) + }, Wallet). +``` + +### test_wasm_process + +```erlang +test_wasm_process(WASMImage) -> + test_wasm_process(WASMImage, #{}). +``` + +### test_wasm_process + +```erlang +test_wasm_process(WASMImage, Opts) -> + Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), + #{ <<"image">> := WASMImageID } = dev_wasm:cache_wasm_image(WASMImage, Opts), + hb_message:commit( + hb_maps:merge( + hb_message:uncommitted(test_base_process(Opts), Opts), + #{ + <<"execution-device">> => <<"stack@1.0">>, + <<"device-stack">> => [<<"wasm-64@1.0">>], + <<"image">> => WASMImageID + }, + Opts + ), + Opts#{ priv_wallet => Wallet} + ). +``` + +### test_aos_process + +Generate a process message with a random number, and the + +```erlang +test_aos_process() -> + test_aos_process(#{}). +``` + +### test_aos_process + +```erlang +test_aos_process(Opts) -> + test_aos_process(Opts, [ + <<"wasi@1.0">>, + <<"json-iface@1.0">>, + <<"wasm-64@1.0">>, + <<"multipass@1.0">> + ]). 
+``` + +### test_aos_process + +```erlang +test_aos_process(Opts, Stack) -> + Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), + Address = hb_util:human_id(ar_wallet:to_address(Wallet)), + WASMProc = test_wasm_process(<<"test/aos-2-pure-xs.wasm">>, Opts), + hb_message:commit( + hb_maps:merge( + hb_message:uncommitted(WASMProc, Opts), + #{ + <<"device-stack">> => Stack, + <<"execution-device">> => <<"stack@1.0">>, + <<"scheduler-device">> => <<"scheduler@1.0">>, + <<"output-prefix">> => <<"wasm">>, + <<"patch-from">> => <<"/results/outbox">>, + <<"passes">> => 2, + <<"stack-keys">> => + [ + <<"init">>, + <<"compute">>, + <<"snapshot">>, + <<"normalize">> + ], + <<"scheduler">> => + hb_opts:get(scheduler, Address, Opts), + <<"authority">> => + hb_opts:get(authority, Address, Opts) + }, Opts), + Opts#{ priv_wallet => Wallet} + ). +``` + +### dev_test_process + +Generate a device that has a stack of two `dev_test`s for + +```erlang +dev_test_process() -> + Wallet = hb:wallet(), + hb_message:commit( + hb_maps:merge(test_base_process(), #{ + <<"execution-device">> => <<"stack@1.0">>, + <<"device-stack">> => [<<"test-device@1.0">>, <<"test-device@1.0">>] + }, #{}), + Wallet + ). +``` + +### schedule_test_message + +```erlang +schedule_test_message(Msg1, Text, Opts) -> + schedule_test_message(Msg1, Text, #{}, Opts). +``` + +### schedule_test_message + +```erlang +schedule_test_message(Msg1, Text, MsgBase, Opts) -> + Wallet = hb:wallet(), + UncommittedBase = hb_message:uncommitted(MsgBase, Opts), + Msg2 = + hb_message:commit(#{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + hb_message:commit( + UncommittedBase#{ + <<"type">> => <<"Message">>, + <<"test-label">> => Text + }, + Opts#{ priv_wallet => Wallet} + ) + }, + Opts#{ priv_wallet => Wallet} + ), + {ok, _} = hb_ao:resolve(Msg1, Msg2, Opts). +``` + +### schedule_aos_call + +```erlang +schedule_aos_call(Msg1, Code) -> + schedule_aos_call(Msg1, Code, #{}). 
+``` + +### schedule_aos_call + +```erlang +schedule_aos_call(Msg1, Code, Opts) -> + Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), + ProcID = hb_message:id(Msg1, all), + Msg2 = + hb_message:commit( + #{ + <<"action">> => <<"Eval">>, + <<"data">> => Code, + <<"target">> => ProcID + }, + Opts#{priv_wallet => Wallet} + ), + schedule_test_message(Msg1, <<"TEST MSG">>, Msg2, Opts). +``` + +### schedule_wasm_call + +```erlang +schedule_wasm_call(Msg1, FuncName, Params) -> + schedule_wasm_call(Msg1, FuncName, Params, #{}). +``` + +### schedule_wasm_call + +```erlang +schedule_wasm_call(Msg1, FuncName, Params, Opts) -> + Wallet = hb:wallet(), + Msg2 = hb_message:commit(#{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + hb_message:commit( + #{ + <<"type">> => <<"Message">>, + <<"function">> => FuncName, + <<"parameters">> => Params + }, + Opts#{ priv_wallet => Wallet} + ) + }, Opts#{ priv_wallet => Wallet}), + ?assertMatch({ok, _}, hb_ao:resolve(Msg1, Msg2, Opts)). +``` + +### schedule_on_process_test_ + +```erlang +schedule_on_process_test_() -> + {timeout, 30, fun()-> + init(), + Msg1 = test_aos_process(), + schedule_test_message(Msg1, <<"TEST TEXT 1">>, #{}), + schedule_test_message(Msg1, <<"TEST TEXT 2">>, #{}), + ?event(messages_scheduled), + {ok, SchedulerRes} = + hb_ao:resolve(Msg1, #{ + <<"method">> => <<"GET">>, + <<"path">> => <<"schedule">> + }, #{}), + ?assertMatch( + <<"TEST TEXT 1">>, + hb_ao:get(<<"assignments/0/body/test-label">>, SchedulerRes) + ), + ?assertMatch( + <<"TEST TEXT 2">>, + hb_ao:get(<<"assignments/1/body/test-label">>, SchedulerRes) + ) + end}. 
+``` + +### get_scheduler_slot_test + +```erlang +get_scheduler_slot_test() -> + init(), + Msg1 = test_base_process(), + schedule_test_message(Msg1, <<"TEST TEXT 1">>, #{}), + schedule_test_message(Msg1, <<"TEST TEXT 2">>, #{}), + Msg2 = #{ + <<"path">> => <<"slot">>, + <<"method">> => <<"GET">> + }, + ?assertMatch( + {ok, #{ <<"current">> := CurrentSlot }} when CurrentSlot > 0, + hb_ao:resolve(Msg1, Msg2, #{}) + ). +``` + +### recursive_path_resolution_test + +```erlang +recursive_path_resolution_test() -> + init(), + Msg1 = test_base_process(), + schedule_test_message(Msg1, <<"TEST TEXT 1">>, #{}), + CurrentSlot = + hb_ao:resolve( + Msg1, + #{ <<"path">> => <<"slot/current">> }, + #{ <<"hashpath">> => ignore } + ), + ?event({resolved_current_slot, CurrentSlot}), + ?assertMatch( + CurrentSlot when CurrentSlot > 0, + CurrentSlot + ), + ok. +``` + +### test_device_compute_test + +```erlang +test_device_compute_test() -> + init(), + Msg1 = dev_test_process(), + schedule_test_message(Msg1, <<"TEST TEXT 1">>, #{}), + schedule_test_message(Msg1, <<"TEST TEXT 2">>, #{}), + ?assertMatch( + {ok, <<"TEST TEXT 2">>}, + hb_ao:resolve( + Msg1, + <<"schedule/assignments/1/body/test-label">>, + #{ <<"hashpath">> => ignore } + ) + ), + Msg2 = #{ <<"path">> => <<"compute">>, <<"slot">> => 1 }, + {ok, Msg3} = hb_ao:resolve(Msg1, Msg2, #{}), + ?event({computed_message, {msg3, Msg3}}), + ?assertEqual(1, hb_ao:get(<<"results/assignment-slot">>, Msg3, #{})), + ?assertEqual([1,1,0,0], hb_ao:get(<<"already-seen">>, Msg3, #{})). 
+``` + +### wasm_compute_test + +```erlang +wasm_compute_test() -> + init(), + Msg1 = test_wasm_process(<<"test/test-64.wasm">>), + schedule_wasm_call(Msg1, <<"fac">>, [5.0]), + schedule_wasm_call(Msg1, <<"fac">>, [6.0]), + {ok, Msg3} = + hb_ao:resolve( + Msg1, + #{ <<"path">> => <<"compute">>, <<"slot">> => 0 }, + #{ <<"hashpath">> => ignore } + ), + ?event({computed_message, {msg3, Msg3}}), + ?assertEqual([120.0], hb_ao:get(<<"results/output">>, Msg3, #{})), + {ok, Msg4} = + hb_ao:resolve( + Msg1, + #{ <<"path">> => <<"compute">>, <<"slot">> => 1 }, + #{ <<"hashpath">> => ignore } + ), + ?event({computed_message, {msg4, Msg4}}), + ?assertEqual([720.0], hb_ao:get(<<"results/output">>, Msg4, #{})). +``` + +### wasm_compute_from_id_test + +```erlang +wasm_compute_from_id_test() -> + init(), + Opts = #{ cache_control => <<"always">> }, + Msg1 = test_wasm_process(<<"test/test-64.wasm">>), + schedule_wasm_call(Msg1, <<"fac">>, [5.0], Opts), + Msg1ID = hb_message:id(Msg1, all), + Msg2 = #{ <<"path">> => <<"compute">>, <<"slot">> => 0 }, + {ok, Msg3} = hb_ao:resolve(Msg1ID, Msg2, Opts), + ?event(process_compute, {computed_message, {msg3, Msg3}}), + ?assertEqual([120.0], hb_ao:get(<<"results/output">>, Msg3, Opts)). 
+``` + +### http_wasm_process_by_id_test + +```erlang +http_wasm_process_by_id_test() -> + rand:seed(default), + SchedWallet = ar_wallet:new(), + Node = hb_http_server:start_node(Opts = #{ + port => 10000 + rand:uniform(10000), + priv_wallet => SchedWallet, + cache_control => <<"always">>, + process_async_cache => false, + store => #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-mainnet">> + } + }), + Wallet = ar_wallet:new(), + Proc = test_wasm_process(<<"test/test-64.wasm">>, Opts), + hb_cache:write(Proc, Opts), + ProcID = hb_util:human_id(hb_message:id(Proc, all)), + InitRes = + hb_http:post( + Node, + << "/schedule" >>, + Proc, + #{} + ), + ?event({schedule_proc_res, InitRes}), + ExecMsg = + hb_message:commit(#{ + <<"target">> => ProcID, + <<"type">> => <<"Message">>, + <<"function">> => <<"fac">>, + <<"parameters">> => [5.0] + }, + Wallet + ), + {ok, Msg3} = hb_http:post(Node, << ProcID/binary, "/schedule">>, ExecMsg, #{}), + ?event({schedule_msg_res, {msg3, Msg3}}), + {ok, Msg4} = + hb_http:get( + Node, + #{ + <<"path">> => << ProcID/binary, "/compute">>, + <<"slot">> => 1 + }, + #{} + ), + ?event({compute_msg_res, {msg4, Msg4}}), + ?assertEqual([120.0], hb_ao:get(<<"results/output">>, Msg4, #{})). +``` + +### aos_compute_test_ + +```erlang +aos_compute_test_() -> + {timeout, 30, fun() -> + init(), + Msg1 = test_aos_process(), + schedule_aos_call(Msg1, <<"return 1+1">>), + schedule_aos_call(Msg1, <<"return 2+2">>), + Msg2 = #{ <<"path">> => <<"compute">>, <<"slot">> => 0 }, + {ok, Msg3} = hb_ao:resolve(Msg1, Msg2, #{}), + {ok, Res} = hb_ao:resolve(Msg3, <<"results">>, #{}), + ?event({computed_message, {msg3, Res}}), + {ok, Data} = hb_ao:resolve(Res, <<"data">>, #{}), + ?event({computed_data, Data}), + ?assertEqual(<<"2">>, Data), + Msg4 = #{ <<"path">> => <<"compute">>, <<"slot">> => 1 }, + {ok, Msg5} = hb_ao:resolve(Msg1, Msg4, #{}), + ?assertEqual(<<"4">>, hb_ao:get(<<"results/data">>, Msg5, #{})), + {ok, Msg5} + end}. 
+``` + +### aos_browsable_state_test_ + +```erlang +aos_browsable_state_test_() -> + {timeout, 30, fun() -> + init(), + Msg1 = test_aos_process(), + schedule_aos_call(Msg1, + <<"table.insert(ao.outbox.Messages, { target = ao.id, ", + "action = \"State\", ", + "data = { deep = 4, bool = true } })">> + ), + Msg2 = #{ <<"path">> => <<"compute">>, <<"slot">> => 0 }, + {ok, Msg3} = + hb_ao:resolve_many( + [Msg1, Msg2, <<"results">>, <<"outbox">>, 1, <<"data">>, <<"deep">>], + #{ cache_control => <<"always">> } + ), + ID = hb_message:id(Msg1), + ?event({computed_message, {id, {explicit, ID}}}), + ?assertEqual(4, Msg3) + end}. +``` + +### aos_state_access_via_http_test_ + +```erlang +aos_state_access_via_http_test_() -> + {timeout, 60, fun() -> + rand:seed(default), + Wallet = ar_wallet:new(), + Node = hb_http_server:start_node(Opts = #{ + port => 10000 + rand:uniform(10000), + priv_wallet => Wallet, + cache_control => <<"always">>, + store => #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-mainnet">> + }, + force_signed_requests => true + }), + Proc = test_aos_process(Opts), + ProcID = hb_util:human_id(hb_message:id(Proc, all)), + {ok, _InitRes} = hb_http:post(Node, <<"/schedule">>, Proc, Opts), + Msg2 = hb_message:commit(#{ + <<"data-protocol">> => <<"ao">>, + <<"variant">> => <<"ao.N.1">>, + <<"type">> => <<"Message">>, + <<"action">> => <<"Eval">>, + <<"data">> => + <<"table.insert(ao.outbox.Messages, { target = ao.id,", + " action = \"State\", data = { ", + "[\"content-type\"] = \"text/html\", ", + "[\"body\"] = \"

Hello, world!

\"", + "}})">>, + <<"target">> => ProcID + }, Wallet), + {ok, Msg3} = hb_http:post(Node, << ProcID/binary, "/schedule">>, Msg2, Opts), + ?event({schedule_msg_res, {msg3, Msg3}}), + {ok, Msg4} = + hb_http:get( + Node, + #{ + <<"path">> => << ProcID/binary, "/compute/results/outbox/1/data" >>, + <<"slot">> => 1 + }, + Opts + ), + ?event({compute_msg_res, {msg4, Msg4}}), + ?event( + {try_yourself, + {explicit, + << + Node/binary, + "/", + ProcID/binary, + "/compute&slot=1/results/outbox/1/data" + >> + } + } + ), + ?assertMatch(#{ <<"body">> := <<"

Hello, world!

">> }, Msg4), + ok + end}. +``` + +### aos_state_patch_test_ + +```erlang +aos_state_patch_test_() -> + {timeout, 30, fun() -> + Wallet = hb:wallet(), + init(), + Msg1Raw = test_aos_process(#{}, [ + <<"wasi@1.0">>, + <<"json-iface@1.0">>, + <<"wasm-64@1.0">>, + <<"patch@1.0">>, + <<"multipass@1.0">> + ]), + {ok, Msg1} = hb_message:with_only_committed(Msg1Raw, #{}), + ProcID = hb_message:id(Msg1, all), + Msg2 = (hb_message:commit(#{ + <<"data-protocol">> => <<"ao">>, + <<"variant">> => <<"ao.N.1">>, + <<"target">> => ProcID, + <<"type">> => <<"Message">>, + <<"action">> => <<"Eval">>, + <<"data">> => + << + "table.insert(ao.outbox.Messages, " + "{ method = \"PATCH\", x = \"banana\" })" + >> + }, Wallet))#{ <<"path">> => <<"schedule">>, <<"method">> => <<"POST">> }, + {ok, _} = hb_ao:resolve(Msg1, Msg2, #{}), + Msg3 = #{ <<"path">> => <<"compute">>, <<"slot">> => 0 }, + {ok, Msg4} = hb_ao:resolve(Msg1, Msg3, #{}), + ?event({computed_message, {msg3, Msg4}}), + {ok, Data} = hb_ao:resolve(Msg4, <<"x">>, #{}), + ?event({computed_data, Data}), + ?assertEqual(<<"banana">>, Data) + end}. +``` + +### restore_test_ + +Manually test state restoration without using the cache. + +```erlang +restore_test_() -> {timeout, 30, fun do_test_restore/0}. +``` + +### do_test_restore + +Manually test state restoration without using the cache. + +```erlang +do_test_restore() -> + % Init the process and schedule 3 messages: + % 1. Set variables in Lua. +``` + +### now_results_test_ + +```erlang +now_results_test_() -> + {timeout, 30, fun() -> + init(), + Msg1 = test_aos_process(), + schedule_aos_call(Msg1, <<"return 1+1">>), + schedule_aos_call(Msg1, <<"return 2+2">>), + ?assertEqual({ok, <<"4">>}, hb_ao:resolve(Msg1, <<"now/results/data">>, #{})) + end}. 
+``` + +### prior_results_accessible_test_ + +```erlang +prior_results_accessible_test_() -> + {timeout, 30, fun() -> + init(), + Opts = #{ + process_async_cache => false + }, + Msg1 = test_aos_process(), + schedule_aos_call(Msg1, <<"return 1+1">>), + schedule_aos_call(Msg1, <<"return 2+2">>), + ?assertEqual( + {ok, <<"4">>}, + hb_ao:resolve(Msg1, <<"now/results/data">>, Opts) + ), + {ok, Results} = + hb_ao:resolve( + Msg1, + #{ <<"path">> => <<"compute">>, <<"slot">> => 1 }, + Opts + ), + ?assertMatch( + #{ <<"results">> := #{ <<"data">> := <<"4">> } }, + hb_cache:ensure_all_loaded(Results, Opts) + ) + end}. +``` + +### persistent_process_test + +```erlang +persistent_process_test() -> + {timeout, 30, fun() -> + init(), + Msg1 = test_aos_process(), + schedule_aos_call(Msg1, <<"X=1">>), + schedule_aos_call(Msg1, <<"return 2">>), + schedule_aos_call(Msg1, <<"return X">>), + T0 = hb:now(), + FirstSlotMsg2 = #{ + <<"path">> => <<"compute">>, + <<"slot">> => 0 + }, + ?assertMatch( + {ok, _}, + hb_ao:resolve(Msg1, FirstSlotMsg2, #{ spawn_worker => true }) + ), + T1 = hb:now(), + ThirdSlotMsg2 = #{ + <<"path">> => <<"compute">>, + <<"slot">> => 2 + }, + Res = hb_ao:resolve(Msg1, ThirdSlotMsg2, #{}), + ?event({computed_message, {msg3, Res}}), + ?assertMatch( + {ok, _}, + Res + ), + T2 = hb:now(), + ?event(benchmark, {runtimes, {first_run, T1 - T0}, {second_run, T2 - T1}}), + % The second resolve should be much faster than the first resolve, as the + % process is already running. 
+``` + +### simple_wasm_persistent_worker_benchmark_test + +```erlang +simple_wasm_persistent_worker_benchmark_test() -> + init(), + BenchTime = 1, + Msg1 = test_wasm_process(<<"test/test-64.wasm">>), + schedule_wasm_call(Msg1, <<"fac">>, [5.0]), + schedule_wasm_call(Msg1, <<"fac">>, [6.0]), + {ok, Initialized} = + hb_ao:resolve( + Msg1, + #{ <<"path">> => <<"compute">>, <<"slot">> => 1 }, + #{ spawn_worker => true, process_workers => true } + ), + Iterations = hb_test_utils:benchmark( + fun(Iteration) -> + schedule_wasm_call( + Initialized, + <<"fac">>, + [5.0] + ), + ?assertMatch( + {ok, _}, + hb_ao:resolve( + Initialized, + #{ <<"path">> => <<"compute">>, <<"slot">> => Iteration + 1 }, + #{} + ) + ) + end, + BenchTime + ), + ?event(benchmark, {scheduled, Iterations}), + hb_format:eunit_print( + "Scheduled and evaluated ~p simple wasm process messages in ~p s (~s msg/s)", + [Iterations, BenchTime, hb_util:human_int(Iterations / BenchTime)] + ), + ?assert(Iterations >= 2), + ok. +``` + +### aos_persistent_worker_benchmark_test_ + +```erlang +aos_persistent_worker_benchmark_test_() -> + {timeout, 30, fun() -> + BenchTime = 5, + init(), + Msg1 = test_aos_process(), + schedule_aos_call(Msg1, <<"X=1337">>), + FirstSlotMsg2 = #{ + <<"path">> => <<"compute">>, + <<"slot">> => 0 + }, + ?assertMatch( + {ok, _}, + hb_ao:resolve(Msg1, FirstSlotMsg2, #{ spawn_worker => true }) + ), + Iterations = hb_test_utils:benchmark( + fun(Iteration) -> + schedule_aos_call( + Msg1, + <<"return X + ", (integer_to_binary(Iteration))/binary>> + ), + ?assertMatch( + {ok, _}, + hb_ao:resolve( + Msg1, + #{ <<"path">> => <<"compute">>, <<"slot">> => Iteration }, + #{} + ) + ) + end, + BenchTime + ), + ?event(benchmark, {scheduled, Iterations}), + hb_format:eunit_print( + "Scheduled and evaluated ~p AOS process messages in ~p s (~s msg/s)", + [Iterations, BenchTime, hb_util:human_int(Iterations / BenchTime)] + ), + ?assert(Iterations >= 2), + ok +``` + +--- + +*Generated from 
[dev_process.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_process.erl)* diff --git a/docs/book/src/dev_process_cache.erl.md b/docs/book/src/dev_process_cache.erl.md new file mode 100644 index 000000000..5b7170958 --- /dev/null +++ b/docs/book/src/dev_process_cache.erl.md @@ -0,0 +1,204 @@ +# dev_process_cache + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_process_cache.erl) + +A wrapper around the hb_cache module that provides a more +convenient interface for reading the result of a process at a given slot or +message ID. + +--- + +## Exported Functions + +- `latest/2` +- `latest/3` +- `latest/4` +- `read/2` +- `read/3` +- `write/4` + +--- + +### read + +A wrapper around the hb_cache module that provides a more +Read the result of a process at a given slot. + +```erlang +read(ProcID, Opts) -> + hb_util:ok(latest(ProcID, Opts)). +``` + +### read + +```erlang +read(ProcID, SlotRef, Opts) -> + ?event({reading_computed_result, ProcID, SlotRef}), + Path = path(ProcID, SlotRef, Opts), + hb_cache:read(Path, Opts). +``` + +### write + +Write a process computation result to the cache. + +```erlang +write(ProcID, Slot, Msg, Opts) -> + % Write the item to the cache in the root of the store. +``` + +### path + +Calculate the path of a result, given a process ID and a slot. + +```erlang +path(ProcID, Ref, Opts) -> + path(ProcID, Ref, [], Opts). +``` + +### path + +```erlang +path(ProcID, Ref, PathSuffix, Opts) -> + Store = hb_opts:get(store, no_viable_store, Opts), + hb_store:path( + Store, + [ + <<"computed">>, + hb_util:human_id(ProcID) + ] ++ + case Ref of + Int when is_integer(Int) -> ["slot", integer_to_binary(Int)]; + root -> []; + slot_root -> ["slot"]; + _ -> [Ref] + end ++ PathSuffix + ). +``` + +### latest + +Retrieve the latest slot for a given process. Optionally state a limit + +```erlang +latest(ProcID, Opts) -> latest(ProcID, [], Opts). +``` + +### latest + +Retrieve the latest slot for a given process. 
Optionally state a limit + +```erlang +latest(ProcID, RequiredPath, Opts) -> + latest(ProcID, RequiredPath, undefined, Opts). +``` + +### latest + +```erlang +latest(ProcID, RawRequiredPath, Limit, Opts) -> + ?event( + {latest_called, + {proc_id, ProcID}, + {required_path, RawRequiredPath}, + {limit, Limit} + } + ), + % Convert the required path to a list of _binary_ keys. +``` + +### first_with_path + +Find the latest assignment with the requested path suffix. + +```erlang +first_with_path(ProcID, RequiredPath, Slots, Opts) -> + first_with_path( + ProcID, + RequiredPath, + Slots, + Opts, + hb_opts:get(store, no_viable_store, Opts) + ). +``` + +### first_with_path + +```erlang +first_with_path(_ProcID, _Required, [], _Opts, _Store) -> + not_found; +``` + +### first_with_path + +```erlang +first_with_path(ProcID, RequiredPath, [Slot | Rest], Opts, Store) -> + RawPath = path(ProcID, Slot, RequiredPath, Opts), + ResolvedPath = hb_store:resolve(Store, RawPath), + ?event({trying_slot, {slot, Slot}, {path, RawPath}, {resolved_path, ResolvedPath}}), + case hb_store:type(Store, ResolvedPath) of + not_found -> + first_with_path(ProcID, RequiredPath, Rest, Opts, Store); + _ -> + Slot + end. +``` + +### process_cache_suite_test_ + +```erlang +process_cache_suite_test_() -> + hb_store:generate_test_suite( + [ + {"write and read process outputs", fun test_write_and_read_output/1}, + {"find latest output (with path)", fun find_latest_outputs/1} + ], + [ + {Name, Opts} + || + {Name, Opts} <- hb_store:test_stores() + ] + ). 
+``` + +### test_write_and_read_output + +Test for writing multiple computed outputs, then getting them by + +```erlang +test_write_and_read_output(Opts) -> + Proc = hb_cache:test_signed( + #{ <<"test-item">> => hb_cache:test_unsigned(<<"test-body-data">>) }), + ProcID = hb_util:human_id(hb_ao:get(id, Proc)), + Item1 = hb_cache:test_signed(<<"Simple signed output #1">>), + Item2 = hb_cache:test_unsigned(<<"Simple unsigned output #2">>), + {ok, Path0} = write(ProcID, 0, Item1, Opts), + {ok, Path1} = write(ProcID, 1, Item2, Opts), + {ok, DirectReadItem1} = hb_cache:read(Path0, Opts), + ?assert(hb_message:match(Item1, DirectReadItem1)), + {ok, DirectReadItem2} = hb_cache:read(Path1, Opts), + ?assert(hb_message:match(Item2, DirectReadItem2)), + {ok, ReadItem1BySlotNum} = read(ProcID, 0, Opts), + ?assert(hb_message:match(Item1, ReadItem1BySlotNum)), + {ok, ReadItem2BySlotNum} = read(ProcID, 1, Opts), + ?assert(hb_message:match(Item2, ReadItem2BySlotNum)), + {ok, ReadItem1ByID} = + read(ProcID, hb_util:human_id(hb_ao:get(id, Item1)), Opts), + ?assert(hb_message:match(Item1, ReadItem1ByID)), + {ok, ReadItem2ByID} = + read(ProcID, hb_util:human_id(hb_message:id(Item2, all)), Opts), + ?assert(hb_message:match(Item2, ReadItem2ByID)). +``` + +### find_latest_outputs + +Test for retrieving the latest computed output for a process. + +```erlang +find_latest_outputs(Opts) -> + % Create test environment. +``` + +--- + +*Generated from [dev_process_cache.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_process_cache.erl)* diff --git a/docs/book/src/dev_process_worker.erl.md b/docs/book/src/dev_process_worker.erl.md new file mode 100644 index 000000000..b02c5448f --- /dev/null +++ b/docs/book/src/dev_process_worker.erl.md @@ -0,0 +1,213 @@ +# dev_process_worker + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_process_worker.erl) + +A long-lived process worker that keeps state in memory between +calls. 
Implements the interface of `hb_ao` to receive and respond +to computation requests regarding a process as a singleton. + +--- + +## Exported Functions + +- `await/5` +- `group/3` +- `notify_compute/4` +- `server/3` +- `stop/1` + +--- + +### group + +A long-lived process worker that keeps state in memory between +Returns a group name for a request. The worker is responsible for all + +```erlang +group(Msg1, undefined, Opts) -> + hb_persistent:default_grouper(Msg1, undefined, Opts); +``` + +### group + +A long-lived process worker that keeps state in memory between +Returns a group name for a request. The worker is responsible for all + +```erlang +group(Msg1, Msg2, Opts) -> + case hb_opts:get(process_workers, false, Opts) of + false -> + hb_persistent:default_grouper(Msg1, Msg2, Opts); + true -> + case Msg2 of + undefined -> + hb_persistent:default_grouper(Msg1, undefined, Opts); + _ -> + case hb_path:matches(<<"compute">>, hb_path:hd(Msg2, Opts)) of + true -> + process_to_group_name(Msg1, Opts); + _ -> + hb_persistent:default_grouper(Msg1, Msg2, Opts) + end + end + end. +``` + +### process_to_group_name + +```erlang +process_to_group_name(Msg1, Opts) -> + Initialized = dev_process:ensure_process_key(Msg1, Opts), + ProcMsg = hb_ao:get(<<"process">>, Initialized, Opts#{ hashpath => ignore }), + ID = hb_message:id(ProcMsg, all), + ?event({process_to_group_name, {id, ID}, {msg1, Msg1}}), + hb_util:human_id(ID). +``` + +### server + +Spawn a new worker process. This is called after the end of the first + +```erlang +server(GroupName, Msg1, Opts) -> + ServerOpts = Opts#{ + await_inprogress => false, + spawn_worker => false, + process_workers => false + }, + % The maximum amount of time the worker will wait for a request before + % checking the cache for a snapshot. Default: 5 minutes. +``` + +### await + +Await a resolution from a worker executing the `process@1.0` device. 
+ +```erlang +await(Worker, GroupName, Msg1, Msg2, Opts) -> + case hb_path:matches(<<"compute">>, hb_path:hd(Msg2, Opts)) of + false -> + hb_persistent:default_await(Worker, GroupName, Msg1, Msg2, Opts); + true -> + TargetSlot = hb_ao:get(<<"slot">>, Msg2, any, Opts), + ?event({awaiting_compute, + {worker, Worker}, + {group, GroupName}, + {target_slot, TargetSlot} + }), + receive + {resolved, _, GroupName, {slot, RecvdSlot}, Res} + when RecvdSlot == TargetSlot orelse TargetSlot == any -> + ?event(compute_debug, {notified_of_resolution, + {target, TargetSlot}, + {group, GroupName} + }), + Res; + {resolved, _, GroupName, {slot, RecvdSlot}, _Res} -> + ?event(compute_debug, {waiting_again, + {target, TargetSlot}, + {recvd, RecvdSlot}, + {worker, Worker}, + {group, GroupName} + }), + await(Worker, GroupName, Msg1, Msg2, Opts); + {'DOWN', _R, process, Worker, _Reason} -> + ?event(compute_debug, + {leader_died, + {group, GroupName}, + {leader, Worker}, + {target, TargetSlot} + } + ), + {error, leader_died} + end + end. +``` + +### notify_compute + +Notify any waiters for a specific slot of the computed results. + +```erlang +notify_compute(GroupName, SlotToNotify, Msg3, Opts) -> + notify_compute(GroupName, SlotToNotify, Msg3, Opts, 0). 
+``` + +### notify_compute + +```erlang +notify_compute(GroupName, SlotToNotify, Msg3, Opts, Count) -> + ?event({notifying_of_computed_slot, {group, GroupName}, {slot, SlotToNotify}}), + receive + {resolve, Listener, GroupName, #{ <<"slot">> := SlotToNotify }, _ListenerOpts} -> + send_notification(Listener, GroupName, SlotToNotify, Msg3), + notify_compute(GroupName, SlotToNotify, Msg3, Opts, Count + 1); + {resolve, Listener, GroupName, Msg, _ListenerOpts} + when is_map(Msg) andalso not is_map_key(<<"slot">>, Msg) -> + send_notification(Listener, GroupName, SlotToNotify, Msg3), + notify_compute(GroupName, SlotToNotify, Msg3, Opts, Count + 1) + after 0 -> + ?event(worker_short, + {finished_notifying, + {group, GroupName}, + {slot, SlotToNotify}, + {listeners, Count} + } + ) + end. +``` + +### send_notification + +```erlang +send_notification(Listener, GroupName, SlotToNotify, Msg3) -> + ?event({sending_notification, {group, GroupName}, {slot, SlotToNotify}}), + Listener ! {resolved, self(), GroupName, {slot, SlotToNotify}, Msg3}. +``` + +### stop + +Stop a worker process. + +```erlang +stop(Worker) -> + exit(Worker, normal). +``` + +### test_init + +```erlang +test_init() -> + application:ensure_all_started(hb), + ok. +``` + +### info_test + +```erlang +info_test() -> + test_init(), + M1 = dev_process:test_wasm_process(<<"test/aos-2-pure-xs.wasm">>), + Res = hb_ao:info(M1, #{}), + ?assertEqual(fun dev_process_worker:group/3, hb_maps:get(grouper, Res, undefined, #{})). 
+``` + +### grouper_test + +```erlang +grouper_test() -> + test_init(), + M1 = dev_process:test_aos_process(), + M2 = #{ <<"path">> => <<"compute">>, <<"v">> => 1 }, + M3 = #{ <<"path">> => <<"compute">>, <<"v">> => 2 }, + M4 = #{ <<"path">> => <<"not-compute">>, <<"v">> => 3 }, + G1 = hb_persistent:group(M1, M2, #{ process_workers => true }), + G2 = hb_persistent:group(M1, M3, #{ process_workers => true }), + G3 = hb_persistent:group(M1, M4, #{ process_workers => true }), + ?event({group_samples, {g1, G1}, {g2, G2}, {g3, G3}}), + ?assertEqual(G1, G2), +``` + +--- + +*Generated from [dev_process_worker.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_process_worker.erl)* diff --git a/docs/book/src/dev_profile.erl.md b/docs/book/src/dev_profile.erl.md new file mode 100644 index 000000000..bffb37a8f --- /dev/null +++ b/docs/book/src/dev_profile.erl.md @@ -0,0 +1,414 @@ +# dev_profile + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_profile.erl) + +A module for running different profiling tools upon HyperBEAM executions. +This device allows a variety of profiling tools to be used and for their +outputs to be returned as messages, or displayed locally on the console. +When called from an AO-Core request, the path at the given key is resolved. +If the `eval` function is instead directly invoked via Erlang, the first +argument may be a function to profile instead. + +--- + +## Exported Functions + +- `eval/1` +- `eval/2` +- `eval/3` +- `eval/4` +- `info/1` + +--- + +### info + +A module for running different profiling tools upon HyperBEAM executions. +Default to the `eval` function. + +```erlang +info(_) -> + #{ + excludes => [<<"keys">>, <<"set">>], + default => fun eval/4 + }. +``` + +### eval + +Invoke a profiling tool on a function or an AO-Core resolution. If a + +```erlang +eval(Fun) -> eval(Fun, #{}). +``` + +### eval + +Invoke a profiling tool on a function or an AO-Core resolution. 
If a + +```erlang +eval(Fun, Opts) -> eval(Fun, #{}, Opts). +``` + +### eval + +Invoke a profiling tool on a function or an AO-Core resolution. If a + +```erlang +eval(Fun, Req, Opts) when is_function(Fun) -> + do_eval( + Fun, + case return_mode(Req, Opts, undefined) of + undefined -> Req#{ <<"return-mode">> => <<"open">> }; + _ -> Req + end, + Opts + ); +``` + +### eval + +Invoke a profiling tool on a function or an AO-Core resolution. If a + +```erlang +eval(Base, Request, Opts) -> + eval(<<"eval">>, Base, Request, Opts). +``` + +### eval + +Invoke a profiling tool on a function or an AO-Core resolution. If a + +```erlang +eval(PathKey, Base, Req, Opts) when not is_function(Base) -> + case hb_ao:get(PathKey, Req, undefined, Opts) of + undefined -> + { + error, + << + "Path key `", + (hb_util:bin(PathKey))/binary, + "` not found in request." + >> + }; + Path -> + do_eval( + fun() -> hb_ao:resolve(Req#{ <<"path">> => Path }, Opts) end, + Req, + Opts + ) + end. +``` + +### do_eval + +```erlang +do_eval(Fun, Req, Opts) -> + % Validate the request and options, then invoke the engine-specific profile + % function. We match the user-requested engine against the supported engines + % on the node. Each engine takes three arguments: + % 1. The function to profile. +``` + +### find_profiling_config + +Find the profiling options. The supported options for `profiling` in the + +```erlang +find_profiling_config(Opts) -> + case hb_opts:get(profiling, not_found, Opts) of + not_found -> + case hb_opts:get(mode, prod, Opts) of + prod -> false; + _ -> hb_features:test() + end; + EnableProfiling -> EnableProfiling + end. +``` + +### validate_enabled + +Validate that profiling is enabled. + +```erlang +validate_enabled(Opts) -> + case find_profiling_config(Opts) of + false -> {validation_error, disabled}; + _ -> true + end. +``` + +### validate_return_mode + +Validate that the request return mode is acceptable. 
We only allow the + +```erlang +validate_return_mode(Req, Opts) -> + case return_mode(Req, Opts) of + <<"open">> -> hb_opts:get(mode, prod, Opts) == debug; + _ -> true + end. +``` + +### validate_signer + +Validate that the request is from a valid signer, if set by the node + +```erlang +validate_signer(Req, Opts) -> + case find_profiling_config(Opts) of + ValidSigners when is_list(ValidSigners) -> + lists:any( + fun(Signer) -> lists:member(Signer, ValidSigners) end, + hb_message:signers(Req, Opts) + ); + EnableProfiling -> EnableProfiling + end orelse {validation_error, invalid_signer}. +``` + +### engine + +Return the profiling function for the given engine. + +```erlang +engine(<<"eflame">>) -> {ok, fun eflame_profile/3}; +``` + +### engine + +Return the profiling function for the given engine. + +```erlang +engine(<<"eprof">>) -> {ok, fun eprof_profile/3}; +``` + +### engine + +Return the profiling function for the given engine. + +```erlang +engine(<<"event">>) -> {ok, fun event_profile/3}; +``` + +### engine + +Return the profiling function for the given engine. + +```erlang +engine(default) -> {ok, default()}; +``` + +### engine + +Return the profiling function for the given engine. +Return the default profiling engine to use. `eflame` if preferred if + +```erlang +engine(Unknown) -> {unknown_engine, Unknown}. +``` + +### default + +Return the profiling function for the given engine. +Return the default profiling engine to use. `eflame` if preferred if + +```erlang +default() -> + case hb_features:eflame() of + true -> fun eflame_profile/3; + false -> fun eprof_profile/3 + end. +``` + +### eflame_profile + +Profile a function using the `eflame` tool. This tool is only available + +```erlang +eflame_profile(Fun, Req, Opts) -> + File = temp_file(), + Res = eflame:apply(normal, File, Fun, []), + MergeStacks = hb_maps:get(<<"mode">>, Req, <<"merge">>, Opts), + EflameDir = code:lib_dir(eflame), + % Get the name of the function to profile. 
If the path in the request is + % set, attempt to find it. If that is not found, we use the bare path. +``` + +### eflame_profile + +```erlang +eflame_profile(_Fun, _Req, _Opts) -> + {error, <<"eflame is not enabled.">>}. +-endif. +``` + +### eprof_profile + +Profile a function using the `eprof` tool. + +```erlang +eprof_profile(Fun, Req, Opts) -> + File = temp_file(), + % Attempt to profile the function, stopping the profiler afterwards. +``` + +### event_profile + +Profile using HyperBEAM's events. + +```erlang +event_profile(Fun, Req, Opts) -> + Start = hb_event:counters(), + Fun(), + End = hb_event:counters(), + Diff = hb_message:diff(Start, End, Opts), + case return_mode(Req, Opts) of + <<"message">> -> + {ok, Diff}; + <<"console">> -> + hb_format:print(Diff), + {ok, Diff} + end. +``` + +### return_mode + +Get the return mode of a profiler run. The run mode is set to `console` + +```erlang +return_mode(Req, Opts) -> + return_mode(Req, Opts, <<"message">>). +``` + +### return_mode + +Get the return mode of a profiler run. The run mode is set to `console` +Returns a temporary filename for use in a profiling run. + +```erlang +return_mode(Req, Opts, Default) -> + hb_ao:get(<<"return-mode">>, Req, Default, Opts). +``` + +### temp_file + +Get the return mode of a profiler run. The run mode is set to `console` +Returns a temporary filename for use in a profiling run. + +```erlang +temp_file() -> temp_file(<<"out">>). +``` + +### temp_file + +Get the return mode of a profiler run. The run mode is set to `console` +Returns a temporary filename for use in a profiling run. + +```erlang +temp_file(Ext) -> + << + "profile-", + (integer_to_binary(os:system_time(microsecond)))/binary, + ".", + Ext/binary + >>. +``` + +### eprof_fun_test + +```erlang +eprof_fun_test() -> test_engine(function, <<"eprof">>). +``` + +### eprof_resolution_test + +```erlang +eprof_resolution_test() -> test_engine(resolution, <<"eprof">>). +-ifdef(ENABLE_EFLAME). 
+``` + +### eflame_fun_test + +```erlang +eflame_fun_test() -> test_engine(function, <<"eflame">>). +``` + +### eflame_resolution_test + +```erlang +eflame_resolution_test() -> test_engine(resolution, <<"eflame">>). +-endif. +``` + +### test_engine + +Run a test and validate the output for a given engine. + +```erlang +test_engine(Type, Engine) -> + validate_profiler_output(Engine, test_profiler_exec(Type, Engine)). +``` + +### test_profiler_exec + +Invoke an engine in either a function (as called from Erlang) or + +```erlang +test_profiler_exec(function, Engine) -> + eval( + fun() -> dev_meta:build(#{}, #{}, #{}) end, + #{ <<"engine">> => Engine, <<"return-mode">> => <<"message">> }, + #{} + ); +``` + +### test_profiler_exec + +Invoke an engine in either a function (as called from Erlang) or + +```erlang +test_profiler_exec(resolution, Engine) -> + hb_ao:resolve( + #{ + <<"path">> => <<"/~profile@1.0/run?run=/~meta@1.0/build">>, + <<"engine">> => Engine, <<"return-mode">> => <<"message">> }, + #{} + ). +``` + +### validate_profiler_output + +Verify the expected type of output from a profiler. + +```erlang +validate_profiler_output(<<"eprof">>, Res) -> + ?assertMatch( + {ok, + #{ + <<"content-type">> := <<"text/plain">>, + <<"body">> := Body + } + } when byte_size(Body) > 100, + Res + ); +``` + +### validate_profiler_output + +Verify the expected type of output from a profiler. 
+
+```erlang
+validate_profiler_output(<<"eflame">>, Res) ->
+    ?assertMatch(
+        {ok,
+            #{
+                <<"content-type">> := <<"image/svg+xml">>,
+                <<"body">> := Body
+            }
+        } when byte_size(Body) > 100,
+        Res
+```
+
+---
+
+*Generated from [dev_profile.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_profile.erl)*
diff --git a/docs/book/src/dev_push.erl.md b/docs/book/src/dev_push.erl.md
new file mode 100644
index 000000000..d2a4bbd4d
--- /dev/null
+++ b/docs/book/src/dev_push.erl.md
@@ -0,0 +1,911 @@
+# dev_push
+
+[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_push.erl)
+
+`push@1.0` takes a message or slot number, evaluates it, and recursively
+pushes the resulting messages to other processes. The `push`ing mechanism
+continues until there are no remaining messages to push.
+
+---
+
+## Exported Functions
+
+- `push/3`
+
+---
+
+### push
+
+`push@1.0` takes a message or slot number, evaluates it, and recursively
+Push either a message or an assigned slot number. If a `Process` is
+
+```erlang
+push(Base, Req, Opts) ->
+    Process = dev_process:as_process(Base, Opts),
+    ?event(push, {push_base, {base, Process}, {req, Req}}, Opts),
+    case hb_ao:get(<<"slot">>, {as, <<"message@1.0">>, Req}, no_slot, Opts) of
+        no_slot ->
+            case schedule_initial_message(Process, Req, Opts) of
+                {ok, Assignment} ->
+                    case find_type(hb_ao:get(<<"body">>, Assignment, Opts), Opts) of
+                        <<"Process">> ->
+                            ?event(push,
+                                {initializing_process,
+                                    {base, Process},
+                                    {assignment, Assignment}},
+                                Opts
+                            ),
+                            {ok, Assignment};
+                        _ ->
+                            ?event(push,
+                                {pushing_message,
+                                    {base, Process},
+                                    {assignment, Assignment}
+                                },
+                                Opts
+                            ),
+                            push_with_mode(Process, Assignment, Opts)
+                    end;
+                {error, Res} -> {error, Res}
+            end;
+        _ -> push_with_mode(Process, Req, Opts)
+    end.
+``` + +### push_with_mode + +```erlang +push_with_mode(Process, Req, Opts) -> + Mode = is_async(Process, Req, Opts), + case Mode of + <<"sync">> -> + do_push(Process, Req, Opts); + <<"async">> -> + spawn(fun() -> do_push(Process, Req, Opts) end) + end. +``` + +### is_async + +Determine if the push is asynchronous. + +```erlang +is_async(Process, Req, Opts) -> + hb_ao:get_first( + [ + {Req, <<"push-mode">>}, + {Process, <<"push-mode">>}, + {Process, <<"process/push-mode">>} + ], + <<"sync">>, + Opts + ). +``` + +### do_push + +Push a message or slot number, including its downstream results. + +```erlang +do_push(PrimaryProcess, Assignment, Opts) -> + Slot = hb_ao:get(<<"slot">>, Assignment, Opts), + ID = dev_process:process_id(PrimaryProcess, #{}, Opts), + UncommittedID = + dev_process:process_id( + PrimaryProcess, + #{ <<"commitments">> => <<"none">> }, + Opts + ), + BaseID = calculate_base_id(PrimaryProcess, Opts), + ?event(debug, + {push_computing_outbox, + {process_id, ID}, + {base_id, BaseID}, + {slot, Slot} + } + ), + ?event(push, {push_computing_outbox, {process_id, ID}, {slot, Slot}}), + {Status, Result} = hb_ao:resolve( + {as, <<"process@1.0">>, PrimaryProcess}, + #{ <<"path">> => <<"compute/results">>, <<"slot">> => Slot }, + Opts#{ hashpath => ignore } + ), + % Determine if we should include the full compute result in our response. +``` + +### maybe_evaluate_message + +If the outbox message has a path we interpret it as a request to perform + +```erlang +maybe_evaluate_message(Message, Opts) -> + case hb_ao:get(<<"resolve">>, Message, Opts) of + not_found -> + {ok, Message}; + ResolvePath -> + ReqMsg = + maps:without( + [<<"target">>], + Message + ), + ResolveOpts = Opts#{ force_message => true }, + case hb_ao:resolve(ReqMsg#{ <<"path">> => ResolvePath }, ResolveOpts) of + {ok, EvalRes} -> + { + ok, + EvalRes#{ + <<"target">> => + hb_ao:get( + <<"target">>, + Message, + Opts + ) + } + }; + Err -> Err + end + end. 
+``` + +### push_result_message + +Push a downstream message result. The `Origin` map contains information + +```erlang +push_result_message(TargetProcess, MsgToPush, Origin, Opts) -> + NormMsgToPush = hb_util:lower_case_key_map(MsgToPush, Opts), + case hb_ao:get(<<"target">>, NormMsgToPush, undefined, Opts) of + undefined -> + ?event(push, + {skip_no_target, {msg, MsgToPush}, {origin, Origin}}, + Opts + ), + #{}; + TargetID -> + ?event(push, + {pushing_child, + {target, TargetID}, + {msg, MsgToPush}, + {origin, Origin} + }, + Opts + ), + case schedule_result(TargetProcess, MsgToPush, Origin, Opts) of + {ok, Assignment} -> + % Analyze the result of the message push. +``` + +### normalize_message + +Augment the message with from-* keys, if it doesn't already have them. + +```erlang +normalize_message(MsgToPush, Opts) -> + hb_ao:set( + MsgToPush, + #{ + <<"target">> => target_process(MsgToPush, Opts) + }, + Opts#{ hashpath => ignore } + ). +``` + +### target_process + +Find the target process ID for a message to push. + +```erlang +target_process(MsgToPush, Opts) -> + case hb_ao:get(<<"target">>, MsgToPush, Opts) of + not_found -> undefined; + RawTarget -> extract(target, RawTarget) + end. +``` + +### extract + +Return either the `target` or the `hint`. + +```erlang +extract(hint, Raw) -> + {_, Hint} = split_target(Raw), + Hint; +``` + +### extract + +Return either the `target` or the `hint`. + +```erlang +extract(target, Raw) -> + {Target, _} = split_target(Raw), + Target. +``` + +### split_target + +Split the target into the process ID and the optional query string. + +```erlang +split_target(RawTarget) -> + case binary:split(RawTarget, [<<"?">>, <<"&">>]) of + [Target, QStr] -> {Target, QStr}; + _ -> {RawTarget, <<>>} + end. +``` + +### calculate_base_id + +Calculate the base ID for a process. 
The base ID is not just the + +```erlang +calculate_base_id(GivenProcess, Opts) -> + Process = + case hb_ao:get(<<"process">>, GivenProcess, Opts#{ hashpath => ignore }) of + not_found -> GivenProcess; + Proc -> Proc + end, + BaseProcess = maps:without([<<"authority">>, <<"scheduler">>], Process), + {ok, BaseID} = hb_ao:resolve( + BaseProcess, + #{ <<"path">> => <<"id">>, <<"commitments">> => <<"none">> }, + Opts + ), + ?event({push_generated_base, {id, BaseID}, {base, BaseProcess}}), + BaseID. +``` + +### schedule_result + +Add the necessary keys to the message to be scheduled, then schedule it. + +```erlang +schedule_result(TargetProcess, MsgToPush, Origin, Opts) -> + schedule_result(TargetProcess, MsgToPush, <<"httpsig@1.0">>, Origin, Opts). +``` + +### schedule_result + +Add the necessary keys to the message to be scheduled, then schedule it. + +```erlang +schedule_result(TargetProcess, MsgToPush, Codec, Origin, Opts) -> + Target = hb_ao:get(<<"target">>, MsgToPush, Opts), + ?event(push, + {push_scheduling_result, + {target, {string, Target}}, + {target_process, TargetProcess}, + {msg, MsgToPush}, + {codec, Codec}, + {origin, Origin} + }, + Opts + ), + AugmentedMsg = augment_message(Origin, MsgToPush, Opts), + ?event(push, {prepared_msg, {msg, AugmentedMsg}}, Opts), + % Load the `accept-id`'d wallet into the `Opts` map, if requested. 
+``` + +### augment_message + +Set the necessary keys in order for the recipient to know where the + +```erlang +augment_message(Origin, ToSched, Opts) -> + ?event(push, {adding_keys, {origin, Origin}, {to, ToSched}}, Opts), + hb_message:uncommitted( + hb_ao:set( + ToSched, + #{ + <<"data-protocol">> => <<"ao">>, + <<"variant">> => <<"ao.N.1">>, + <<"type">> => <<"Message">>, + <<"from-process">> => maps:get(<<"process">>, Origin), + <<"from-uncommitted">> => maps:get(<<"from-uncommitted">>, Origin), + <<"from-base">> => maps:get(<<"from-base">>, Origin), + <<"from-scheduler">> => maps:get(<<"from-scheduler">>, Origin), + <<"from-authority">> => maps:get(<<"from-authority">>, Origin) + }, + Opts#{ hashpath => ignore } + ) + ). +``` + +### apply_security + +Apply the recipient's security policy to the message. Observes the + +```erlang +apply_security(Msg, TargetProcess, Codec, Opts) -> + apply_security(policy, Msg, TargetProcess, Codec, Opts). +``` + +### apply_security + +```erlang +apply_security(policy, Msg, TargetProcess, Codec, Opts) -> + case hb_ao:get(<<"policy">>, TargetProcess, not_found, Opts) of + not_found -> apply_security(authority, Msg, TargetProcess, Codec, Opts); + Policy -> + case hb_ao:resolve(Policy, Opts) of + {ok, PolicyOpts} -> + case hb_ao:get(<<"accept-committers">>, PolicyOpts, Opts) of + not_found -> + apply_security( + authority, + Msg, + TargetProcess, + Codec, + Opts + ); + Committers -> + commit_result(Msg, Committers, Codec, Opts) + end; + {error, Error} -> + ?event(push, {policy_error, {error, Error}}, Opts), + apply_security(authority, Msg, TargetProcess, Codec, Opts) + end + end; +``` + +### apply_security + +```erlang +apply_security(authority, Msg, TargetProcess, Codec, Opts) -> + case hb_ao:get(<<"authority">>, TargetProcess, Opts) of + not_found -> apply_security(default, Msg, TargetProcess, Codec, Opts); + Authorities when is_list(Authorities) -> + % The `authority` key has already been parsed into a list of + % committers. 
Sign with all local valid keys. +``` + +### apply_security + +```erlang +apply_security(default, Msg, TargetProcess, Codec, Opts) -> + ?event(push, {default_policy, {target, TargetProcess}}, Opts), + commit_result( + Msg, + [hb_util:human_id(hb_opts:get(priv_wallet, no_viable_wallet, Opts))], + Codec, + Opts + ). +``` + +### commit_result + +Attempt to sign a result message with the given committers. + +```erlang +commit_result(Msg, [], Codec, Opts) -> + case hb_opts:get(push_always_sign, true, Opts) of + true -> hb_message:commit(hb_message:uncommitted(Msg), Opts, Codec); + false -> Msg + end; +``` + +### commit_result + +Attempt to sign a result message with the given committers. + +```erlang +commit_result(Msg, Committers, Codec, Opts) -> + Signed = lists:foldl( + fun(Committer, Acc) -> + case hb_opts:as(Committer, Opts) of + {ok, CommitterOpts} -> + ?event(debug_commit, {signing_with_identity, Committer}), + hb_message:commit(Acc, CommitterOpts, Codec); + {error, not_found} -> + ?event(debug_commit, desired_signer_not_available_on_node), + ?event(push, + {policy_warning, + { + unknown_committer, + Committer + } + }, + Opts + ), + Acc + end + end, + hb_message:uncommitted(Msg), + Committers + ), + ?event(debug_commit, + {signed_message_as, {explicit, hb_message:signers(Signed, Opts)}} + ), + case hb_message:signers(Signed, Opts) of + [] -> + ?event(debug_commit, signing_with_default_identity), + commit_result(Msg, [], Codec, Opts); + _FoundSigners -> + Signed + end. +``` + +### schedule_initial_message + +Push a message or a process, prior to pushing the resulting slot number. 
+ +```erlang +schedule_initial_message(Base, Req, Opts) -> + ModReq = Req#{ <<"path">> => <<"schedule">>, <<"method">> => <<"POST">> }, + ?event(push, {initial_push, {base, Base}, {req, ModReq}}, Opts), + case hb_ao:resolve(Base, ModReq, Opts) of + {ok, Res} -> + case hb_ao:get(<<"status">>, Res, 200, Opts) of + 200 -> {ok, Res}; + 307 -> + Location = hb_ao:get(<<"location">>, Res, Opts), + remote_schedule_result(Location, Req, Opts) + end; + {error, Res = #{ <<"status">> := 422 }} -> + ?event(push, {initial_push_wrong_format, {error, Res}}, Opts), + {error, Res}; + {error, Res} -> + ?event(push, {initial_push_error, {error, Res}}, Opts), + {error, Res} + end. +``` + +### remote_schedule_result + +```erlang +remote_schedule_result(Location, SignedReq, Opts) -> + ?event(push, {remote_schedule_result, {location, Location}, {req, SignedReq}}, Opts), + {Node, RedirectPath} = parse_redirect(Location, Opts), + Path = + case find_type(SignedReq, Opts) of + <<"Process">> -> <<"/schedule">>; + <<"Message">> -> RedirectPath + end, + % Store a copy of the message for ourselves. +``` + +### find_type + +```erlang +find_type(Req, Opts) -> + hb_ao:get_first( + [ + {Req, <<"type">>}, + {Req, <<"body/type">>} + ], + Opts + ). +``` + +### parse_redirect + +```erlang +parse_redirect(Location, Opts) -> + Parsed = uri_string:parse(Location), + Node = + uri_string:recompose( + (hb_maps:remove(query, Parsed, Opts))#{ + path => <<"/schedule">> + } + ), + {Node, hb_maps:get(path, Parsed, undefined, Opts)}. 
+``` + +### full_push_test_ + +```erlang +full_push_test_() -> + {timeout, 30, fun() -> + dev_process:init(), + Opts = #{ + process_async_cache => false, + priv_wallet => hb:wallet(), + cache_control => <<"always">>, + store => [ + #{ <<"store-module">> => hb_store_fs, <<"name">> => <<"cache-TEST">> }, + #{ <<"store-module">> => hb_store_gateway, + <<"store">> => #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST">> + } + } + ] + }, + Msg1 = dev_process:test_aos_process(Opts), + hb_cache:write(Msg1, Opts), + {ok, SchedInit} = + hb_ao:resolve(Msg1, #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"schedule">>, + <<"body">> => Msg1 + }, + Opts + ), + ?event({test_setup, {msg1, Msg1}, {sched_init, SchedInit}}), + Script = ping_pong_script(2), + ?event({script, Script}), + {ok, Msg2} = dev_process:schedule_aos_call(Msg1, Script, Opts), + ?event({msg_sched_result, Msg2}), + {ok, StartingMsgSlot} = + hb_ao:resolve(Msg2, #{ <<"path">> => <<"slot">> }, Opts), + ?event({starting_msg_slot, StartingMsgSlot}), + Msg3 = + #{ + <<"path">> => <<"push">>, + <<"slot">> => StartingMsgSlot + }, + {ok, _} = hb_ao:resolve(Msg1, Msg3, Opts), + ?assertEqual( + {ok, <<"Done.">>}, + hb_ao:resolve(Msg1, <<"now/results/data">>, Opts) + ) + end}. +``` + +### push_as_identity_test_ + +```erlang +push_as_identity_test_() -> + {timeout, 90, fun() -> + dev_process:init(), + % Create a new identity for the scheduler. 
+``` + +### multi_process_push_test_ + +```erlang +multi_process_push_test_() -> + {timeout, 30, fun() -> + dev_process:init(), + Opts = #{ + priv_wallet => hb:wallet(), + cache_control => <<"always">> + }, + Proc1 = dev_process:test_aos_process(Opts), + hb_cache:write(Proc1, Opts), + {ok, _SchedInit1} = + hb_ao:resolve(Proc1, #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"schedule">>, + <<"body">> => Proc1 + }, + Opts + ), + {ok, _} = dev_process:schedule_aos_call(Proc1, reply_script()), + Proc2 = dev_process:test_aos_process(Opts), + hb_cache:write(Proc2, Opts), + {ok, _SchedInit2} = + hb_ao:resolve(Proc2, #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"schedule">>, + <<"body">> => Proc2 + }, + Opts + ), + ProcID1 = hb_message:id(Proc1, all, Opts), + ProcID2 = hb_message:id(Proc2, all, Opts), + ?event(push, {testing_with, {proc1_id, ProcID1}, {proc2_id, ProcID2}}), + {ok, ToPush} = dev_process:schedule_aos_call( + Proc2, + << + "Handlers.add(\"Pong\",\n" + " function (test) return true end,\n" + " function(m)\n" + " print(\"GOT PONG\")\n" + " end\n" + ")\n" + "Send({ Target = \"", (ProcID1)/binary, "\", Action = \"Ping\" })" + >> + ), + SlotToPush = hb_ao:get(<<"slot">>, ToPush, Opts), + ?event(push, {slot_to_push_proc2, SlotToPush}), + Msg3 = + #{ + <<"path">> => <<"push">>, + <<"slot">> => SlotToPush, + <<"result-depth">> => 1 + }, + {ok, PushResult} = hb_ao:resolve(Proc2, Msg3, Opts), + ?event(push, {push_result_proc2, PushResult}), + AfterPush = hb_ao:resolve(Proc2, <<"now/results/data">>, Opts), + ?event(push, {after_push, AfterPush}), + ?assertEqual({ok, <<"GOT PONG">>}, AfterPush) + end}. 
+``` + +### push_with_redirect_hint_test_disabled + +```erlang +push_with_redirect_hint_test_disabled() -> + {timeout, 30, fun() -> + dev_process:init(), + Stores = + [ + #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST">> + } + ], + ExtOpts = #{ priv_wallet => ar_wallet:new(), store => Stores }, + LocalOpts = #{ priv_wallet => hb:wallet(), store => Stores }, + ExtScheduler = hb_http_server:start_node(ExtOpts), + ?event(push, {external_scheduler, {location, ExtScheduler}}), + % Create the Pong server and client + Client = dev_process:test_aos_process(), + PongServer = dev_process:test_aos_process(ExtOpts), + % Push the new process that runs on the external scheduler + {ok, ServerSchedResp} = + hb_http:post( + ExtScheduler, + <<"/push">>, + PongServer, + ExtOpts + ), + ?event(push, {pong_server_sched_resp, ServerSchedResp}), + % Get the IDs of the server process + PongServerID = + hb_ao:get( + <<"process/id">>, + dev_process:ensure_process_key(PongServer, LocalOpts), + LocalOpts + ), + {ok, ServerScriptSchedResp} = + hb_http:post( + ExtScheduler, + <>, + #{ + <<"body">> => + hb_message:commit( + #{ + <<"target">> => PongServerID, + <<"action">> => <<"Eval">>, + <<"type">> => <<"Message">>, + <<"data">> => reply_script() + }, + ExtOpts + ) + }, + ExtOpts + ), + ?event(push, {pong_server_script_sched_resp, ServerScriptSchedResp}), + {ok, ToPush} = + dev_process:schedule_aos_call( + Client, + << + "Handlers.add(\"Pong\",\n" + " function (test) return true end,\n" + " function(m)\n" + " print(\"GOT PONG\")\n" + " end\n" + ")\n" + "Send({ Target = \"", + (PongServerID)/binary, "?hint=", + (ExtScheduler)/binary, + "\", Action = \"Ping\" })\n" + >>, + LocalOpts + ), + SlotToPush = hb_ao:get(<<"slot">>, ToPush, LocalOpts), + ?event(push, {slot_to_push_client, SlotToPush}), + Msg3 = #{ <<"path">> => <<"push">>, <<"slot">> => SlotToPush }, + {ok, PushResult} = hb_ao:resolve(Client, Msg3, LocalOpts), + ?event(push, {push_result_client, PushResult}), + 
AfterPush = hb_ao:resolve(Client, <<"now/results/data">>, LocalOpts), + ?event(push, {after_push, AfterPush}), + % Note: This test currently only gets a reply that the message was not + % trusted by the process. To fix this, we would have to add another + % trusted authority to the `test_aos_process' call. For now, this is + % enough to validate that redirects are pushed through correctly. +``` + +### push_prompts_encoding_change_test_ + +```erlang +push_prompts_encoding_change_test_() -> + {timeout, 30, fun push_prompts_encoding_change/0}. +``` + +### push_prompts_encoding_change + +```erlang +push_prompts_encoding_change() -> + dev_process:init(), + Opts = #{ + priv_wallet => hb:wallet(), + cache_control => <<"always">>, + store => + [ + #{ <<"store-module">> => hb_store_fs, <<"name">> => <<"cache-TEST">> }, + % Include a gateway store so that we can get the legacynet + % process when needed. +``` + +### oracle_push_test_ + +```erlang +oracle_push_test_() -> {timeout, 30, fun oracle_push/0}. +``` + +### oracle_push + +```erlang +oracle_push() -> + dev_process:init(), + Client = dev_process:test_aos_process(), + {ok, _} = hb_cache:write(Client, #{}), + {ok, _} = dev_process:schedule_aos_call(Client, oracle_script()), + Msg3 = + #{ + <<"path">> => <<"push">>, + <<"slot">> => 0 + }, + {ok, PushResult} = hb_ao:resolve(Client, Msg3, #{ priv_wallet => hb:wallet() }), + ?event({result, PushResult}), + ComputeRes = + hb_ao:resolve( + Client, + <<"now/results/data">>, + #{ priv_wallet => hb:wallet() } + ), + ?event({compute_res, ComputeRes}), + ?assertMatch({ok, _}, ComputeRes). +``` + +### nested_push_prompts_encoding_change_test_ + +Test that a message that generates another message which resides on an + +```erlang +nested_push_prompts_encoding_change_test_() -> + {timeout, 30, fun nested_push_prompts_encoding_change/0}. 
+``` + +### nested_push_prompts_encoding_change + +```erlang +nested_push_prompts_encoding_change() -> + dev_process:init(), + Opts = #{ + priv_wallet => hb:wallet(), + cache_control => <<"always">>, + store => hb_opts:get(store) + }, + ?event(push_debug, {opts, Opts}), + Msg1 = dev_process:test_aos_process(Opts), + hb_cache:write(Msg1, Opts), + {ok, SchedInit} = + hb_ao:resolve(Msg1, #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"schedule">>, + <<"body">> => Msg1 + }, + Opts + ), + ?event({test_setup, {msg1, Msg1}, {sched_init, SchedInit}}), + Script = message_to_legacynet_scheduler_script(), + ?event({script, Script}), + {ok, Msg2} = dev_process:schedule_aos_call(Msg1, Script), + ?event(push, {msg_sched_result, Msg2}), + {ok, StartingMsgSlot} = + hb_ao:resolve(Msg2, #{ <<"path">> => <<"slot">> }, Opts), + ?event({starting_msg_slot, StartingMsgSlot}), + Msg3 = + #{ + <<"path">> => <<"push">>, + <<"slot">> => StartingMsgSlot + }, + {ok, Res} = hb_ao:resolve(Msg1, Msg3, Opts), + ?event(push, {res, Res}), + Msg = hb_message:commit(#{ + <<"path">> => <<"push">>, + <<"method">> => <<"POST">>, + <<"body">> => + hb_message:commit( + #{ + <<"target">> => hb_message:id(Msg1, all, Opts), + <<"action">> => <<"Ping">> + }, + Opts + ) + }, Opts), + ?event(push, {msg1, Msg}), + Res2 = + hb_ao:resolve_many( + [ + hb_message:id(Msg1, all, Opts), + {as, <<"process@1.0">>, <<>>}, + Msg + ], + Opts + ), + ?assertMatch({ok, #{ <<"1">> := #{ <<"resulted-in">> := _ }}}, Res2). +-endif. +``` + +### ping_pong_script + +```erlang +ping_pong_script(Limit) -> + << + "Handlers.add(\"Ping\",\n" + " function (test) return true end,\n" + " function(m)\n" + " C = tonumber(m.Count)\n" + " if C <= ", (integer_to_binary(Limit))/binary, " then\n" + " Send({ Target = ao.id, Action = \"Ping\", Count = C + 1 })\n" + " print(\"Ping\", C + 1)\n" + " else\n" + " print(\"Done.\")\n" + " end\n" + " end\n" + ")\n" + "Send({ Target = ao.id, Action = \"Ping\", Count = 1 })\n" + >>. 
+``` + +### reply_script + +```erlang +reply_script() -> + << + """ + Handlers.add("Reply", + { Action = "Ping" }, + function(m) + print("Replying to...") + print(m.From) + Send({ Target = m.From, Action = "Reply", Message = "Pong!" }) + print("Done.") + end + ) + """ + >>. +``` + +### message_to_legacynet_scheduler_script + +```erlang +message_to_legacynet_scheduler_script() -> + << + """ + Handlers.add("Ping", + { Action = "Ping" }, + function(m) + print("Pinging...") + print(m.From) + Send({ + Target = "QQiMcAge5ZtxcUV7ruxpi16KYRE8UBP0GAAqCIJPXz0", + Action = "Ping" + }) + print("Done.") + end + ) + """ + >>. +``` + +### oracle_script + +```erlang +oracle_script() -> + << + """ + Handlers.add("Oracle", + function(m) + return true + end, + function(m) + print(m.Body) + end + ) + Send({ + target = ao.id, + resolve = "/~relay@1.0/call", + ["relay-path"] = "https://arweave.net" + }) + """ +``` + +--- + +*Generated from [dev_push.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_push.erl)* diff --git a/docs/book/src/dev_query.erl.md b/docs/book/src/dev_query.erl.md new file mode 100644 index 000000000..bf2f727b9 --- /dev/null +++ b/docs/book/src/dev_query.erl.md @@ -0,0 +1,359 @@ +# dev_query + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query.erl) + +A discovery engine for searching for and returning messages found in +a node's cache, through supported stores. +This device supports various modes of matching, including: +- `all` (default): Match all keys in the request message. +- `base`: Match all keys in the base message. +- `only`: Match only the key(s) specified in the `only` key. +The `only` key can be a binary, a map, or a list of keys. If it is a binary, +it is split on commas to get a list of keys to search for. If it is a message, +it is used directly as the match spec. If it is a list, it is assumed to be +a list of keys that we should select from the request or base message and +use as the match spec. 
+The `return` key can be used to specify the type of data to return.
+- `count`: Return the number of matches.
+- `paths`: Return the paths of the matches in a list.
+- `messages`: Return the messages associated with each match in a list.
+- `first-path`: Return the first path of the matches.
+- `first-message`: Return the first message of the matches.
+- `boolean`: Return a boolean indicating whether any matches were found.
+
+---
+
+## Exported Functions
+
+- `all/3`
+- `base/3`
+- `graphql/3`
+- `has_results/3`
+- `info/1`
+- `only/3`
+- `test_setup/0`
+
+---
+
+### info
+
+A discovery engine for searching for and returning messages found in
+
+```erlang
+info(_Opts) ->
+    #{
+        excludes => [<<"keys">>, <<"set">>],
+        default => fun default/4
+    }.
+```
+
+### graphql
+
+Execute the query via GraphQL.
+
+```erlang
+graphql(Req, Base, Opts) ->
+    dev_query_graphql:handle(Req, Base, Opts).
+```
+
+### has_results
+
+Return whether a GraphQL response in a message has transaction results.
+
+```erlang
+has_results(Base, Req, Opts) ->
+    JSON =
+        hb_ao:get_first(
+            [
+                {{as, <<"message@1.0">>, Base}, <<"body">>},
+                {{as, <<"message@1.0">>, Req}, <<"body">>}
+            ],
+            <<"{}">>,
+            Opts
+        ),
+    Decoded = hb_json:decode(JSON),
+    ?event(debug_multi, {has_results, {decoded_json, Decoded}}),
+    case Decoded of
+        #{ <<"data">> := #{ <<"transactions">> := #{ <<"edges">> := Nodes } } }
+            when length(Nodes) > 0 ->
+            true;
+        _ -> false
+    end.
+```
+
+### default
+
+Search for the keys specified in the request message.
+
+```erlang
+default(_, Base, Req, Opts) ->
+    all(Base, Req, Opts).
+```
+
+### all
+
+Search the node's store for all of the keys and values in the request,
+
+```erlang
+all(Base, Req, Opts) ->
+    match(Req, Base, Req, Opts).
+```
+
+### base
+
+Search the node's store for all of the keys and values in the base
+
+```erlang
+base(Base, Req, Opts) ->
+    match(Base, Base, Req, Opts).
+```
+
+### only
+
+Search only for the (list of) key(s) specified in `only` in the request.
+ +```erlang +only(Base, Req, Opts) -> + case hb_maps:get(<<"only">>, Req, not_found, Opts) of + KeyBin when is_binary(KeyBin) -> + % The descriptor is a binary, so we split it on commas to get a + % list of keys to search for. If there is only one key, we + % return a list with that key. +``` + +### match + +Match the request against the base message, using the keys to select + +```erlang +match(Keys, Base, Req, Opts) when is_list(Keys) -> + UserSpec = + maps:from_list( + lists:filtermap( + fun(Key) -> + % Search for the value in the request. If not found, + % look in the base message. +``` + +### match + +```erlang +match(UserSpec, _Base, Req, Opts) -> + ?event({matching, {spec, UserSpec}}), + FilteredSpec = + hb_maps:without( + hb_maps:get(<<"exclude">>, Req, ?DEFAULT_EXCLUDES, Opts), + UserSpec + ), + ReturnType = hb_maps:get(<<"return">>, Req, <<"paths">>, Opts), + ?event({matching, {spec, FilteredSpec}, {return, ReturnType}}), + case hb_cache:match(FilteredSpec, Opts) of + {ok, Matches} when ReturnType == <<"count">> -> + ?event({matched, {paths, Matches}}), + {ok, length(Matches)}; + {ok, Matches} when ReturnType == <<"paths">> -> + ?event({matched, {paths, Matches}}), + {ok, Matches}; + {ok, Matches} when ReturnType == <<"messages">> -> + ?event({matched, {paths, Matches}}), + Messages = + lists:map( + fun(Path) -> + hb_util:ok(hb_cache:read(Path, Opts)) + end, + Matches + ), + ?event({matched, {messages, Messages}}), + {ok, Messages}; + {ok, Matches} when ReturnType == <<"first-path">> -> + ?event({matched, {paths, Matches}}), + {ok, hd(Matches)}; + {ok, Matches} when ReturnType == <<"first">> + orelse ReturnType == <<"first-message">> -> + ?event({matched, {paths, Matches}}), + {ok, hb_util:ok(hb_cache:read(hd(Matches), Opts))}; + {ok, Matches} when ReturnType == <<"boolean">> -> + ?event({matched, {paths, Matches}}), + {ok, length(Matches) > 0}; + not_found when ReturnType == <<"boolean">> -> + {ok, false}; + not_found -> + {error, not_found} + end. 
+``` + +### test_setup + +Return test options with a test store. + +```erlang +test_setup() -> + Store = hb_test_utils:test_store(hb_store_lmdb), + Opts = #{ store => Store, priv_wallet => hb:wallet() }, + % Write a simple message. +``` + +### basic_test + +Search for and find a basic test key. + +```erlang +basic_test() -> + {ok, Opts, _} = test_setup(), + {ok, [ID]} = hb_ao:resolve(<<"~query@1.0/all?basic=binary-value">>, Opts), + {ok, Read} = hb_cache:read(ID, Opts), + ?assertEqual(<<"binary-value">>, hb_maps:get(<<"basic">>, Read)), + ?assertEqual(<<"binary-value-2">>, hb_maps:get(<<"basic-2">>, Read)), + {ok, [Msg]} = + hb_ao:resolve( + <<"~query@1.0/all?basic-2=binary-value-2&return=messages">>, + Opts + ), + ?assertEqual(<<"binary-value-2">>, hb_maps:get(<<"basic-2">>, Msg)), + ok. +``` + +### only_test + +Ensure that we can search for and match only a single key. + +```erlang +only_test() -> + {ok, Opts, _} = test_setup(), + {ok, [Msg]} = + hb_ao:resolve( + <<"~query@1.0/only=basic&basic=binary-value&wrong=1&return=messages">>, + Opts + ), + ?assertEqual(<<"binary-value">>, hb_maps:get(<<"basic">>, Msg)), + ok. +``` + +### multiple_test + +Ensure that we can specify multiple keys to match. + +```erlang +multiple_test() -> + {ok, Opts, _} = test_setup(), + {ok, [Msg]} = + hb_ao:resolve( + << + "~query@1.0/only=basic,basic-2", + "&basic=binary-value&basic-2=binary-value-2", + "&return=messages" + >>, + Opts + ), + ?assertEqual(<<"binary-value">>, hb_maps:get(<<"basic">>, Msg)), + ?assertEqual(<<"binary-value-2">>, hb_maps:get(<<"basic-2">>, Msg)), + ok. +``` + +### nested_test + +Search for and find a nested test key. 
+ +```erlang +nested_test() -> + {ok, Opts, _} = test_setup(), + {ok, [MsgWithNested]} = + hb_ao:resolve( + <<"~query@1.0/all?test-key=test-value&return=messages">>, + Opts + ), + ?assert(hb_maps:is_key(<<"nested">>, MsgWithNested, Opts)), + Nested = hb_maps:get(<<"nested">>, MsgWithNested, undefined, Opts), + ?assertEqual(<<"test-value-3">>, hb_maps:get(<<"test-key-3">>, Nested, Opts)), + ?assertEqual(<<"test-value-4">>, hb_maps:get(<<"test-key-4">>, Nested, Opts)), + ok. +``` + +### list_test + +Search for and find a list message with typed elements. + +```erlang +list_test() -> + {ok, Opts, _} = test_setup(), + {ok, [Msg]} = + hb_ao:resolve( + <<"~query@1.0/all?2+integer=2&3+atom=ok&return=messages">>, + Opts + ), + ?assertEqual([<<"a">>, 2, ok], Msg), + ok. +``` + +### return_key_test + +Ensure user's can opt not to specify a key to resolve, instead specifying + +```erlang +return_key_test() -> + {ok, Opts, _} = test_setup(), + {ok, [ID]} = + hb_ao:resolve( + <<"~query@1.0/basic=binary-value">>, + Opts + ), + {ok, Msg} = hb_cache:read(ID, Opts), + ?assertEqual(<<"binary-value">>, hb_maps:get(<<"basic">>, Msg, Opts)), + ok. +``` + +### return_types_test + +Validate the functioning of various return types. + +```erlang +return_types_test() -> + {ok, Opts, _} = test_setup(), + {ok, [Msg]} = + hb_ao:resolve( + <<"~query@1.0/basic=binary-value&return=messages">>, + Opts + ), + ?assertEqual(<<"binary-value">>, hb_maps:get(<<"basic">>, Msg, Opts)), + ?assertEqual( + {ok, 1}, + hb_ao:resolve( + <<"~query@1.0/basic=binary-value&return=count">>, + Opts + ) + ), + ?assertEqual( + {ok, true}, + hb_ao:resolve( + <<"~query@1.0/basic=binary-value&return=boolean">>, + Opts + ) + ), + ?assertEqual( + {ok, <<"binary-value">>}, + hb_ao:resolve( + <<"~query@1.0/basic=binary-value&return=first-message/basic">>, + Opts + ) + ), + ok. 
+``` + +### http_test + +```erlang +http_test() -> + {ok, Opts, _} = test_setup(), + Node = hb_http_server:start_node(Opts), + {ok, Msg} = + hb_http:get( + Node, + <<"~query@1.0/only=basic&basic=binary-value?return=first">>, + Opts + ), + ?assertEqual(<<"binary-value">>, hb_maps:get(<<"basic">>, Msg, Opts)), +``` + +--- + +*Generated from [dev_query.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query.erl)* diff --git a/docs/book/src/dev_query_arweave.erl.md b/docs/book/src/dev_query_arweave.erl.md new file mode 100644 index 000000000..f7195ca8e --- /dev/null +++ b/docs/book/src/dev_query_arweave.erl.md @@ -0,0 +1,546 @@ +# dev_query_arweave + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query_arweave.erl) + +An implementation of the Arweave GraphQL API, inside the `~query@1.0` +device. + +--- + +## Exported Functions + +- `query/4` + +--- + +### query + +An implementation of the Arweave GraphQL API, inside the `~query@1.0` +The arguments that are supported by the Arweave GraphQL API. +Handle an Arweave GraphQL query for either transactions or blocks. + +```erlang +query(List, <<"edges">>, _Args, _Opts) -> + {ok, [{ok, Msg} || Msg <- List]}; +``` + +### query + +An implementation of the Arweave GraphQL API, inside the `~query@1.0` +The arguments that are supported by the Arweave GraphQL API. +Handle an Arweave GraphQL query for either transactions or blocks. + +```erlang +query(Msg, <<"node">>, _Args, _Opts) -> + {ok, Msg}; +``` + +### query + +An implementation of the Arweave GraphQL API, inside the `~query@1.0` +The arguments that are supported by the Arweave GraphQL API. +Handle an Arweave GraphQL query for either transactions or blocks. 
+ +```erlang +query(Obj, <<"transaction">>, Args, Opts) -> + case query(Obj, <<"transactions">>, Args, Opts) of + {ok, []} -> {ok, null}; + {ok, [Msg|_]} -> {ok, Msg} + end; +``` + +### query + +An implementation of the Arweave GraphQL API, inside the `~query@1.0` +The arguments that are supported by the Arweave GraphQL API. +Handle an Arweave GraphQL query for either transactions or blocks. + +```erlang +query(Obj, <<"transactions">>, Args, Opts) -> + ?event({transactions_query, + {object, Obj}, + {field, <<"transactions">>}, + {args, Args} + }), + Matches = match_args(Args, Opts), + ?event({transactions_matches, Matches}), + Messages = + lists:filtermap( + fun(Match) -> + case hb_cache:read(Match, Opts) of + {ok, Msg} -> {true, Msg}; + not_found -> false + end + end, + Matches + ), + {ok, Messages}; +``` + +### query + +An implementation of the Arweave GraphQL API, inside the `~query@1.0` +The arguments that are supported by the Arweave GraphQL API. +Handle an Arweave GraphQL query for either transactions or blocks. + +```erlang +query(Obj, <<"block">>, Args, Opts) -> + case query(Obj, <<"blocks">>, Args, Opts) of + {ok, []} -> {ok, null}; + {ok, [Msg|_]} -> {ok, Msg} + end; +``` + +### query + +An implementation of the Arweave GraphQL API, inside the `~query@1.0` +The arguments that are supported by the Arweave GraphQL API. +Handle an Arweave GraphQL query for either transactions or blocks. + +```erlang +query(Obj, <<"blocks">>, Args, Opts) -> + ?event({blocks, + {object, Obj}, + {field, <<"blocks">>}, + {args, Args} + }), + Matches = match_args(Args, Opts), + ?event({blocks_matches, Matches}), + Blocks = + lists:filtermap( + fun(Match) -> + case hb_cache:read(Match, Opts) of + {ok, Msg} -> {true, Msg}; + not_found -> false + end + end, + Matches + ), + % Return the blocks as a list of messages. 
+``` + +### query + +```erlang +query(Block, <<"previous">>, _Args, Opts) -> + {ok, hb_maps:get(<<"previous_block">>, Block, null, Opts)}; +``` + +### query + +```erlang +query(Block, <<"height">>, _Args, Opts) -> + {ok, hb_maps:get(<<"height">>, Block, null, Opts)}; +``` + +### query + +```erlang +query(Block, <<"timestamp">>, _Args, Opts) -> + {ok, hb_maps:get(<<"timestamp">>, Block, null, Opts)}; +``` + +### query + +```erlang +query(Msg, <<"signature">>, _Args, Opts) -> + % Return the signature of the transaction. +``` + +### query + +```erlang +query(Msg, <<"owner">>, _Args, Opts) -> + ?event({query_owner, Msg}), + case hb_message:commitments(#{ <<"committer">> => '_' }, Msg, Opts) of + not_found -> {ok, null}; + Commitments -> + case hb_maps:keys(Commitments) of + [] -> {ok, null}; + [CommID | _] -> + {ok, Commitment} = hb_maps:find(CommID, Commitments, Opts), + {ok, Address} = hb_maps:find(<<"committer">>, Commitment, Opts), + {ok, KeyID} = hb_maps:find(<<"keyid">>, Commitment, Opts), + Key = dev_codec_httpsig_keyid:remove_scheme_prefix(KeyID), + {ok, #{ + <<"address">> => Address, + <<"key">> => Key + }} + end + end; +``` + +### query + +```erlang +query(#{ <<"key">> := Key }, <<"key">>, _Args, _Opts) -> + {ok, Key}; +``` + +### query + +```erlang +query(#{ <<"address">> := Address }, <<"address">>, _Args, _Opts) -> + {ok, Address}; +``` + +### query + +```erlang +query(Msg, <<"fee">>, _Args, Opts) -> + {ok, hb_maps:get(<<"fee">>, Msg, 0, Opts)}; +``` + +### query + +```erlang +query(Msg, <<"quantity">>, _Args, Opts) -> + {ok, hb_maps:get(<<"quantity">>, Msg, 0, Opts)}; +``` + +### query + +```erlang +query(Number, <<"winston">>, _Args, _Opts) when is_number(Number) -> + {ok, Number}; +``` + +### query + +```erlang +query(Msg, <<"recipient">>, _Args, Opts) -> + case find_field_key(<<"field-target">>, Msg, Opts) of + {ok, null} -> {ok, <<"">>}; + OkRes -> OkRes + end; +``` + +### query + +```erlang +query(Msg, <<"anchor">>, _Args, Opts) -> + case 
find_field_key(<<"field-anchor">>, Msg, Opts) of + {ok, null} -> {ok, <<"">>}; + {ok, Anchor} -> {ok, hb_util:human_id(Anchor)} + end; +``` + +### query + +```erlang +query(Msg, <<"data">>, _Args, Opts) -> + Data = + hb_ao:get_first( + [ + {{as, <<"message@1.0">>, Msg}, <<"data">>}, + {{as, <<"message@1.0">>, Msg}, <<"body">>} + ], + <<>>, + Opts + ), + Type = hb_maps:get(<<"content-type">>, Msg, null, Opts), + {ok, #{ <<"data">> => Data, <<"type">> => Type }}; +``` + +### query + +```erlang +query(#{ <<"data">> := Data }, <<"size">>, _Args, _Opts) -> + {ok, byte_size(Data)}; +``` + +### query + +```erlang +query(#{ <<"type">> := Type }, <<"type">>, _Args, _Opts) -> + {ok, Type}; +``` + +### query + +Find and return a value from the fields of a message (from its + +```erlang +query(Obj, Field, Args, _Opts) -> + ?event({unimplemented_transactions_query, + {object, Obj}, + {field, Field}, + {args, Args} + }), + {ok, <<"Not implemented.">>}. +``` + +### find_field_key + +Find and return a value from the fields of a message (from its + +```erlang +find_field_key(Field, Msg, Opts) -> + case hb_message:commitments(#{ Field => '_' }, Msg, Opts) of + not_found -> {ok, null}; + Commitments -> + case hb_maps:keys(Commitments) of + [] -> {ok, null}; + [CommID | _] -> + {ok, Commitment} = hb_maps:find(CommID, Commitments, Opts), + case hb_maps:find(Field, Commitment, Opts) of + {ok, Value} -> {ok, Value}; + error -> {ok, null} + end + end + end. +``` + +### match_args + +Progressively generate matches from each argument for a transaction + +```erlang +match_args(Args, Opts) when is_map(Args) -> + match_args( + maps:to_list( + maps:with( + ?SUPPORTED_QUERY_ARGS, + Args + ) + ), + [], + Opts + ). 
+``` + +### match_args + +```erlang +match_args([], Results, Opts) -> + ?event({match_args_results, Results}), + Matches = + lists:foldl( + fun(Result, Acc) -> + hb_util:list_with(resolve_ids(Result, Opts), Acc) + end, + resolve_ids(hd(Results), Opts), + tl(Results) + ), + hb_util:unique( + lists:flatten( + [ + all_ids(ID, Opts) + || + ID <- Matches + ] + ) + ); +``` + +### match_args + +```erlang +match_args([{Field, X} | Rest], Acc, Opts) -> + MatchRes = match(Field, X, Opts), + ?event({match, {field, Field}, {arg, X}, {match_res, MatchRes}}), + case MatchRes of + {ok, Result} -> + match_args(Rest, [Result | Acc], Opts); + _Error -> + match_args(Rest, Acc, Opts) + end. +``` + +### match + +Generate a match upon `tags` in the arguments, if given. + +```erlang +match(_, null, _) -> ignore; +``` + +### match + +Generate a match upon `tags` in the arguments, if given. + +```erlang +match(<<"height">>, Heights, Opts) -> + Min = hb_maps:get(<<"min">>, Heights, 0, Opts), + Max = + case hb_maps:find(<<"max">>, Heights, Opts) of + {ok, GivenMax} -> GivenMax; + error -> + {ok, Latest} = dev_arweave_block_cache:latest(Opts), + Latest + end, + #{ store := ScopedStores } = scope(Opts), + {ok, + lists:filtermap( + fun(Height) -> + Path = dev_arweave_block_cache:path(Height, Opts), + case hb_store:type(ScopedStores, Path) of + not_found -> false; + _ -> {true, hb_store:resolve(ScopedStores, Path)} + end + end, + lists:seq(Min, Max) + ) + }; +``` + +### match + +Generate a match upon `tags` in the arguments, if given. + +```erlang +match(<<"id">>, ID, _Opts) -> + {ok, [ID]}; +``` + +### match + +Generate a match upon `tags` in the arguments, if given. + +```erlang +match(<<"ids">>, IDs, _Opts) -> + {ok, IDs}; +``` + +### match + +Generate a match upon `tags` in the arguments, if given. + +```erlang +match(<<"tags">>, Tags, Opts) -> + hb_cache:match(dev_query_graphql:keys_to_template(Tags), Opts); +``` + +### match + +Generate a match upon `tags` in the arguments, if given. 
+ +```erlang +match(<<"owners">>, Owners, Opts) -> + {ok, matching_commitments(<<"committer">>, Owners, Opts)}; +``` + +### match + +Generate a match upon `tags` in the arguments, if given. + +```erlang +match(<<"owner">>, Owner, Opts) -> + Res = matching_commitments(<<"committer">>, Owner, Opts), + ?event({match_owner, Owner, Res}), + {ok, Res}; +``` + +### match + +Generate a match upon `tags` in the arguments, if given. + +```erlang +match(<<"recipients">>, Recipients, Opts) -> + {ok, matching_commitments(<<"field-target">>, Recipients, Opts)}; +``` + +### match + +Generate a match upon `tags` in the arguments, if given. + +```erlang +match(UnsupportedFilter, _, _) -> + throw({unsupported_query_filter, UnsupportedFilter}). +``` + +### matching_commitments + +Return the base IDs for messages that have a matching commitment. + +```erlang +matching_commitments(Field, Values, Opts) when is_list(Values) -> + hb_util:unique(lists:flatten( + lists:map( + fun(Value) -> matching_commitments(Field, Value, Opts) end, + Values + ) + )); +``` + +### matching_commitments + +Return the base IDs for messages that have a matching commitment. + +```erlang +matching_commitments(Field, Value, Opts) when is_binary(Value) -> + case hb_cache:match(#{ Field => Value }, Opts) of + {ok, IDs} -> + ?event( + {found_matching_commitments, + {field, Field}, + {value, Value}, + {ids, IDs} + } + ), + lists:map(fun(ID) -> commitment_id_to_base_id(ID, Opts) end, IDs); + not_found -> not_found + end. +``` + +### commitment_id_to_base_id + +Convert a commitment message's ID to a base ID. + +```erlang +commitment_id_to_base_id(ID, Opts) -> + Store = hb_opts:get(store, no_store, Opts), + ?event({commitment_id_to_base_id, ID}), + case hb_store:read(Store, << ID/binary, "/signature">>) of + {ok, EncSig} -> + Sig = hb_util:decode(EncSig), + ?event({commitment_id_to_base_id_sig, Sig}), + hb_util:encode(hb_crypto:sha256(Sig)); + not_found -> not_found + end. 
+``` + +### all_ids + +Find all IDs for a message, by any of its other IDs. + +```erlang +all_ids(ID, Opts) -> + Store = hb_opts:get(store, no_store, Opts), + case hb_store:list(Store, << ID/binary, "/commitments">>) of + {ok, []} -> [ID]; + {ok, CommitmentIDs} -> CommitmentIDs; + _ -> [] + end. +``` + +### scope + +Scope the stores used for block matching. The searched stores can be + +```erlang +scope(Opts) -> + Scope = hb_opts:get(query_arweave_scope, [local], Opts), + hb_store:scope(Opts, Scope). +``` + +### resolve_ids + +Resolve a list of IDs to their store paths, using the stores provided. + +```erlang +resolve_ids(IDs, Opts) -> + Scoped = scope(Opts), + lists:map( + fun(ID) -> + case hb_cache:read(ID, Opts) of + {ok, Msg} -> hb_message:id(Msg, uncommitted, Scoped); + not_found -> ID + end + end, + IDs +``` + +--- + +*Generated from [dev_query_arweave.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query_arweave.erl)* diff --git a/docs/book/src/dev_query_graphql.erl.md b/docs/book/src/dev_query_graphql.erl.md new file mode 100644 index 000000000..523c48d63 --- /dev/null +++ b/docs/book/src/dev_query_graphql.erl.md @@ -0,0 +1,497 @@ +# dev_query_graphql + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query_graphql.erl) + +A GraphQL interface for querying a node's cache. Accessible through the +`~query@1.0/graphql` device key. + +--- + +## Exported Functions + +- `execute/4` +- `handle/3` +- `keys_to_template/1` +- `test_query/3` +- `test_query/4` + +--- + +### schema + +A GraphQL interface for querying a node's cache. Accessible through the +Returns the complete GraphQL schema. +Ensure that the GraphQL schema and context are initialized. Can be + +```erlang +schema() -> + hb_util:ok(file:read_file("scripts/schema.gql")). +``` + +### ensure_started + +A GraphQL interface for querying a node's cache. Accessible through the +Returns the complete GraphQL schema. 
+Ensure that the GraphQL schema and context are initialized. Can be + +```erlang +ensure_started() -> ensure_started(#{}). +``` + +### ensure_started + +A GraphQL interface for querying a node's cache. Accessible through the +Returns the complete GraphQL schema. +Ensure that the GraphQL schema and context are initialized. Can be + +```erlang +ensure_started(Opts) -> + case hb_name:lookup(graphql_controller) of + PID when is_pid(PID) -> ok; + undefined -> + Parent = self(), + PID = + spawn_link( + fun() -> + init(Opts), + Parent ! {started, self()}, + receive stop -> ok end + end + ), + receive {started, PID} -> ok + after ?START_TIMEOUT -> exit(graphql_start_timeout) + end + end. +``` + +### init + +Initialize the GraphQL schema and context. Should only be called once. + +```erlang +init(_Opts) -> + ?event(graphql_init_called), + application:ensure_all_started(graphql), + ?event(graphql_application_started), + GraphQLOpts = + #{ + scalars => #{ default => ?MODULE }, + interfaces => #{ default => ?MODULE }, + unions => #{ default => ?MODULE }, + objects => #{ default => ?MODULE }, + enums => #{ default => ?MODULE } + }, + ok = graphql:load_schema(GraphQLOpts, schema()), + ?event(graphql_schema_loaded), + Root = + {root, + #{ + query => 'Query', + interfaces => [] + } + }, + ok = graphql:insert_schema_definition(Root), + ?event(graphql_schema_definition_inserted), + ok = graphql:validate_schema(), + ?event(graphql_schema_validated), + hb_name:register(graphql_controller, self()), + ?event(graphql_controller_registered), + ok. +``` + +### handle + +```erlang +handle(_Base, RawReq, Opts) -> + ?event({request, RawReq}), + Req = + case hb_maps:find(<<"query">>, RawReq, Opts) of + {ok, _} -> RawReq; + error -> + % Parse the query, assuming that the request body is a JSON + % object with the necessary fields. 
+``` + +### execute + +The main entrypoint for resolving GraphQL elements, called by the + +```erlang +execute(#{opts := Opts}, Obj, Field, Args) -> + ?event({graphql_query, {object, Obj}, {field, Field}, {args, Args}}), + case lists:member(Field, ?MESSAGE_QUERY_KEYS) of + true -> message_query(Obj, Field, Args, Opts); + false -> dev_query_arweave:query(Obj, Field, Args, Opts) + end. +``` + +### message_query + +Handle a HyperBEAM `message` query. + +```erlang +message_query(Obj, <<"message">>, #{<<"keys">> := Keys}, Opts) -> + Template = keys_to_template(Keys), + ?event( + {graphql_execute_called, + {object, Obj}, + {field, <<"message">>}, + {raw_keys, Keys}, + {template, Template} + } + ), + case hb_cache:match(Template, Opts) of + {ok, [ID | _IDs]} -> + ?event({graphql_cache_match_found, ID}), + {ok, Msg} = hb_cache:read(ID, Opts), + ?event({graphql_cache_read, Msg}), + {ok, Msg}; + not_found -> + ?event(graphql_cache_match_not_found), + {ok, #{<<"id">> => <<"not-found">>, <<"keys">> => #{}}} + end; +``` + +### message_query + +Handle a HyperBEAM `message` query. + +```erlang +message_query(Msg, Field, _Args, Opts) when Field =:= <<"keys">>; Field =:= <<"tags">> -> + OnlyKeys = + hb_maps:to_list( + hb_private:reset( + hb_maps:without( + [<<"data">>, <<"body">>], + hb_message:uncommitted(Msg, Opts), + Opts + ) + ), + Opts + ), + ?event({message_query_keys_or_tags, {object, Msg}, {only_keys, OnlyKeys}}), + Res = { + ok, + [ + {ok, + #{ + <<"name">> => Name, + <<"value">> => hb_cache:ensure_loaded(Value, Opts) + } + } + || + {Name, Value} <- OnlyKeys + ] + }, + ?event({message_query_keys_or_tags_result, Res}), + Res; +``` + +### message_query + +Handle a HyperBEAM `message` query. 
+ +```erlang +message_query(Msg, Field, _Args, Opts) + when Field =:= <<"name">> orelse Field =:= <<"value">> -> + ?event({message_query_name_or_value, {object, Msg}, {field, Field}}), + {ok, hb_maps:get(Field, Msg, null, Opts)}; +``` + +### message_query + +Handle a HyperBEAM `message` query. + +```erlang +message_query(Msg = #{ <<"independent_hash">> := _ }, <<"id">>, _Args, Opts) -> + {ok, hb_maps:get(<<"independent_hash">>, Msg, null, Opts)}; +``` + +### message_query + +Handle a HyperBEAM `message` query. + +```erlang +message_query(Msg, <<"id">>, _Args, Opts) -> + ?event({message_query_id, {object, Msg}}), + {ok, hb_message:id(Msg, all, Opts)}; +``` + +### message_query + +Handle a HyperBEAM `message` query. + +```erlang +message_query(_Msg, <<"cursor">>, _Args, _Opts) -> + {ok, <<"">>}; +``` + +### message_query + +Handle a HyperBEAM `message` query. + +```erlang +message_query(_Obj, _Field, _, _) -> + {ok, <<"Not found.">>}. +``` + +### keys_to_template + +Handle a HyperBEAM `message` query. + +```erlang +keys_to_template(Keys) -> + maps:from_list(lists:foldl( + fun(#{<<"name">> := Name, <<"value">> := Value}, Acc) -> + [{Name, Value} | Acc]; + (#{<<"name">> := Name, <<"values">> := [Value]}, Acc) -> + [{Name, Value} | Acc]; + (#{<<"name">> := Name, <<"values">> := Values}, _Acc) -> + throw( + {multivalue_tag_search_not_supported, #{ + <<"name">> => Name, + <<"values">> => Values + }} + ) + end, + [], + Keys + )). +``` + +### test_query + +```erlang +test_query(Node, Query, Opts) -> + test_query(Node, Query, undefined, Opts). +``` + +### test_query + +```erlang +test_query(Node, Query, Variables, Opts) -> + test_query(Node, Query, Variables, undefined, Opts). 
+``` + +### test_query + +```erlang +test_query(Node, Query, Variables, OperationName, Opts) -> + UnencodedPayload = + maps:filter( + fun(_, undefined) -> false; + (_, _) -> true + end, + #{ + <<"query">> => Query, + <<"variables">> => Variables, + <<"operationName">> => OperationName + } + ), + ?event({test_query_unencoded_payload, UnencodedPayload}), + {ok, Res} = + hb_http:post( + Node, + #{ + <<"path">> => <<"~query@1.0/graphql">>, + <<"content-type">> => <<"application/json">>, + <<"codec-device">> => <<"json@1.0">>, + <<"body">> => hb_json:encode(UnencodedPayload) + }, + Opts + ), + hb_json:decode(hb_maps:get(<<"body">>, Res, <<>>, Opts)). +%%% Tests +``` + +### lookup_test + +```erlang +lookup_test() -> + {ok, Opts, _} = dev_query:test_setup(), + Node = hb_http_server:start_node(Opts), + Query = + <<""" + query GetMessage { + message( + keys: + [ + { + name: "basic", + value: "binary-value" + } + ] + ) { + id + keys { + name + value + } + } + } + """>>, + Res = test_query(Node, Query, Opts), + ?event({test_response, Res}), + ?assertMatch( + #{ <<"data">> := + #{ + <<"message">> := + #{ + <<"id">> := _, + <<"keys">> := + [ + #{ + <<"name">> := <<"basic">>, + <<"value">> := <<"binary-value">> + }, + #{ + <<"name">> := <<"basic-2">>, + <<"value">> := <<"binary-value-2">> + } + ] + } + } + }, + Res + ). 
+``` + +### lookup_with_vars_test + +```erlang +lookup_with_vars_test() -> + {ok, Opts, _} = dev_query:test_setup(), + Node = hb_http_server:start_node(Opts), + {ok, Res} = + hb_http:post( + Node, + #{ + <<"path">> => <<"~query@1.0/graphql">>, + <<"content-type">> => <<"application/json">>, + <<"codec-device">> => <<"json@1.0">>, + <<"body">> => + hb_json:encode(#{ + <<"query">> => + <<""" + query GetMessage($keys: [KeyInput]) { + message( + keys: $keys + ) { + id + keys { + name + value + } + } + } + """>>, + <<"operationName">> => <<"GetMessage">>, + <<"variables">> => #{ + <<"keys">> => + [ + #{ + <<"name">> => <<"basic">>, + <<"value">> => <<"binary-value">> + } + ] + } + }) + }, + Opts + ), + Object = hb_json:decode(hb_maps:get(<<"body">>, Res, <<>>, Opts)), + ?event({test_response, Object}), + ?assertMatch( + #{ <<"data">> := + #{ + <<"message">> := + #{ + <<"id">> := _, + <<"keys">> := + [ + #{ + <<"name">> := <<"basic">>, + <<"value">> := <<"binary-value">> + }, + #{ + <<"name">> := <<"basic-2">>, + <<"value">> := <<"binary-value-2">> + } + ] + } + } + }, + Object + ). 
+``` + +### lookup_without_opname_test + +```erlang +lookup_without_opname_test() -> + {ok, Opts, _} = dev_query:test_setup(), + Node = hb_http_server:start_node(Opts), + {ok, Res} = + hb_http:post( + Node, + #{ + <<"path">> => <<"~query@1.0/graphql">>, + <<"content-type">> => <<"application/json">>, + <<"codec-device">> => <<"json@1.0">>, + <<"body">> => + hb_json:encode(#{ + <<"query">> => + <<""" + query($keys: [KeyInput]) { + message( + keys: $keys + ) { + id + keys { + name + value + } + } + } + """>>, + <<"variables">> => #{ + <<"keys">> => + [ + #{ + <<"name">> => <<"basic">>, + <<"value">> => <<"binary-value">> + } + ] + } + }) + }, + Opts + ), + Object = hb_json:decode(hb_maps:get(<<"body">>, Res, <<>>, Opts)), + ?event({test_response, Object}), + ?assertMatch( + #{ <<"data">> := + #{ + <<"message">> := + #{ + <<"id">> := _, + <<"keys">> := + [ + #{ + <<"name">> := <<"basic">>, + <<"value">> := <<"binary-value">> + }, + #{ + <<"name">> := <<"basic-2">>, + <<"value">> := <<"binary-value-2">> + } + ] + } + } + }, + Object +``` + +--- + +*Generated from [dev_query_graphql.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query_graphql.erl)* diff --git a/docs/book/src/dev_query_test_vectors.erl.md b/docs/book/src/dev_query_test_vectors.erl.md new file mode 100644 index 000000000..3fa562c81 --- /dev/null +++ b/docs/book/src/dev_query_test_vectors.erl.md @@ -0,0 +1,842 @@ +# dev_query_test_vectors + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query_test_vectors.erl) + +A suite of test queries and responses for the `~query@1.0` device's +GraphQL implementation. 
+ +--- + +### write_test_message + +A suite of test queries and responses for the `~query@1.0` device's + +```erlang +write_test_message(Opts) -> + hb_cache:write( + Msg = hb_message:commit( + #{ + <<"data-protocol">> => <<"ao">>, + <<"variant">> => <<"ao.N.1">>, + <<"type">> => <<"Message">>, + <<"action">> => <<"Eval">>, + <<"data">> => <<"test data">> + }, + Opts, + #{ + <<"commitment-device">> => <<"ans104@1.0">> + } + ), + Opts + ), + {ok, Msg}. +``` + +### get_test_blocks + +Populate the cache with three test blocks. + +```erlang +get_test_blocks(Node) -> + InitialHeight = 1745749, + FinalHeight = 1745750, + lists:foreach( + fun(Height) -> + {ok, _} = + hb_http:request( + <<"GET">>, + Node, + <<"/~arweave@2.9-pre/block=", (hb_util:bin(Height))/binary>>, + #{} + ) + end, + lists:seq(InitialHeight, FinalHeight) + ). +``` + +### write_test_message_with_recipient + +```erlang +write_test_message_with_recipient(Recipient, Opts) -> + hb_cache:write( + Msg = hb_message:commit( + #{ + <<"data-protocol">> => <<"ao">>, + <<"variant">> => <<"ao.N.1">>, + <<"type">> => <<"Message">>, + <<"action">> => <<"Eval">>, + <<"content-type">> => <<"text/plain">>, + <<"data">> => <<"test data">>, + <<"target">> => Recipient + }, + Opts, + #{ + <<"commitment-device">> => <<"ans104@1.0">> + } + ), + Opts + ), + {ok, Msg}. 
+``` + +### simple_blocks_query_test + +```erlang +simple_blocks_query_test() -> + Opts = + #{ + priv_wallet => hb:wallet(), + store => [hb_test_utils:test_store(hb_store_lmdb)] + }, + Node = hb_http_server:start_node(Opts), + get_test_blocks(Node), + Query = + <<""" + query { + blocks( + ids: ["V7yZNKPQLIQfUu8r8-lcEaz4o7idl6LTHn5AHlGIFF8TKfxIe7s_yFxjqan6OW45"] + ) { + edges { + node { + id + previous + height + timestamp + } + } + } + } + """>>, + ?assertMatch( + #{ + <<"data">> := #{ + <<"blocks">> := #{ + <<"edges">> := [ + #{ + <<"node">> := #{ + <<"id">> := _, + <<"previous">> := _, + <<"height">> := 1745749, + <<"timestamp">> := 1756866695 + } + } + ] + } + } + }, + dev_query_graphql:test_query(Node, Query, #{}, Opts) + ). +``` + +### block_by_height_query_test + +```erlang +block_by_height_query_test() -> + Opts = + #{ + priv_wallet => hb:wallet(), + store => [hb_test_utils:test_store(hb_store_lmdb)] + }, + Node = hb_http_server:start_node(Opts), + get_test_blocks(Node), + Query = + <<""" + query { + blocks( height: {min: 1745749, max: 1745750} ) { + edges { + node { + id + previous + height + timestamp + } + } + } + } + """>>, + ?assertMatch( + #{ + <<"data">> := #{ + <<"blocks">> := #{ + <<"edges">> := [ + #{ + <<"node">> := #{ + <<"id">> := _, + <<"previous">> := _, + <<"height">> := 1745749, + <<"timestamp">> := 1756866695 + } + }, + #{ + <<"node">> := #{ + <<"id">> := _, + <<"previous">> := _, + <<"height">> := 1745750, + <<"timestamp">> := _ + } + } + ] + } + } + }, + dev_query_graphql:test_query(Node, Query, #{}, Opts) + ). 
+``` + +### simple_ans104_query_test + +```erlang +simple_ans104_query_test() -> + Opts = + #{ + priv_wallet => hb:wallet(), + store => [hb_test_utils:test_store(hb_store_lmdb)] + }, + Node = hb_http_server:start_node(Opts), + {ok, WrittenMsg} = write_test_message(Opts), + ?assertMatch( + {ok, [_]}, + hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) + ), + Query = + <<""" + query($owners: [String!]) { + transactions( + tags: + [ + {name: "type" values: ["Message"]}, + {name: "variant" values: ["ao.N.1"]} + ], + owners: $owners + ) { + edges { + node { + id, + tags { + name, + value + } + } + } + } + } + """>>, + Res = + dev_query_graphql:test_query( + Node, + Query, + #{ + <<"owners">> => [hb:address()] + }, + Opts + ), + ExpectedID = hb_message:id(WrittenMsg, all, Opts), + ?event({expected_id, ExpectedID}), + ?event({simple_ans104_query_test, Res}), + ?assertMatch( + #{ + <<"data">> := #{ + <<"transactions">> := #{ + <<"edges">> := + [#{ + <<"node">> := + #{ + <<"id">> := ExpectedID, + <<"tags">> := + [#{ <<"name">> := _, <<"value">> := _ }|_] + } + }] + } + } + } when ?IS_ID(ExpectedID), + Res + ). 
+``` + +### transactions_query_tags_test + +Test transactions query with tags filter + +```erlang +transactions_query_tags_test() -> + Opts = + #{ + priv_wallet => hb:wallet(), + store => [hb_test_utils:test_store(hb_store_lmdb)] + }, + Node = hb_http_server:start_node(Opts), + {ok, WrittenMsg} = write_test_message(Opts), + ?assertMatch( + {ok, [_]}, + hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) + ), + Query = + <<""" + query { + transactions( + tags: [ + {name: "type", values: ["Message"]}, + {name: "variant", values: ["ao.N.1"]} + ] + ) { + edges { + node { + id + tags { + name + value + } + } + } + } + } + """>>, + Res = + dev_query_graphql:test_query( + Node, + Query, + #{}, + Opts + ), + ExpectedID = hb_message:id(WrittenMsg, all, Opts), + ?event({expected_id, ExpectedID}), + ?event({transactions_query_tags_test, Res}), + ?assertMatch( + #{ + <<"data">> := #{ + <<"transactions">> := #{ + <<"edges">> := + [#{ + <<"node">> := + #{ + <<"id">> := ExpectedID, + <<"tags">> := + [#{ <<"name">> := _, <<"value">> := _ }|_] + } + }] + } + } + } when ?IS_ID(ExpectedID), + Res + ). 
+``` + +### transactions_query_owners_test + +Test transactions query with owners filter + +```erlang +transactions_query_owners_test() -> + Opts = + #{ + priv_wallet => hb:wallet(), + store => [hb_test_utils:test_store(hb_store_lmdb)] + }, + Node = hb_http_server:start_node(Opts), + {ok, WrittenMsg} = write_test_message(Opts), + ?assertMatch( + {ok, [_]}, + hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) + ), + Query = + <<""" + query($owners: [String!]) { + transactions( + owners: $owners + ) { + edges { + node { + id + tags { + name + value + } + } + } + } + } + """>>, + Res = + dev_query_graphql:test_query( + Node, + Query, + #{ + <<"owners">> => [hb:address()] + }, + Opts + ), + ExpectedID = hb_message:id(WrittenMsg, all, Opts), + ?event({expected_id, ExpectedID}), + ?event({transactions_query_owners_test, Res}), + ?assertMatch( + #{ + <<"data">> := #{ + <<"transactions">> := #{ + <<"edges">> := + [#{ + <<"node">> := + #{ + <<"id">> := ExpectedID, + <<"tags">> := + [#{ <<"name">> := _, <<"value">> := _ }|_] + } + }] + } + } + } when ?IS_ID(ExpectedID), + Res + ). 
+``` + +### transactions_query_recipients_test + +Test transactions query with recipients filter + +```erlang +transactions_query_recipients_test() -> + Opts = + #{ + priv_wallet => hb:wallet(), + store => [hb_test_utils:test_store(hb_store_lmdb)] + }, + Node = hb_http_server:start_node(Opts), + Alice = ar_wallet:new(), + ?event({alice, Alice, {explicit, hb_util:human_id(Alice)}}), + AliceAddress = hb_util:human_id(Alice), + {ok, WrittenMsg} = write_test_message_with_recipient(AliceAddress, Opts), + ?assertMatch( + {ok, [_]}, + hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) + ), + Query = + <<""" + query($recipients: [String!]) { + transactions( + recipients: $recipients + ) { + edges { + node { + id + tags { + name + value + } + } + } + } + } + """>>, + Res = + dev_query_graphql:test_query( + Node, + Query, + #{ + <<"recipients">> => [AliceAddress] + }, + Opts + ), + ExpectedID = hb_message:id(WrittenMsg, all, Opts), + ?event({expected_id, ExpectedID}), + ?event({transactions_query_recipients_test, Res}), + ?assertMatch( + #{ + <<"data">> := #{ + <<"transactions">> := #{ + <<"edges">> := + [#{ + <<"node">> := + #{ + <<"id">> := ExpectedID, + <<"tags">> := + [#{ <<"name">> := _, <<"value">> := _ }|_] + } + }] + } + } + } when ?IS_ID(ExpectedID), + Res + ). 
+``` + +### transactions_query_ids_test + +Test transactions query with ids filter + +```erlang +transactions_query_ids_test() -> + Opts = + #{ + priv_wallet => hb:wallet(), + store => [hb_test_utils:test_store(hb_store_lmdb)] + }, + Node = hb_http_server:start_node(Opts), + {ok, WrittenMsg} = write_test_message(Opts), + ExpectedID = hb_message:id(WrittenMsg, all, Opts), + ?assertMatch( + {ok, [_]}, + hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) + ), + Query = + <<""" + query($ids: [ID!]) { + transactions( + ids: $ids + ) { + edges { + node { + id + tags { + name + value + } + } + } + } + } + """>>, + Res = + dev_query_graphql:test_query( + Node, + Query, + #{ + <<"ids">> => [ExpectedID] + }, + Opts + ), + ?event({expected_id, ExpectedID}), + ?event({transactions_query_ids_test, Res}), + ?assertMatch( + #{ + <<"data">> := #{ + <<"transactions">> := #{ + <<"edges">> := + [#{ + <<"node">> := + #{ + <<"id">> := ExpectedID, + <<"tags">> := + [#{ <<"name">> := _, <<"value">> := _ }|_] + } + }] + } + } + } when ?IS_ID(ExpectedID), + Res + ). 
+``` + +### transactions_query_combined_test + +Test transactions query with combined filters + +```erlang +transactions_query_combined_test() -> + Opts = + #{ + priv_wallet => hb:wallet(), + store => [hb_test_utils:test_store(hb_store_lmdb)] + }, + Node = hb_http_server:start_node(Opts), + {ok, WrittenMsg} = write_test_message(Opts), + ExpectedID = hb_message:id(WrittenMsg, all, Opts), + ?assertMatch( + {ok, [_]}, + hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) + ), + Query = + <<""" + query($owners: [String!], $ids: [ID!]) { + transactions( + owners: $owners, + ids: $ids, + tags: [ + {name: "type", values: ["Message"]} + ] + ) { + edges { + node { + id + tags { + name + value + } + } + } + } + } + """>>, + Res = + dev_query_graphql:test_query( + Node, + Query, + #{ + <<"owners">> => [hb:address()], + <<"ids">> => [ExpectedID] + }, + Opts + ), + ?event({expected_id, ExpectedID}), + ?event({transactions_query_combined_test, Res}), + ?assertMatch( + #{ + <<"data">> := #{ + <<"transactions">> := #{ + <<"edges">> := + [#{ + <<"node">> := + #{ + <<"id">> := ExpectedID, + <<"tags">> := + [#{ <<"name">> := _, <<"value">> := _ }|_] + } + }] + } + } + } when ?IS_ID(ExpectedID), + Res + ). +``` + +### transaction_query_by_id_test + +Test single transaction query by ID + +```erlang +transaction_query_by_id_test() -> + Opts = + #{ + priv_wallet => hb:wallet(), + store => [hb_test_utils:test_store(hb_store_lmdb)] + }, + Node = hb_http_server:start_node(Opts), + {ok, WrittenMsg} = write_test_message(Opts), + ExpectedID = hb_message:id(WrittenMsg, all, Opts), + ?assertMatch( + {ok, [_]}, + hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) + ), + Query = + <<""" + query($id: ID!) 
{ + transaction(id: $id) { + id + tags { + name + value + } + } + } + """>>, + Res = + dev_query_graphql:test_query( + Node, + Query, + #{ + <<"id">> => ExpectedID + }, + Opts + ), + ?event({expected_id, ExpectedID}), + ?event({transaction_query_by_id_test, Res}), + ?assertMatch( + #{ + <<"data">> := #{ + <<"transaction">> := #{ + <<"id">> := ExpectedID, + <<"tags">> := + [#{ <<"name">> := _, <<"value">> := _ }|_] + } + } + } when ?IS_ID(ExpectedID), + Res + ). +``` + +### transaction_query_full_test + +Test single transaction query with more fields + +```erlang +transaction_query_full_test() -> + Opts = + #{ + priv_wallet => SenderKey = hb:wallet(), + store => [hb_test_utils:test_store(hb_store_lmdb)] + }, + Node = hb_http_server:start_node(Opts), + Alice = ar_wallet:new(), + ?event({alice, Alice, {explicit, hb_util:human_id(Alice)}}), + AliceAddress = hb_util:human_id(Alice), + SenderAddress = hb_util:human_id(SenderKey), + SenderPubKey = hb_util:encode(ar_wallet:to_pubkey(SenderKey)), + {ok, WrittenMsg} = write_test_message_with_recipient(AliceAddress, Opts), + ExpectedID = hb_message:id(WrittenMsg, all, Opts), + ?assertMatch( + {ok, [_]}, + hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) + ), + Query = + <<""" + query($id: ID!) 
{ + transaction(id: $id) { + id + anchor + signature + recipient + owner { + address + key + } + tags { + name + value + } + data { + size + type + } + } + } + """>>, + Res = + dev_query_graphql:test_query( + Node, + Query, + #{ + <<"id">> => ExpectedID + }, + Opts + ), + ?event({expected_id, ExpectedID}), + ?event({transaction_query_full_test, Res}), + ?assertMatch( + #{ + <<"data">> := #{ + <<"transaction">> := #{ + <<"id">> := ExpectedID, + <<"recipient">> := AliceAddress, + <<"anchor">> := <<"">>, + <<"owner">> := #{ + <<"address">> := SenderAddress, + <<"key">> := SenderPubKey + }, + <<"data">> := #{ + <<"size">> := <<"9">>, + <<"type">> := <<"text/plain">> + }, + <<"tags">> := + [#{ <<"name">> := _, <<"value">> := _ }|_] + % Note: other fields may be "Not implemented." for now + } + } + } when ?IS_ID(ExpectedID), + Res + ). +``` + +### transaction_query_not_found_test + +Test single transaction query with non-existent ID + +```erlang +transaction_query_not_found_test() -> + Opts = + #{ + priv_wallet => hb:wallet(), + store => [hb_test_utils:test_store(hb_store_lmdb)] + }, + Res = + dev_query_graphql:test_query( + hb_http_server:start_node(Opts), + <<""" + query($id: ID!) { + transaction(id: $id) { + id + tags { + name + value + } + } + } + """>>, + #{ + <<"id">> => hb_util:encode(crypto:strong_rand_bytes(32)) + }, + Opts + ), + % Should return null for non-existent transaction + ?assertMatch( + #{ + <<"data">> := #{ + <<"transaction">> := null + } + }, + Res + ). +``` + +### transaction_query_with_anchor_test + +Test parsing, storing, and querying a transaction with an anchor. 
+ +```erlang +transaction_query_with_anchor_test() -> + Opts = + #{ + priv_wallet => hb:wallet(), + store => [hb_test_utils:test_store(hb_store_lmdb)] + }, + Node = hb_http_server:start_node(Opts), + {ok, ID} = + hb_cache:write( + hb_message:convert( + ar_bundles:sign_item( + #tx { + anchor = AnchorID = crypto:strong_rand_bytes(32), + data = <<"test-data">> + }, + hb:wallet() + ), + <<"structured@1.0">>, + <<"ans104@1.0">>, + Opts + ), + Opts + ), + EncodedAnchor = hb_util:encode(AnchorID), + Query = + <<""" + query($id: ID!) { + transaction(id: $id) { + data { + size + type + } + anchor + } + } + """>>, + Res = + dev_query_graphql:test_query( + Node, + Query, + #{ + <<"id">> => ID + }, + Opts + ), + ?event({transaction_query_with_anchor_test, Res}), + ?assertMatch( + #{ + <<"data">> := #{ + <<"transaction">> := #{ + <<"anchor">> := EncodedAnchor + } + } + }, + Res +``` + +--- + +*Generated from [dev_query_test_vectors.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query_test_vectors.erl)* diff --git a/docs/book/src/dev_relay.erl.md b/docs/book/src/dev_relay.erl.md new file mode 100644 index 000000000..262e1ee65 --- /dev/null +++ b/docs/book/src/dev_relay.erl.md @@ -0,0 +1,301 @@ +# dev_relay + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_relay.erl) + +This module implements the relay device, which is responsible for +relaying messages between nodes and other HTTP(S) endpoints. +It can be called in either `call` or `cast` mode. In `call` mode, it +returns a `{ok, Result}` tuple, where `Result` is the response from the +remote peer to the message sent. In `cast` mode, the invocation returns +immediately, and the message is relayed asynchronously. No response is given +and the device returns `{ok, <<"OK">>}`. +Example usage: +
+    curl /~relay@1.0/call?method=GET&0.path=https://www.arweave.net/
+
+ +--- + +## Exported Functions + +- `call/3` +- `cast/3` +- `request/3` + +--- + +### call + +This module implements the relay device, which is responsible for +Execute a `call` request using a node's routes. + +```erlang +call(M1, RawM2, Opts) -> + ?event({relay_call, {m1, M1}, {raw_m2, RawM2}}), + {ok, BaseTarget} = hb_message:find_target(M1, RawM2, Opts), + ?event({relay_call, {message_to_relay, BaseTarget}}), + RelayPath = + hb_ao:get_first( + [ + {M1, <<"path">>}, + {{as, <<"message@1.0">>, BaseTarget}, <<"path">>}, + {RawM2, <<"relay-path">>}, + {M1, <<"relay-path">>} + ], + Opts + ), + RelayDevice = + hb_ao:get_first( + [ + {M1, <<"relay-device">>}, + {{as, <<"message@1.0">>, BaseTarget}, <<"relay-device">>}, + {RawM2, <<"relay-device">>} + ], + Opts + ), + RelayPeer = + hb_ao:get_first( + [ + {M1, <<"peer">>}, + {{as, <<"message@1.0">>, BaseTarget}, <<"peer">>}, + {RawM2, <<"peer">>} + ], + Opts + ), + RelayMethod = + hb_ao:get_first( + [ + {M1, <<"method">>}, + {{as, <<"message@1.0">>, BaseTarget}, <<"method">>}, + {RawM2, <<"relay-method">>}, + {M1, <<"relay-method">>}, + {RawM2, <<"method">>} + ], + Opts + ), + RelayBody = + hb_ao:get_first( + [ + {M1, <<"body">>}, + {{as, <<"message@1.0">>, BaseTarget}, <<"body">>}, + {RawM2, <<"relay-body">>}, + {M1, <<"relay-body">>}, + {RawM2, <<"body">>} + ], + Opts + ), + Commit = + hb_ao:get_first( + [ + {{as, <<"message@1.0">>, BaseTarget}, <<"commit-request">>}, + {RawM2, <<"relay-commit-request">>}, + {M1, <<"relay-commit-request">>}, + {RawM2, <<"commit-request">>}, + {M1, <<"commit-request">>} + ], + false, + Opts + ), + TargetMod1 = + if RelayBody == not_found -> BaseTarget; + true -> BaseTarget#{<<"body">> => RelayBody} + end, + TargetMod2 = + TargetMod1#{ + <<"method">> => RelayMethod, + <<"path">> => RelayPath + }, + TargetMod3 = + case RelayDevice of + not_found -> hb_maps:without([<<"device">>], TargetMod2); + _ -> TargetMod2#{<<"device">> => RelayDevice} + end, + TargetMod4 = + case Commit of + true 
-> + case hb_opts:get(relay_allow_commit_request, false, Opts) of + true -> + ?event(debug_relay, {recommitting, TargetMod3}, Opts), + Committed = hb_message:commit(TargetMod3, Opts), + ?event(debug_relay, {relay_call, {committed, Committed}}, Opts), + true = hb_message:verify(Committed, all), + Committed; + false -> + throw(relay_commit_request_not_allowed) + end; + false -> TargetMod3 + end, + ?event(debug_relay, {relay_call, {without_http_params, TargetMod3}}), + ?event(debug_relay, {relay_call, {with_http_params, TargetMod4}}), + true = hb_message:verify(TargetMod4), + ?event(debug_relay, {relay_call, {verified, true}}), + Client = + case hb_maps:get(<<"http-client">>, BaseTarget, not_found, Opts) of + not_found -> hb_opts:get(relay_http_client, Opts); + RequestedClient -> RequestedClient + end, + % Let `hb_http:request/2' handle finding the peer and dispatching the + % request, unless the peer is explicitly given. +``` + +### cast + +Execute a request in the same way as `call/3`, but asynchronously. Always +Preprocess a request to check if it should be relayed to a different node. + +```erlang +cast(M1, M2, Opts) -> + spawn(fun() -> call(M1, M2, Opts) end), + {ok, <<"OK">>}. +``` + +### request + +Execute a request in the same way as `call/3`, but asynchronously. Always +Preprocess a request to check if it should be relayed to a different node. + +```erlang +request(_Msg1, Msg2, Opts) -> + {ok, + #{ + <<"body">> => + [ + #{ <<"device">> => <<"relay@1.0">> }, + #{ + <<"path">> => <<"call">>, + <<"target">> => <<"body">>, + <<"body">> => + hb_ao:get(<<"request">>, Msg2, Opts#{ hashpath => ignore }) + } + ] + } + }. 
+``` + +### call_get_test + +```erlang +call_get_test() -> + application:ensure_all_started([hb]), + {ok, #{<<"body">> := Body}} = + hb_ao:resolve( + #{ + <<"device">> => <<"relay@1.0">>, + <<"method">> => <<"GET">>, + <<"path">> => <<"https://www.google.com/">> + }, + <<"call">>, + #{ protocol => http2 } + ), + ?assertEqual(true, byte_size(Body) > 10_000). +``` + +### relay_nearest_test + +```erlang +relay_nearest_test() -> + Peer1 = hb_http_server:start_node(#{ priv_wallet => W1 = ar_wallet:new() }), + Peer2 = hb_http_server:start_node(#{ priv_wallet => W2 = ar_wallet:new() }), + Address1 = hb_util:human_id(ar_wallet:to_address(W1)), + Address2 = hb_util:human_id(ar_wallet:to_address(W2)), + Peers = [Address1, Address2], + Node = + hb_http_server:start_node(Opts = #{ + store => hb_opts:get(store), + priv_wallet => ar_wallet:new(), + routes => [ + #{ + <<"template">> => <<"/.*">>, + <<"strategy">> => <<"Nearest">>, + <<"nodes">> => [ + #{ + <<"prefix">> => Peer1, + <<"wallet">> => Address1 + }, + #{ + <<"prefix">> => Peer2, + <<"wallet">> => Address2 + } + ] + } + ] + }), + {ok, RelayRes} = + hb_http:get( + Node, + <<"/~relay@1.0/call?relay-path=/~meta@1.0/info">>, + Opts#{ http_only_result => false } + ), + ?event( + {relay_res, + {response, RelayRes}, + {signer, hb_message:signers(RelayRes, Opts)}, + {peers, Peers} + } + ), + HasValidSigner = + lists:any( + fun(Peer) -> + lists:member(Peer, hb_message:signers(RelayRes, Opts)) + end, + Peers + ), + ?assert(HasValidSigner). +``` + +### commit_request_test + +Test that a `relay@1.0/call` correctly commits requests as specified. 
+ +```erlang +commit_request_test() -> + Port = 10000 + rand:uniform(10000), + Wallet = ar_wallet:new(), + Executor = + hb_http_server:start_node( + #{ + port => Port, + force_signed_requests => true + } + ), + Node = + hb_http_server:start_node(#{ + priv_wallet => Wallet, + relay_allow_commit_request => true, + routes => + [ + #{ + <<"template">> => <<"/test-key">>, + <<"strategy">> => <<"Nearest">>, + <<"nodes">> => [ + #{ + <<"wallet">> => hb_util:human_id(Wallet), + <<"prefix">> => Executor + } + ] + } + ], + on => #{ + <<"request">> => + #{ + <<"device">> => <<"router@1.0">>, + <<"path">> => <<"preprocess">>, + <<"commit-request">> => true + } + } + }), + {ok, Res} = + hb_http:get( + Node, + #{ + <<"path">> => <<"test-key">>, + <<"test-key">> => <<"value">> + }, + #{} + ), + ?event({res, Res}), +``` + +--- + +*Generated from [dev_relay.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_relay.erl)* diff --git a/docs/book/src/dev_router.erl.md b/docs/book/src/dev_router.erl.md new file mode 100644 index 000000000..5249f8434 --- /dev/null +++ b/docs/book/src/dev_router.erl.md @@ -0,0 +1,1450 @@ +# dev_router + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_router.erl) + +A device that routes outbound messages from the node to their +appropriate network recipients via HTTP. All messages are initially +routed to a single process per node, which then load-balances them +between downstream workers that perform the actual requests. +The routes for the router are defined in the `routes` key of the `Opts`, +as a precidence-ordered list of maps. The first map that matches the +message will be used to determine the route. +Multiple nodes can be specified as viable for a single route, with the +`Choose` key determining how many nodes to choose from the list (defaulting +to 1). The `Strategy` key determines the load distribution strategy, +which can be one of `Random`, `By-Base`, or `Nearest`. 
The route may also +define additional parallel execution parameters, which are used by the +`hb_http` module to manage control of requests. +The structure of the routes should be as follows: +
+    Node?: The node to route the message to.
+    Nodes?: A list of nodes to route the message to.
+    Strategy?: The load distribution strategy to use.
+    Choose?: The number of nodes to choose from the list.
+    Template?: A message template to match the message against, either as a
+               map or a path regex.
+
+ +--- + +## Exported Functions + +- `info/1` +- `info/3` +- `match/3` +- `preprocess/3` +- `register/3` +- `route/2` +- `route/3` +- `routes/3` + +--- + +### info + +A device that routes outbound messages from the node to their +Exported function for getting device info, controls which functions are + +```erlang +info(_) -> + #{ exports => [info, routes, route, match, register, preprocess] }. +``` + +### info + +HTTP info response providing information about this device + +```erlang +info(_Msg1, _Msg2, _Opts) -> + InfoBody = #{ + <<"description">> => <<"Router device for handling outbound message routing">>, + <<"version">> => <<"1.0">>, + <<"api">> => #{ + <<"info">> => #{ + <<"description">> => <<"Get device info">> + }, + <<"routes">> => #{ + <<"description">> => <<"Get or add routes">>, + <<"method">> => <<"GET or POST">> + }, + <<"route">> => #{ + <<"description">> => <<"Find a route for a message">>, + <<"required_params">> => #{ + <<"route-path">> => <<"Path to route">> + } + }, + <<"match">> => #{ + <<"description">> => <<"Match a message against available routes">> + }, + <<"register">> => #{ + <<"description">> => <<"Register a route with a remote router node">>, + <<"node-message">> => #{ + <<"routes">> => + [ + #{ + <<"registration-peer">> => <<"Location of the router peer">>, + <<"prefix">> => <<"Prefix for the route">>, + <<"price">> => <<"Price for the route">>, + <<"template">> => <<"Template to match the route">> + } + ] + } + }, + <<"preprocess">> => #{ + <<"description">> => <<"Preprocess a request to check if it should be relayed">> + } + } + }, + {ok, InfoBody}. +``` + +### register + +Register function that allows telling the current node to register +Device function that returns all known routes. 
+ +```erlang +register(_M1, M2, Opts) -> + %% Extract all required parameters from options + %% These values will be used to construct the registration message + RouterOpts = hb_opts:get(router_opts, #{}, Opts), + RouterRegMsgs = + case hb_maps:get(<<"offered">>, RouterOpts, #{}, Opts) of + RegList when is_list(RegList) -> RegList; + RegMsg when is_map(RegMsg) -> [RegMsg] + end, + lists:foreach( + fun(RegMsg) -> + RouterNode = + hb_ao:get( + <<"registration-peer">>, + RegMsg, + not_found, + Opts + ), + {ok, SigOpts} = + case hb_ao:get(<<"as">>, M2, not_found, Opts) of + not_found -> {ok, Opts}; + AsID -> hb_opts:as(AsID, Opts) + end, + % Post registration request to the router node + % The message includes our route details and attestation + % for verification + {ok, Res} = + hb_http:post( + RouterNode, + <<"/~router@1.0/routes">>, + hb_message:commit( + #{ + <<"subject">> => <<"self">>, + <<"action">> => <<"register">>, + <<"route">> => RegMsg + }, + SigOpts + ), + Opts + ), + ?event({registered, {msg, M2}, {res, Res}}), + {ok, <<"Route registered.">>} + end, + RouterRegMsgs + ), + {ok, <<"Routes registered.">>}. +``` + +### routes + +Register function that allows telling the current node to register +Device function that returns all known routes. + +```erlang +routes(M1, M2, Opts) -> + ?event({routes_msg, M1, M2}), + Routes = load_routes(Opts), + ?event({routes, Routes}), + case hb_ao:get(<<"method">>, M2, Opts) of + <<"POST">> -> + RouterOpts = hb_opts:get(router_opts, #{}, Opts), + ?event(debug_route_reg, {router_opts, RouterOpts}), + case hb_maps:get(<<"registrar">>, RouterOpts, not_found, Opts) of + not_found -> + % There is no registrar; register if and only if the message + % is signed by an authorized operator. +``` + +### route + +Find the appropriate route for the given message. If we are able to + +```erlang +route(Msg, Opts) -> route(undefined, Msg, Opts). +``` + +### route + +Find the appropriate route for the given message. 
If we are able to + +```erlang +route(_, Msg, Opts) -> + Routes = load_routes(Opts), + R = match_routes(Msg, Routes, Opts), + ?event({find_route, {msg, Msg}, {routes, Routes}, {res, R}}), + case (R =/= no_matches) andalso hb_ao:get(<<"node">>, R, Opts) of + false -> {error, no_matches}; + Node when is_binary(Node) -> {ok, Node}; + Node when is_map(Node) -> apply_route(Msg, Node, Opts); + not_found -> + ModR = apply_routes(Msg, R, Opts), + case hb_ao:get(<<"strategy">>, R, Opts) of + not_found -> {ok, ModR}; + <<"All">> -> {ok, ModR}; + Strategy -> + ChooseN = hb_ao:get(<<"choose">>, R, 1, Opts), + % Get the first element of the path -- the `base' message + % of the request. +``` + +### load_routes + +Load the current routes for the node. Allows either explicit routes from + +```erlang +load_routes(Opts) -> + RouterOpts = hb_opts:get(router_opts, #{}, Opts), + case hb_maps:get(<<"provider">>, RouterOpts, not_found, Opts) of + not_found -> hb_opts:get(routes, [], Opts); + RoutesProvider -> + ProviderMsgs = hb_singleton:from(RoutesProvider, Opts), + ?event({<<"provider">>, ProviderMsgs}), + case hb_ao:resolve_many(ProviderMsgs, Opts) of + {ok, Routes} -> hb_cache:ensure_all_loaded(Routes, Opts); + {error, Error} -> throw({routes, routes_provider_failed, Error}) + end + end. +``` + +### extract_base + +Extract the base message ID from a request message. Produces a single + +```erlang +extract_base(#{ <<"path">> := Path }, Opts) -> + extract_base(Path, Opts); +``` + +### extract_base + +Extract the base message ID from a request message. Produces a single + +```erlang +extract_base(RawPath, Opts) when is_binary(RawPath) -> + BasePath = hb_path:hd(#{ <<"path">> => RawPath }, Opts), + case ?IS_ID(BasePath) of + true -> BasePath; + false -> + case binary:split(BasePath, [<<"\~">>, <<"?">>, <<"&">>], [global]) of + [BaseMsgID|_] when ?IS_ID(BaseMsgID) -> BaseMsgID; + _ -> hb_crypto:sha256(BasePath) + end + end. 
+``` + +### apply_routes + +Generate a `uri` key for each node in a route. +Apply a node map's rules for transforming the path of the message. + +```erlang +apply_routes(Msg, R, Opts) -> + Nodes = hb_ao:get(<<"nodes">>, R, Opts), + NodesWithRouteApplied = + lists:map( + fun(N) -> + ?event({apply_route, {msg, Msg}, {node, N}}), + case apply_route(Msg, N, Opts) of + {ok, URI} when is_binary(URI) -> N#{ <<"uri">> => URI }; + {ok, RMsg} -> hb_maps:merge(N, RMsg); + {error, _} -> N + end + end, + hb_util:message_to_ordered_list(Nodes, Opts) + ), + ?event({nodes_after_apply, NodesWithRouteApplied}), + R#{ <<"nodes">> => NodesWithRouteApplied }. +``` + +### apply_route + +Generate a `uri` key for each node in a route. +Apply a node map's rules for transforming the path of the message. + +```erlang +apply_route(Msg, Route, Opts) -> + % LoadedRoute = hb_cache:ensure_all_loaded(Route, Opts), + RouteOpts = hb_maps:get(<<"opts">>, Route, #{}), + {ok, #{ + <<"opts">> => RouteOpts, + <<"uri">> => + hb_util:ok( + do_apply_route( + Msg, + hb_maps:without([<<"opts">>], Route, Opts), + Opts + ) + ) + }}. 
+``` + +### do_apply_route + +```erlang +do_apply_route(#{ <<"route-path">> := Path }, R, Opts) -> + do_apply_route(#{ <<"path">> => Path }, R, Opts); +``` + +### do_apply_route + +```erlang +do_apply_route(#{ <<"path">> := RawPath }, #{ <<"prefix">> := RawPrefix }, Opts) -> + Path = hb_cache:ensure_loaded(RawPath, Opts), + Prefix = hb_cache:ensure_loaded(RawPrefix, Opts), + {ok, <>}; +``` + +### do_apply_route + +```erlang +do_apply_route(#{ <<"path">> := RawPath }, #{ <<"suffix">> := RawSuffix }, Opts) -> + Path = hb_cache:ensure_loaded(RawPath, Opts), + Suffix = hb_cache:ensure_loaded(RawSuffix, Opts), + {ok, <>}; +``` + +### do_apply_route + +```erlang +do_apply_route( + #{ <<"path">> := RawPath }, + #{ <<"match">> := RawMatch, <<"with">> := RawWith }, + Opts) -> + Path = hb_cache:ensure_loaded(RawPath, Opts), + Match = hb_cache:ensure_loaded(RawMatch, Opts), + With = hb_cache:ensure_loaded(RawWith, Opts), + % Apply the regex to the path and replace the first occurrence. +``` + +### match + +Find the first matching template in a list of known routes. Allows the + +```erlang +match(Base, Req, Opts) -> + ?event(debug_preprocess, + {matching_routes, + {base, Base}, + {req, Req} + } + ), + TargetPath = hb_util:find_target_path(Req, Opts), + Match = + match_routes( + Req#{ <<"path">> => TargetPath }, + hb_ao:get(<<"routes">>, {as, <<"message@1.0">>, Base}, [], Opts), + Opts + ), + case Match of + no_matches -> {error, no_matching_route}; + _ -> {ok, Match} + end. +``` + +### match_routes + +```erlang +match_routes(ToMatch, Routes, Opts) -> + match_routes( + hb_cache:ensure_all_loaded(ToMatch, Opts), + hb_cache:ensure_all_loaded(Routes, Opts), + hb_ao:keys(hb_ao:normalize_keys(Routes, Opts)), + Opts + ). +``` + +### match_routes + +```erlang +match_routes(#{ <<"path">> := Explicit = <<"http://", _/binary>> }, _, _, _) -> + % If the route is an explicit HTTP URL, we can match it directly. 
+``` + +### match_routes + +```erlang +match_routes(#{ <<"path">> := Explicit = <<"https://", _/binary>> }, _, _, _) -> + #{ <<"node">> => Explicit, <<"reference">> => <<"explicit">> }; +``` + +### match_routes + +```erlang +match_routes(_, _, [], _) -> no_matches; +``` + +### match_routes + +```erlang +match_routes(ToMatch, Routes, [XKey|Keys], Opts) -> + XM = hb_ao:get(XKey, Routes, Opts), + Template = + hb_ao:get( + <<"template">>, + XM, + #{}, + Opts#{ hashpath => ignore } + ), + case hb_util:template_matches(ToMatch, Template, Opts) of + true -> XM#{ <<"reference">> => hb_path:to_binary([<<"routes">>, XKey]) }; + false -> match_routes(ToMatch, Routes, Keys, Opts) + end. +``` + +### choose + +Implements the load distribution strategies if given a cluster. + +```erlang +choose(0, _, _, _, _) -> []; +``` + +### choose + +Implements the load distribution strategies if given a cluster. + +```erlang +choose(N, <<"Random">>, _, Nodes, _Opts) -> + Node = lists:nth(rand:uniform(length(Nodes)), Nodes), + [Node | choose(N - 1, <<"Random">>, nop, lists:delete(Node, Nodes), _Opts)]; +``` + +### choose + +Implements the load distribution strategies if given a cluster. + +```erlang +choose(N, <<"By-Weight">>, _, Nodes, Opts) -> + ?event({nodes, Nodes}), + NodesWithWeight = + [ + { Node, hb_util:float(hb_ao:get(<<"weight">>, Node, Opts)) } + || + Node <- Nodes + ], + Node = hb_util:weighted_random(NodesWithWeight), + [ + Node + | + choose(N - 1, <<"By-Weight">>, nop, lists:delete(Node, Nodes), Opts) + ]; +``` + +### choose + +Implements the load distribution strategies if given a cluster. + +```erlang +choose(N, <<"By-Base">>, Hashpath, Nodes, Opts) when is_binary(Hashpath) -> + choose(N, <<"By-Base">>, binary_to_bignum(Hashpath), Nodes, Opts); +``` + +### choose + +Implements the load distribution strategies if given a cluster. 
+ +```erlang +choose(N, <<"By-Base">>, HashInt, Nodes, Opts) -> + Node = lists:nth((HashInt rem length(Nodes)) + 1, Nodes), + [ + Node + | + choose( + N - 1, + <<"By-Base">>, + HashInt, + lists:delete(Node, Nodes), + Opts + ) + ]; +``` + +### choose + +Implements the load distribution strategies if given a cluster. + +```erlang +choose(N, <<"Nearest">>, HashPath, Nodes, Opts) -> + BareHashPath = hb_util:native_id(HashPath), + NodesWithDistances = + lists:map( + fun(Node) -> + Wallet = hb_ao:get(<<"wallet">>, Node, Opts), + DistanceScore = + field_distance( + hb_util:native_id(Wallet), + BareHashPath + ), + {Node, DistanceScore} + end, + Nodes + ), + lists:reverse( + element(1, + lists:foldl( + fun(_, {Current, Remaining}) -> + Res = {Lowest, _} = lowest_distance(Remaining), + {[Lowest|Current], lists:delete(Res, Remaining)} + end, + {[], NodesWithDistances}, + lists:seq(1, N) + ) + ) + ). +``` + +### field_distance + +Calculate the minimum distance between two numbers + +```erlang +field_distance(A, B) when is_binary(A) -> + field_distance(binary_to_bignum(A), B); +``` + +### field_distance + +Calculate the minimum distance between two numbers + +```erlang +field_distance(A, B) when is_binary(B) -> + field_distance(A, binary_to_bignum(B)); +``` + +### field_distance + +Calculate the minimum distance between two numbers + +```erlang +field_distance(A, B) -> + AbsDiff = abs(A - B), + min(AbsDiff, (1 bsl 256) - AbsDiff). +``` + +### lowest_distance + +Find the node with the lowest distance to the given hashpath. + +```erlang +lowest_distance(Nodes) -> lowest_distance(Nodes, {undefined, infinity}). +``` + +### lowest_distance + +Find the node with the lowest distance to the given hashpath. + +```erlang +lowest_distance([], X) -> X; +``` + +### lowest_distance + +Find the node with the lowest distance to the given hashpath. 
+ +```erlang +lowest_distance([{Node, Distance}|Nodes], {CurrentNode, CurrentDistance}) -> + case Distance of + infinity -> lowest_distance(Nodes, {Node, Distance}); + _ when Distance < CurrentDistance -> + lowest_distance(Nodes, {Node, Distance}); + _ -> lowest_distance(Nodes, {CurrentNode, CurrentDistance}) + end. +``` + +### binary_to_bignum + +Cast a human-readable or native-encoded ID to a big integer. + +```erlang +binary_to_bignum(Bin) when ?IS_ID(Bin) -> + << Num:256/unsigned-integer >> = hb_util:native_id(Bin), + Num. +``` + +### preprocess + +Preprocess a request to check if it should be relayed to a different node. + +```erlang +preprocess(Msg1, Msg2, Opts) -> + Req = hb_ao:get(<<"request">>, Msg2, Opts#{ hashpath => ignore }), + ?event(debug_preprocess, {called_preprocess,Req}), + TemplateRoutes = load_routes(Opts), + ?event(debug_preprocess, {template_routes, TemplateRoutes}), + Res = hb_http:message_to_request(Req, Opts), + ?event(debug_preprocess, {match, Res}), + case Res of + {error, _} -> + ?event(debug_preprocess, preprocessor_did_not_match), + case hb_opts:get(router_preprocess_default, <<"local">>, Opts) of + <<"local">> -> + ?event(debug_preprocess, executing_locally), + {ok, #{ + <<"body">> => + hb_ao:get(<<"body">>, Msg2, Opts#{ hashpath => ignore }) + }}; + <<"error">> -> + ?event(debug_preprocess, preprocessor_returning_error), + {ok, #{ + <<"body">> => + [#{ + <<"status">> => 404, + <<"message">> => + <<"No matching template found in the given routes.">> + }] + }} + end; + {ok, _Method, Node, _Path, _MsgWithoutMeta, _ReqOpts} -> + ?event(debug_preprocess, {matched_route, {explicit, Res}}), + CommitRequest = + hb_util:atom( + hb_ao:get_first( + [ + {Msg1, <<"commit-request">>} + ], + false, + Opts + ) + ), + MaybeCommit = + case CommitRequest of + true -> #{ <<"commit-request">> => true }; + false -> #{} + end, + % Construct a request to `relay@1.0/call' which will proxy a request + % to `apply@1.0/body' with the original request body as 
the argument. +``` + +### test_provider_test + +```erlang +test_provider_test() -> + Node = + hb_http_server:start_node(Opts = + #{ + router_opts => #{ + <<"provider">> => #{ + <<"path">> => <<"/test-key/routes">>, + <<"test-key">> => #{ + <<"routes">> => [ + #{ + <<"template">> => <<"*">>, + <<"node">> => <<"testnode">> + } + ] + } + } + }, + store => #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST">> + } + } + ), + ?assertEqual( + {ok, <<"testnode">>}, + hb_http:get(Node, <<"/~router@1.0/routes/1/node">>, Opts) + ). +``` + +### dynamic_provider_test + +```erlang +dynamic_provider_test() -> + {ok, Script} = file:read_file("test/test.lua"), + Node = hb_http_server:start_node(#{ + router_opts => #{ + <<"provider">> => #{ + <<"device">> => <<"lua@5.3a">>, + <<"path">> => <<"provider">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => Script + }, + <<"node">> => <<"test-dynamic-node">> + } + }, + priv_wallet => ar_wallet:new() + }), + ?assertEqual( + {ok, <<"test-dynamic-node">>}, + hb_http:get(Node, <<"/~router@1.0/routes/1/node">>, #{}) + ). +``` + +### local_process_provider_test_ + +```erlang +local_process_provider_test_() -> + {timeout, 30, fun local_process_provider/0}. 
+``` + +### local_process_provider + +```erlang +local_process_provider() -> + {ok, Script} = file:read_file("test/test.lua"), + Node = hb_http_server:start_node(#{ + priv_wallet => ar_wallet:new(), + router_opts => #{ + <<"provider">> => #{ + <<"path">> => <<"/router~node-process@1.0/now/known-routes">> + } + }, + node_processes => #{ + <<"router">> => #{ + <<"device">> => <<"process@1.0">>, + <<"execution-device">> => <<"lua@5.3a">>, + <<"scheduler-device">> => <<"scheduler@1.0">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"body">> => Script + }, + <<"node">> => <<"router-node">>, + <<"function">> => <<"compute_routes">> + } + } + }), + ?assertEqual( + {ok, <<"test1">>}, + hb_http:get(Node, <<"/~router@1.0/routes/1/template">>, #{}) + ), + % Query the route 10 times with the same path. This should yield 2 different + % results, as the route provider should choose 1 node of a set of 2 at random. +``` + +### local_dynamic_router_test_ + +Example of a Lua module being used as the `<<"provider">>` for a + +```erlang +local_dynamic_router_test_() -> + {timeout, 60, fun local_dynamic_router/0}. 
+``` + +### local_dynamic_router + +```erlang +local_dynamic_router() -> + BenchRoutes = 50, + TestNodes = 5, + {ok, Module} = file:read_file(<<"scripts/dynamic-router.lua">>), + Node = hb_http_server:start_node(Opts = #{ + store => hb_test_utils:test_store(), + priv_wallet => ar_wallet:new(), + router_opts => #{ + <<"registrar">> => #{ + <<"device">> => <<"router@1.0">>, + <<"path">> => <<"/router1~node-process@1.0/schedule">> + }, + <<"provider">> => #{ + <<"path">> => + RouteProvider = + <<"/router1~node-process@1.0/compute/routes~message@1.0">> + } + }, + node_processes => #{ + <<"router1">> => #{ + <<"device">> => <<"process@1.0">>, + <<"execution-device">> => <<"lua@5.3a">>, + <<"scheduler-device">> => <<"scheduler@1.0">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"name">> => <<"dynamic-router">>, + <<"body">> => Module + }, + % Set module-specific factors for the test + <<"pricing-weight">> => 9, + <<"performance-weight">> => 1, + <<"score-preference">> => 4 + } + } + }), + Store = hb_opts:get(store, no_store, Opts), + ?event(debug_dynrouter, {store, Store}), + % Register workers with the dynamic router with varied prices. +``` + +### dynamic_router_pricing_test_ + +Test that verifies dynamic router functionality and template-based pricing. + +```erlang +dynamic_router_pricing_test_() -> + {timeout, 30, fun dynamic_router_pricing/0}. 
+``` + +### dynamic_router_pricing + +```erlang +dynamic_router_pricing() -> + {ok, Module} = file:read_file(<<"scripts/dynamic-router.lua">>), + {ok, ClientScript} = file:read_file("scripts/hyper-token-p4-client.lua"), + {ok, TokenScript} = file:read_file("scripts/hyper-token.lua"), + {ok, ProcessScript} = file:read_file("scripts/hyper-token-p4.lua"), + ExecWallet = hb:wallet(<<"test/admissible-report-wallet.json">>), + ProxyWallet = ar_wallet:new(), + ExecNodeAddr = hb_util:human_id(ar_wallet:to_address(ExecWallet)), + Processor = + #{ + <<"device">> => <<"p4@1.0">>, + <<"ledger-device">> => <<"lua@5.3a">>, + <<"pricing-device">> => <<"simple-pay@1.0">>, + <<"ledger-path">> => <<"/ledger2~node-process@1.0">>, + <<"module">> => #{ + <<"content-type">> => <<"text/x-lua">>, + <<"name">> => <<"scripts/hyper-token-p4-client.lua">>, + <<"body">> => ClientScript + } + }, + ExecNode = + hb_http_server:start_node( + ExecOpts = #{ + priv_wallet => ExecWallet, + port => 10009, + store => hb_test_utils:test_store(), + node_processes => #{ + <<"ledger2">> => #{ + <<"device">> => <<"process@1.0">>, + <<"execution-device">> => <<"lua@5.3a">>, + <<"scheduler-device">> => <<"scheduler@1.0">>, + <<"authority-match">> => 1, + <<"admin">> => ExecNodeAddr, + <<"token">> => + <<"iVplXcMZwiu5mn0EZxY-PxAkz_A9KOU0cmRE0rwej3E">>, + <<"module">> => [ + #{ + <<"content-type">> => <<"text/x-lua">>, + <<"name">> => <<"scripts/hyper-token.lua">>, + <<"body">> => TokenScript + }, + #{ + <<"content-type">> => <<"text/x-lua">>, + <<"name">> => <<"scripts/hyper-token-p4.lua">>, + <<"body">> => ProcessScript + } + ], + <<"authority">> => ExecNodeAddr + } + }, + p4_recipient => ExecNodeAddr, + p4_non_chargable_routes => [ + #{ <<"template">> => <<"/*~node-process@1.0/*">> }, + #{ <<"template">> => <<"/*~router@1.0/*">> } + ], + on => #{ + <<"request">> => Processor, + <<"response">> => Processor + }, + node_process_spawn_codec => <<"ans104@1.0">>, + router_opts => #{ + <<"offered">> => [ + #{ + 
<<"registration-peer">> => <<"http://localhost:10010">>, + <<"template">> => <<"/c">>, + <<"prefix">> => <<"http://localhost:10009">>, + <<"price">> => 0 + }, + #{ + <<"registration-peer">> => <<"http://localhost:10010">>, + <<"template">> => <<"/b">>, + <<"prefix">> => <<"http://localhost:10009">>, + <<"price">> => 1 + } + ] + } + } + ), + RouterNode = hb_http_server:start_node(#{ + port => 10010, + store => hb_test_utils:test_store(), + priv_wallet => ProxyWallet, + on => + #{ + <<"request">> => #{ + <<"device">> => <<"router@1.0">>, + <<"path">> => <<"preprocess">>, + <<"commit-request">> => true + } + }, + router_opts => #{ + <<"provider">> => #{ + <<"path">> => + <<"/router2~node-process@1.0/compute/routes~message@1.0">> + }, + <<"registrar">> => #{ + <<"path">> => <<"/router2~node-process@1.0">> + }, + <<"registrar-path">> => <<"schedule">> + }, + relay_allow_commit_request => true, + node_processes => #{ + <<"router2">> => #{ + <<"type">> => <<"Process">>, + <<"device">> => <<"process@1.0">>, + <<"execution-device">> => <<"lua@5.3a">>, + <<"scheduler-device">> => <<"scheduler@1.0">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"module">> => <<"dynamic-router">>, + <<"body">> => Module + }, + % Set module-specific factors for the test + <<"pricing-weight">> => 9, + <<"performance-weight">> => 1, + <<"score-preference">> => 4, + <<"is-admissible">> => #{ + <<"path">> => <<"default">>, + <<"default">> => <<"false">> + }, + <<"trusted-peer">> => ExecNodeAddr + } + } + }), + ?event( + debug_load_routes, + {node_message, hb_http:get(RouterNode, <<"/~meta@1.0/info">>, #{})} + ), + % Register workers with the dynamic router with varied prices. +``` + +### dynamic_router_test_ + +Example of a Lua module being used as the `<<"provider">>` for a + +```erlang +dynamic_router_test_() -> + {timeout, 30, fun dynamic_router/0}. 
+``` + +### dynamic_router + +```erlang +dynamic_router() -> + {ok, Module} = file:read_file(<<"scripts/dynamic-router.lua">>), + ExecWallet = hb:wallet(<<"test/admissible-report-wallet.json">>), + ProxyWallet = ar_wallet:new(), + ExecNode = + hb_http_server:start_node( + ExecOpts = #{ priv_wallet => ExecWallet, store => hb_test_utils:test_store() } + ), + Node = hb_http_server:start_node(ProxyOpts = #{ + snp_trusted => [ + #{ + <<"vcpus">> => 32, + <<"vcpu_type">> => 5, + <<"vmm_type">> => 1, + <<"guest_features">> => 1, + <<"firmware">> => + <<"b8c5d4082d5738db6b0fb0294174992738645df70c44cdecf7fad3a62244b788e7e408c582ee48a74b289f3acec78510">>, + <<"kernel">> => + <<"69d0cd7d13858e4fcef6bc7797aebd258730f215bc5642c4ad8e4b893cc67576">>, + <<"initrd">> => + <<"544045560322dbcd2c454bdc50f35edf0147829ec440e6cb487b4a1503f923c1">>, + <<"append">> => + <<"95a34faced5e487991f9cc2253a41cbd26b708bf00328f98dddbbf6b3ea2892e">> + } + ], + store => hb_test_utils:test_store(), + priv_wallet => ProxyWallet, + on => + #{ + <<"request">> => #{ + <<"device">> => <<"router@1.0">>, + <<"path">> => <<"preprocess">> + } + }, + router_opts => #{ + <<"provider">> => #{ + <<"path">> => <<"/router~node-process@1.0/compute/routes~message@1.0">> + } + }, + node_processes => #{ + <<"router">> => #{ + <<"type">> => <<"Process">>, + <<"device">> => <<"process@1.0">>, + <<"execution-device">> => <<"lua@5.3a">>, + <<"scheduler-device">> => <<"scheduler@1.0">>, + <<"module">> => #{ + <<"content-type">> => <<"application/lua">>, + <<"module">> => <<"dynamic-router">>, + <<"body">> => Module + }, + % Set module-specific factors for the test + <<"pricing-weight">> => 9, + <<"performance-weight">> => 1, + <<"score-preference">> => 4, + <<"is-admissible">> => #{ + <<"device">> => <<"snp@1.0">>, + <<"path">> => <<"verify">> + } + } + } + }), % mergeRight this takes our defined Opts and merges them into the + % node opts configs. 
+``` + +### dynamic_routing_by_performance_test_ + +Demonstrates routing tables being dynamically created and adjusted + +```erlang +dynamic_routing_by_performance_test_() -> + {timeout, 60, fun dynamic_routing_by_performance/0}. +``` + +### dynamic_routing_by_performance + +```erlang +dynamic_routing_by_performance() -> + % Setup test parameters + TestNodes = 4, + BenchRoutes = 16, + TestPath = <<"/worker">>, + % Start the main node for the test, loading the `dynamic-router' script and + % the http_monitor to generate performance messages. +``` + +### weighted_random_strategy_test + +```erlang +weighted_random_strategy_test() -> + Nodes = + [ + #{ <<"host">> => <<"1">>, <<"weight">> => 1 }, + #{ <<"host">> => <<"2">>, <<"weight">> => 99 } + ], + SimRes = simulate(1000, 1, Nodes, <<"By-Weight">>), + [HitsOnFirstHost, _] = simulation_distribution(SimRes, Nodes), + ProportionOfFirstHost = HitsOnFirstHost / 1000, + ?event(debug_weighted_random, {proportion_of_first_host, ProportionOfFirstHost}), + ?assert(ProportionOfFirstHost < 0.05), + ?assert(ProportionOfFirstHost >= 0.0001). +``` + +### strategy_suite_test_ + +```erlang +strategy_suite_test_() -> + lists:map( + fun(Strategy) -> + {foreach, + fun() -> ok end, + fun(_) -> ok end, + [ + { + binary_to_list(Strategy) ++ ": " ++ Desc, + fun() -> Test(Strategy) end + } + || + {Desc, Test} <- [ + {"unique", fun unique_test/1}, + {"choose 1", fun choose_1_test/1}, + {"choose n", fun choose_n_test/1} + ] + ] + } + end, + [<<"Random">>, <<"By-Base">>, <<"Nearest">>] + ). +``` + +### by_base_determinism_test + +Ensure that `By-Base` always chooses the same node for the same + +```erlang +by_base_determinism_test() -> + FirstN = 5, + Nodes = generate_nodes(5), + HashPaths = generate_hashpaths(100), + Simulation = simulate(HashPaths, FirstN, Nodes, <<"By-Base">>), + Simulation2 = simulate(HashPaths, FirstN, Nodes, <<"By-Base">>), + ?assertEqual(Simulation, Simulation2). 
+``` + +### unique_test + +```erlang +unique_test(Strategy) -> + TestSize = 1, + FirstN = 5, + Nodes = generate_nodes(5), + Simulation = simulate(TestSize, FirstN, Nodes, Strategy), + unique_nodes(Simulation). +``` + +### choose_1_test + +```erlang +choose_1_test(Strategy) -> + TestSize = 1500, + Nodes = generate_nodes(20), + Simulation = simulate(TestSize, 1, Nodes, Strategy), + within_norms(Simulation, Nodes, TestSize). +``` + +### choose_n_test + +```erlang +choose_n_test(Strategy) -> + TestSize = 1500, + FirstN = 5, + Nodes = generate_nodes(20), + Simulation = simulate(TestSize, FirstN, Nodes, Strategy), + within_norms(Simulation, Nodes, TestSize * 5), + unique_nodes(Simulation). +``` + +### unique_nodes + +```erlang +unique_nodes(Simulation) -> + lists:foreach( + fun(SelectedNodes) -> + lists:foreach( + fun(Node) -> + ?assertEqual(1, hb_util:count(Node, SelectedNodes)) + end, + SelectedNodes + ) + end, + Simulation + ). +``` + +### route_template_message_matches_test + +```erlang +route_template_message_matches_test() -> + Routes = [ + #{ + <<"template">> => #{ <<"other-key">> => <<"other-value">> }, + <<"node">> => <<"incorrect">> + }, + #{ + <<"template">> => #{ <<"special-key">> => <<"special-value">> }, + <<"node">> => <<"correct">> + } + ], + ?assertEqual( + {ok, <<"correct">>}, + route( + #{ <<"path">> => <<"/">>, <<"special-key">> => <<"special-value">> }, + #{ routes => Routes } + ) + ), + ?assertEqual( + {error, no_matches}, + route( + #{ <<"path">> => <<"/">>, <<"special-key">> => <<"special-value2">> }, + #{ routes => Routes } + ) + ), + ?assertEqual( + {ok, <<"fallback">>}, + route( + #{ <<"path">> => <<"/">> }, + #{ routes => Routes ++ [#{ <<"node">> => <<"fallback">> }] } + ) + ). 
+``` + +### route_regex_matches_test + +```erlang +route_regex_matches_test() -> + Routes = [ + #{ + <<"template">> => <<"/.*/compute">>, + <<"node">> => <<"incorrect">> + }, + #{ + <<"template">> => <<"/.*/schedule">>, + <<"node">> => <<"correct">> + } + ], + ?assertEqual( + {ok, <<"correct">>}, + route(#{ <<"path">> => <<"/abc/schedule">> }, #{ routes => Routes }) + ), + ?assertEqual( + {ok, <<"correct">>}, + route(#{ <<"path">> => <<"/a/b/c/schedule">> }, #{ routes => Routes }) + ), + ?assertEqual( + {error, no_matches}, + route(#{ <<"path">> => <<"/a/b/c/bad-key">> }, #{ routes => Routes }) + ). +``` + +### explicit_route_test + +```erlang +explicit_route_test() -> + Routes = [ + #{ + <<"template">> => <<"*">>, + <<"node">> => <<"unimportant">> + } + ], + ?assertEqual( + {ok, <<"https://google.com">>}, + route( + #{ <<"path">> => <<"https://google.com">> }, + #{ routes => Routes } + ) + ), + ?assertEqual( + {ok, <<"http://google.com">>}, + route( + #{ <<"path">> => <<"http://google.com">> }, + #{ routes => Routes } + ) + ), + % Test that `route-path' can also be used to specify the path, via an AO + % call. +``` + +### device_call_from_singleton_test + +```erlang +device_call_from_singleton_test() -> + % Try with a real-world example, taken from a GET request to the router. +``` + +### get_routes_test + +```erlang +get_routes_test() -> + Node = hb_http_server:start_node( + #{ + force_signed => false, + routes => [ + #{ + <<"template">> => <<"*">>, + <<"node">> => <<"our_node">>, + <<"priority">> => 10 + } + ] + } + ), + Res = hb_http:get(Node, <<"/~router@1.0/routes/1/node">>, #{}), + ?event({get_routes_test, Res}), + {ok, Recvd} = Res, + ?assertMatch(<<"our_node">>, Recvd). 
+``` + +### add_route_test + +Test that the `preprocess/3` function re-routes a request to remote + +```erlang +add_route_test() -> + Owner = ar_wallet:new(), + Node = hb_http_server:start_node( + #{ + force_signed => false, + routes => [ + #{ + <<"template">> => <<"/some/path">>, + <<"node">> => <<"old">>, + <<"priority">> => 10 + } + ], + operator => hb_util:encode(ar_wallet:to_address(Owner)) + } + ), + Res = + hb_http:post( + Node, + hb_message:commit( + #{ + <<"path">> => <<"/~router@1.0/routes">>, + <<"template">> => <<"/some/new/path">>, + <<"node">> => <<"new">>, + <<"priority">> => 15 + }, + Owner + ), + #{} + ), + ?event({post_res, Res}), + ?assertMatch({ok, <<"Route added.">>}, Res), + GetRes = hb_http:get(Node, <<"/~router@1.0/routes/2/node">>, #{}), + ?event({get_res, GetRes}), + {ok, Recvd} = GetRes, + ?assertMatch(<<"new">>, Recvd). +``` + +### request_hook_reroute_to_nearest_test + +Test that the `preprocess/3` function re-routes a request to remote + +```erlang +request_hook_reroute_to_nearest_test() -> + Peer1 = hb_http_server:start_node(#{ priv_wallet => W1 = ar_wallet:new() }), + Peer2 = hb_http_server:start_node(#{ priv_wallet => W2 = ar_wallet:new() }), + Address1 = hb_util:human_id(ar_wallet:to_address(W1)), + Address2 = hb_util:human_id(ar_wallet:to_address(W2)), + Peers = [Address1, Address2], + Node = + hb_http_server:start_node(Opts = #{ + priv_wallet => ar_wallet:new(), + routes => + [ + #{ + <<"template">> => <<"/.*/.*/.*">>, + <<"strategy">> => <<"Nearest">>, + <<"nodes">> => + lists:map( + fun({Address, Node}) -> + #{ + <<"prefix">> => Node, + <<"wallet">> => Address + } + end, + [ + {Address1, Peer1}, + {Address2, Peer2} + ] + ) + } + ], + on => #{ <<"request">> => #{ <<"device">> => <<"relay@1.0">> } } + }), + Res = + lists:map( + fun(_) -> + hb_util:ok( + hb_http:get( + Node, + <<"/~meta@1.0/info/address">>, + Opts#{ http_only_result => true } + ) + ) + end, + lists:seq(1, 3) + ), + ?event(debug_test, + {res, { + {response, Res}, + 
{signers, hb_message:signers(Res, Opts)} + }} + ), + HasValidSigner = lists:any( + fun(Peer) -> + lists:member(Peer, Res) + end, + Peers + ), + ?assert(HasValidSigner). +``` + +### generate_nodes + +```erlang +generate_nodes(N) -> + [ + #{ + <<"host">> => + <<"http://localhost:", (integer_to_binary(Port))/binary>>, + <<"wallet">> => hb_util:encode(crypto:strong_rand_bytes(32)) + } + || + Port <- lists:seq(1, N) + ]. +``` + +### generate_hashpaths + +```erlang +generate_hashpaths(Runs) -> + [ + hb_util:encode(crypto:strong_rand_bytes(32)) + || + _ <- lists:seq(1, Runs) + ]. +``` + +### simulate + +```erlang +simulate(Runs, ChooseN, Nodes, Strategy) when is_integer(Runs) -> + simulate( + generate_hashpaths(Runs), + ChooseN, + Nodes, + Strategy + ); +``` + +### simulate + +```erlang +simulate(HashPaths, ChooseN, Nodes, Strategy) -> + [ + choose(ChooseN, Strategy, HashPath, Nodes, #{}) + || + HashPath <- HashPaths + ]. +``` + +### simulation_occurences + +```erlang +simulation_occurences(SimRes, Nodes) -> + lists:foldl( + fun(NearestNodes, Acc) -> + lists:foldl( + fun(Node, Acc2) -> + Acc2#{ Node => hb_maps:get(Node, Acc2, 0, #{}) + 1 } + end, + Acc, + NearestNodes + ) + end, + #{ Node => 0 || Node <- Nodes }, + SimRes + ). +``` + +### simulation_distribution + +```erlang +simulation_distribution(SimRes, Nodes) -> + hb_maps:values(simulation_occurences(SimRes, Nodes), #{}). +``` + +### within_norms + +```erlang +within_norms(SimRes, Nodes, TestSize) -> + Distribution = simulation_distribution(SimRes, Nodes), + % Check that the mean is `TestSize/length(Nodes)' + Mean = hb_util:mean(Distribution), + ?assert(Mean == (TestSize / length(Nodes))), + % Check that the highest count is not more than 3 standard deviations + % away from the mean. 
+``` + +--- + +*Generated from [dev_router.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_router.erl)* diff --git a/docs/book/src/dev_scheduler.erl.md b/docs/book/src/dev_scheduler.erl.md new file mode 100644 index 000000000..67bac346c --- /dev/null +++ b/docs/book/src/dev_scheduler.erl.md @@ -0,0 +1,1859 @@ +# dev_scheduler + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler.erl) + +A simple scheduler scheme for AO. +This device expects a message of the form: + Process: `#{ id, Scheduler: #{ Authority } }` +
+It exposes the following keys for scheduling:
+    `#{ method: GET, path: <<"/info">> }` ->
+        Returns information about the scheduler.
+    `#{ method: GET, path: <<"/slot">> }` -> `slot(Msg1, Msg2, Opts)`
+        Returns the current slot for a process.
+    `#{ method: GET, path: <<"/schedule">> }` -> `get_schedule(Msg1, Msg2, Opts)`
+        Returns the schedule for a process in a cursor-traversable format.
+    `#{ method: POST, path: <<"/schedule">> }` -> `post_schedule(Msg1, Msg2, Opts)`
+        Schedules a new message for a process, or starts a new scheduler
+        for the given message.
+
+ +--- + +## Exported Functions + +- `checkpoint/1` +- `info/0` +- `location/3` +- `next/3` +- `parse_schedulers/1` +- `router/4` +- `schedule/3` +- `slot/3` +- `start/0` +- `status/3` +- `test_process/0` + +--- + +### start + +A simple scheduler scheme for AO. +Helper to ensure that the environment is started. + +```erlang +start() -> + % We need the rocksdb backend to run for hb_cache module to work + application:ensure_all_started(hb), + <> + = crypto:strong_rand_bytes(12), + rand:seed(exsplus, {I1, I2, I3}), + ok. +``` + +### info + +This device uses a default_handler to route requests to the correct + +```erlang +info() -> + #{ + exports => + [ + location, + status, + next, + schedule, + slot, + init, + checkpoint + ], + excludes => [set, keys], + default => fun router/4 + }. +``` + +### parse_schedulers + +General utility functions that are available to other modules. + +```erlang +parse_schedulers(SchedLoc) when is_list(SchedLoc) -> SchedLoc; +``` + +### parse_schedulers + +General utility functions that are available to other modules. + +```erlang +parse_schedulers(SchedLoc) when is_binary(SchedLoc) -> + binary:split( + binary:replace(SchedLoc, <<"\"">>, <<"">>, [global]), + <<",">>, + [global, trim_all] + ). +``` + +### router + +The default handler for the scheduler device. + +```erlang +router(_, Msg1, Msg2, Opts) -> + ?event({scheduler_router_called, {msg2, Msg2}, {opts, Opts}}), + schedule(Msg1, Msg2, Opts). 
+``` + +### next + +Load the schedule for a process into the cache, then return the next + +```erlang +next(Msg1, Msg2, Opts) -> + ?event(debug_next, {scheduler_next_called, {msg1, Msg1}, {msg2, Msg2}}), + ?event(next, started_next), + ?event(next_profiling, started_next), + Schedule = message_cached_assignments(Msg1, Opts), + LastProcessed = + hb_util:int( + hb_ao:get( + <<"at-slot">>, + Msg1, + Opts#{ hashpath => ignore } + ) + ), + ?event(next_profiling, got_last_processed), + ?event(debug_next, {in_message_cache, {schedule, Schedule}}), + ?event(next, {last_processed, LastProcessed, {message_cache, length(Schedule)}}), + % Get the assignments from the message cache, local cache, or fetch from + % the SU. Returns an ordered list of assignments. +``` + +### validate_next_slot + +Validate the `next` slot generated by `find_next_assignment`. + +```erlang +validate_next_slot(Msg1, [NextAssignment|Assignments], Lookahead, Last, Opts) -> + % Paranoia: Get the slot of the next assignment, to ensure that it is the + % last processed slot + 1. 
+``` + +### find_next_assignment + +Get the assignments for a process from the message cache, local cache, + +```erlang +find_next_assignment(_Msg1, _Msg2, Schedule = [_Next|_], _LastSlot, _Opts) -> + {ok, Schedule, undefined}; +``` + +### find_next_assignment + +Get the assignments for a process from the message cache, local cache, + +```erlang +find_next_assignment(Msg1, Msg2, _Schedule, LastSlot, Opts) -> + ProcID = dev_process:process_id(Msg1, Msg2, Opts), + LocalCacheRes = + case hb_util:atom(hb_opts:get(scheduler_ignore_local_cache, false, Opts)) of + true -> not_found; + false -> + check_lookahead_and_local_cache(Msg1, ProcID, LastSlot + 1, Opts) + end, + case LocalCacheRes of + {ok, Worker, Assignment} -> + ?event(next_debug, + {in_cache, + {slot, LastSlot + 1}, + {assignment, Assignment} + } + ), + ?event(next_profiling, read_assignment), + {ok, [Assignment], Worker}; + not_found -> + {ok, RecvdAssignments} = + hb_ao:resolve( + Msg1, + #{ + <<"method">> => <<"GET">>, + <<"path">> => <<"schedule/assignments">>, + <<"from">> => LastSlot + }, + Opts#{ scheduler_follow_redirects => true } + ), + % Convert the assignments to an ordered list of messages, + % after removing all keys before the last processed slot. +``` + +### message_cached_assignments + +Non-device exported helper to get the cached assignments held in a + +```erlang +message_cached_assignments(Msg, Opts) -> + hb_private:get( + <<"scheduler@1.0/assignments">>, + Msg, + [], + Opts + ). +``` + +### spawn_lookahead_worker + +Spawn a new Erlang process to fetch the next assignments from the local + +```erlang +spawn_lookahead_worker(ProcID, Slot, Opts) -> + Caller = self(), + spawn( + fun() -> + ?event(next_lookahead, + {looking_ahead, + {proc_id, ProcID}, + {slot, Slot}, + {caller, Caller} + } + ), + case dev_scheduler_cache:read(ProcID, Slot, Opts) of + {ok, Assignment} -> + LoadedAssignment = hb_cache:ensure_all_loaded(Assignment, Opts), + Caller ! 
{assignment, ProcID, Slot, LoadedAssignment}; + not_found -> + fail + end + end + ). +``` + +### check_lookahead_and_local_cache + +Check if we have a result from a lookahead worker or from our local + +```erlang +check_lookahead_and_local_cache(Msg1, ProcID, TargetSlot, Opts) when is_map(Msg1) -> + case hb_private:get(<<"scheduler@1.0/lookahead-worker">>, Msg1, Opts) of + not_found -> + check_lookahead_and_local_cache(undefined, ProcID, TargetSlot, Opts); + LookaheadWorker -> + check_lookahead_and_local_cache(LookaheadWorker, ProcID, TargetSlot, Opts) + end; +``` + +### check_lookahead_and_local_cache + +Check if we have a result from a lookahead worker or from our local + +```erlang +check_lookahead_and_local_cache(Worker, ProcID, TargetSlot, Opts) when is_pid(Worker) -> + receive + {assignment, ProcID, OldSlot, _Assignment} when OldSlot < TargetSlot -> + % The lookahead worker has found an assignment for a slot that is + % before the target slot. We remove it from the cache and continue + % searching. +``` + +### check_lookahead_and_local_cache + +```erlang +check_lookahead_and_local_cache(undefined, ProcID, TargetSlot, Opts) -> + % The lookahead worker has not found an assignment for the target + % slot yet, so we check our local cache. +``` + +### status + +Returns information about the entire scheduler. + +```erlang +status(_M1, _M2, _Opts) -> + ?event(getting_scheduler_status), + Wallet = dev_scheduler_registry:get_wallet(), + {ok, + #{ + <<"address">> => hb_util:id(ar_wallet:to_address(Wallet)), + <<"processes">> => + lists:map( + fun hb_util:id/1, + dev_scheduler_registry:get_processes() + ), + <<"cache-control">> => <<"no-store">> + } + }. +``` + +### location + +Router for `record` requests. Expects either a `POST` or `GET` request. + +```erlang +location(Msg1, Msg2, Opts) -> + case hb_ao:get(<<"method">>, Msg2, <<"GET">>, Opts) of + <<"POST">> -> post_location(Msg1, Msg2, Opts); + <<"GET">> -> get_location(Msg1, Msg2, Opts) + end. 
+``` + +### get_location + +Search for the location of the scheduler in the scheduler-location + +```erlang +get_location(_Msg1, Req, Opts) -> + % Get the address of the scheduler from the request. +``` + +### post_location + +Generate a new scheduler location record and register it. We both send + +```erlang +post_location(Msg1, RawReq, RawOpts) -> + Opts = + case dev_whois:ensure_host(RawOpts) of + {ok, NewOpts} -> NewOpts; + _ -> RawOpts + end, + % Ensure that the request is signed by the operator. +``` + +### schedule + +A router for choosing between getting the existing schedule, or + +```erlang +schedule(Msg1, Msg2, Opts) -> + ?event({resolving_schedule_request, {msg2, Msg2}, {state_msg, Msg1}}), + case hb_util:key_to_atom(hb_ao:get(<<"method">>, Msg2, <<"GET">>, Opts)) of + post -> post_schedule(Msg1, Msg2, Opts); + get -> get_schedule(Msg1, Msg2, Opts) + end. +``` + +### post_schedule + +Schedules a new message on the SU. Searches Msg1 for the appropriate ID, + +```erlang +post_schedule(Msg1, Msg2, Opts) -> + ?event(scheduling_message), + % Find the target message to schedule: + ToSched = find_message_to_schedule(Msg1, Msg2, Opts), + ?event({to_sched, ToSched}), + % Find the ProcessID of the target message: + % - If it is a Process, use the ID of the message. +``` + +### do_post_schedule + +Post schedule the message. `Msg2` by this point has been refined to only + +```erlang +do_post_schedule(ProcID, PID, Msg2, Opts) -> + % Should we verify the message again before scheduling? 
+ Verified = + case hb_opts:get(verify_assignments, true, Opts) of + true -> + ?event(debug_scheduler_verify, + {verifying_message_before_scheduling, Msg2} + ), + Res = length(hb_message:signers(Msg2, Opts)) > 0 + andalso hb_message:verify(Msg2, signers, Opts), + ?event(debug_scheduler_verify, {verified, Res}), + Res; + accept_unsigned -> + ?event( + debug_scheduler_verify, + {accepting_unsigned_message_before_scheduling, Msg2} + ), + hb_message:verify(Msg2, signers, Opts); + false -> true + end, + ?event({verified, Verified}), + % Handle scheduling of the message if the message is valid. +``` + +### find_server + +Locate the correct scheduling server for a given process. + +```erlang +find_server(ProcID, Msg1, Opts) -> + find_server(ProcID, Msg1, undefined, Opts). +``` + +### find_server + +```erlang +find_server(ProcID, Msg1, ToSched, Opts) -> + case get_hint(ProcID, Opts) of + {ok, Hint} -> + ?event({found_hint_in_proc_id, Hint}), + generate_redirect(ProcID, Hint, Opts); + not_found -> + ?event({no_hint_in_proc_id, ProcID}), + case dev_scheduler_registry:find(ProcID, false, Opts) of + PID when is_pid(PID) -> + ?event({found_pid_in_local_registry, PID}), + {local, PID}; + not_found -> + ?event({no_pid_in_local_registry, ProcID}), + Proc = find_process_message(ProcID, Msg1, ToSched, Opts), + ?event({found_process, {process, Proc}, {msg1, Msg1}}), + SchedLoc = + hb_ao:get_first( + [ + {Proc, <<"scheduler">>}, + {Proc, <<"scheduler-location">>} + ] ++ + case ToSched of + undefined -> []; + _ -> [{ToSched, <<"scheduler-location">>}] + end, + not_found, + Opts#{ hashpath => ignore } + ), + ?event({sched_loc, SchedLoc}), + case SchedLoc of + not_found -> + {error, <<"No scheduler information provided.">>}; + _ -> + ?event( + {confirming_if_scheduler_is_local, + {addr, SchedLoc} + } + ), + ParsedLoc = parse_schedulers(SchedLoc), + case is_local_scheduler(ProcID, Proc, ParsedLoc, Opts) of + {ok, PID} -> + % We are the scheduler. 
Start the server if + % it has not already been started, with the + % given options. +``` + +### find_process_message + +Find the process message for a given process ID and base message. + +```erlang +find_process_message(ProcID, Msg1, ToSched, Opts) -> + % Find the process from the message. +``` + +### is_local_scheduler + +Determine if a scheduler is local. If so, return the PID and options. + +```erlang +is_local_scheduler(_, _, [], _Opts) -> false; +``` + +### is_local_scheduler + +Determine if a scheduler is local. If so, return the PID and options. + +```erlang +is_local_scheduler(ProcID, ProcMsg, [Scheduler | Rest], Opts) -> + case is_local_scheduler(ProcID, ProcMsg, Scheduler, Opts) of + {ok, PID} -> {ok, PID}; + false -> is_local_scheduler(ProcID, ProcMsg, Rest, Opts) + end; +``` + +### is_local_scheduler + +Determine if a scheduler is local. If so, return the PID and options. + +```erlang +is_local_scheduler(ProcID, ProcMsg, Scheduler, Opts) -> + case hb_opts:as(Scheduler, Opts) of + {ok, _} -> + { + ok, + dev_scheduler_registry:find(ProcID, ProcMsg, Opts) + }; + {error, _} -> false + end. +``` + +### get_hint + +If a hint is present in the string, return it. Else, return not_found. + +```erlang +get_hint(Str, Opts) when is_binary(Str) -> + case hb_opts:get(scheduler_follow_hints, true, Opts) of + true -> + case binary:split(Str, <<"?">>, [global]) of + [_, QS] -> + QueryMap = hb_maps:from_list(uri_string:dissect_query(QS)), + case hb_maps:get(<<"hint">>, QueryMap, not_found, Opts) of + not_found -> not_found; + Hint -> {ok, Hint} + end; + _ -> not_found + end; + false -> not_found + end; +``` + +### get_hint + +If a hint is present in the string, return it. Else, return not_found. +Generate a redirect message to a scheduler. + +```erlang +get_hint(_Str, _Opts) -> not_found. +``` + +### generate_redirect + +If a hint is present in the string, return it. Else, return not_found. +Generate a redirect message to a scheduler. 
+ +```erlang +generate_redirect(ProcID, SchedulerLocation, Opts) -> + Variant = hb_ao:get(<<"variant">>, SchedulerLocation, <<"ao.N.1">>, Opts), + ?event({generating_redirect, {proc_id, ProcID}, {variant, Variant}}), + RedirectLocation = + case is_binary(SchedulerLocation) of + true -> SchedulerLocation; + false -> + hb_ao:get_first( + [ + {SchedulerLocation, <<"url">>}, + {SchedulerLocation, <<"location">>} + ], + <<"/">>, + Opts + ) + end, + {redirect, + #{ + <<"status">> => 307, + <<"location">> => RedirectLocation, + <<"body">> => + <<"Redirecting to scheduler: ", RedirectLocation/binary>>, + <<"variant">> => Variant + } + }. +``` + +### without_hint + +Take a process ID or target with a potential hint and return just the + +```erlang +without_hint(Target) when ?IS_ID(Target) -> + hb_util:human_id(Target); +``` + +### without_hint + +Take a process ID or target with a potential hint and return just the + +```erlang +without_hint(Target) -> + case binary:split(Target, [<<"?">>, <<"&">>], [global]) of + [ProcID] when ?IS_ID(ProcID) -> hb_util:human_id(ProcID); + _ -> throw({invalid_operation_target, Target}) + end. +``` + +### find_remote_scheduler + +Use the SchedulerLocation to find the remote path and return a redirect. + +```erlang +find_remote_scheduler(_ProcID, [], _Opts) -> {error, not_found}; +``` + +### find_remote_scheduler + +Use the SchedulerLocation to find the remote path and return a redirect. + +```erlang +find_remote_scheduler(ProcID, [Scheduler | Rest], Opts) -> + case find_remote_scheduler(ProcID, Rest, Opts) of + {error, not_found} -> + find_remote_scheduler(ProcID, Scheduler, Opts); + {ok, Redirect} -> + {ok, Redirect} + end; +``` + +### find_remote_scheduler + +Use the SchedulerLocation to find the remote path and return a redirect. + +```erlang +find_remote_scheduler(ProcID, Scheduler, Opts) -> + % Parse the scheduler location to see if it has a hint. If there is a hint, + % we will use it to construct a redirect message. 
+``` + +### slot + +Returns information about the current slot for a process. + +```erlang +slot(M1, M2, Opts) -> + ?event({getting_current_slot, {msg, M1}}), + ProcID = find_target_id(M1, M2, Opts), + case find_server(ProcID, M1, Opts) of + {local, PID} -> + ?event({getting_current_slot, {proc_id, ProcID}}), + {Timestamp, Height, Hash} = ar_timestamp:get(), + #{ current := CurrentSlot, wallets := Wallets } = + dev_scheduler_server:info(PID), + {ok, #{ + <<"process">> => ProcID, + <<"current">> => CurrentSlot, + <<"timestamp">> => Timestamp, + <<"block-height">> => Height, + <<"block-hash">> => Hash, + <<"cache-control">> => <<"no-store">>, + <<"addresses">> => lists:map(fun hb_util:human_id/1, Wallets) + }}; + {redirect, Redirect} -> + case hb_opts:get(scheduler_follow_redirects, true, Opts) of + false -> {ok, Redirect}; + true -> remote_slot(ProcID, Redirect, Opts) + end + end. +``` + +### remote_slot + +Get the current slot from a remote scheduler. + +```erlang +remote_slot(ProcID, Redirect, Opts) -> + ?event({getting_remote_slot, {proc_id, ProcID}, {redirect, {explicit, Redirect}}}), + Node = node_from_redirect(Redirect, Opts), + ?event({getting_slot_from_node, {string, Node}}), + remote_slot( + hb_ao:get(<<"variant">>, Redirect, <<"ao.N.1">>, Opts), + ProcID, + Node, + Opts + ). +``` + +### remote_slot + +Get the current slot from a remote scheduler, based on the variant of + +```erlang +remote_slot(<<"ao.N.1">>, ProcID, Node, Opts) -> + % The process is running on a mainnet AO-Core scheduler, so we can just + % use the `/slot' endpoint to get the current slot. +``` + +### remote_slot + +```erlang +remote_slot(<<"ao.TN.1">>, ProcID, Node, Opts) -> + % The process is running on a testnet AO-Core scheduler, so we need to use + % `/processes/procID/latest' to get the current slot. 
+``` + +### get_schedule + +Generate and return a schedule for a process, optionally between + +```erlang +get_schedule(Msg1, Msg2, Opts) -> + ProcID = hb_util:human_id(find_target_id(Msg1, Msg2, Opts)), + From = + case hb_ao:get(<<"from">>, Msg2, not_found, Opts) of + not_found -> 0; + X when X < 0 -> 0; + FromRes -> hb_util:int(FromRes) + end, + To = + case hb_ao:get(<<"to">>, Msg2, not_found, Opts) of + not_found -> undefined; + ToRes -> hb_util:int(ToRes) + end, + Format = hb_ao:get(<<"accept">>, Msg2, <<"application/http">>, Opts), + ?event( + {parsed_get_schedule, + {process, ProcID}, + {from, From}, + {to, To}, + {format, Format} + } + ), + case find_server(ProcID, Msg1, Opts) of + {local, _PID} -> + generate_local_schedule(Format, ProcID, From, To, Opts); + {redirect, Redirect} -> + ?event({redirect_received, {redirect, Redirect}}), + case hb_opts:get(scheduler_follow_redirects, true, Opts) of + true -> + case get_remote_schedule(ProcID, From, To, Redirect, Opts) of + {ok, Res} -> + case uri_string:percent_decode(Format) of + <<"application/aos-2">> -> + dev_scheduler_formats:assignments_to_aos2( + ProcID, + hb_ao:get( + <<"assignments">>, Res, [], Opts), + hb_util:atom(hb_ao:get( + <<"continues">>, Res, false, Opts)), + Opts + ); + _ -> + {ok, Res} + end; + {error, Res} -> + {error, Res} + end; + false -> + {ok, Redirect} + end + end. +``` + +### get_remote_schedule + +Get a schedule from a remote scheduler, but first read all of the + +```erlang +get_remote_schedule(RawProcID, From, To, Redirect, Opts) -> + % If we are responding to a legacy scheduler request we must add one to the + % `from' slot to account for the fact that the legacy scheduler gives us + % the slots _after_ the stated nonce. 
+``` + +### do_get_remote_schedule + +Get a schedule from a remote scheduler, unless we already have already + +```erlang +do_get_remote_schedule(ProcID, LocalAssignments, From, To, _, Opts) + when (To =/= undefined) andalso (From >= To) -> + % We already have all of the assignments from the local cache. Return them + % as a bundle. We set the 'more' to `undefined' to indicate that there may + % be more assignments to fetch, but we don't know for sure. +``` + +### do_get_remote_schedule + +```erlang +do_get_remote_schedule(ProcID, LocalAssignments, From, To, Redirect, Opts) -> + % We don't have all of the assignments from the local cache, so we need to + % fetch the rest from the remote scheduler. +``` + +### cache_remote_schedule + +Cache a schedule received from a remote scheduler. + +```erlang +cache_remote_schedule(<<"ao.TN.1">>, ProcID, Schedule, Opts) -> + % If the schedule has a variant of ao.TN.1, we add this to the raw assignment + % before caching it. +``` + +### cache_remote_schedule + +```erlang +cache_remote_schedule(<<"ao.N.1">>, ProcID, Schedule, Opts) -> + Assignments = + hb_ao:get( + <<"assignments">>, + Schedule, + Opts#{ hashpath => ignore } + ), + cache_remote_schedule(common, ProcID, Assignments, Opts); +``` + +### cache_remote_schedule + +```erlang +cache_remote_schedule(_, _ProcID, Schedule, Opts) -> + Cacher = + fun() -> + ?event(debug_sched, {caching_remote_schedule, {schedule, Schedule}}), + lists:foreach( + fun(Assignment) -> + % We do not care about the result of the write because it is only + % an additional cache. +``` + +### node_from_redirect + +Get the node URL from a redirect. + +```erlang +node_from_redirect(Redirect, Opts) -> + uri_string:recompose( + ( + hb_maps:remove( + query, + uri_string:parse( + hb_ao:get(<<"location">>, Redirect, Opts) + ), + Opts + ) + )#{path => <<"/">>} + ). +``` + +### filter_json_assignments + +Filter JSON assignment results from a remote legacy scheduler. 
+ +```erlang +filter_json_assignments(JSONRes, To, From, Opts) -> + Edges = hb_maps:get(<<"edges">>, JSONRes, [], Opts), + Filtered = + lists:filter( + fun(Edge) -> + Node = hb_maps:get(<<"node">>, Edge, undefined, Opts), + Assignment = hb_maps:get(<<"assignment">>, Node, undefined, Opts), + Tags = hb_maps:get(<<"tags">>, Assignment, undefined, Opts), + Nonces = + lists:filtermap( + fun(#{ <<"name">> := <<"Nonce">>, <<"value">> := Nonce }) -> + {true, hb_util:int(Nonce)}; + (_) -> false + end, + Tags + ), + Nonce = hd(Nonces), + ?event({filter, {nonce, Nonce}, {from, From}, {to, To}}), + Nonce >= From andalso Nonce =< To + end, + Edges + ), + ?event({filtered, {length, length(Filtered)}, {edges, Filtered}}), + JSONRes#{ <<"edges">> => Filtered }. +``` + +### post_remote_schedule + +Filter JSON assignment results from a remote legacy scheduler. + +```erlang +post_remote_schedule(RawProcID, Redirect, OnlyCommitted, Opts) -> + RemoteOpts = Opts#{ http_client => httpc }, + ProcID = without_hint(RawProcID), + Location = hb_ao:get(<<"location">>, Redirect, Opts), + Parsed = uri_string:parse(Location), + Node = uri_string:recompose((hb_maps:remove(query, Parsed, Opts))#{path => <<"/">>}), + Variant = hb_ao:get(<<"variant">>, Redirect, <<"ao.N.1">>, Opts), + case Variant of + <<"ao.N.1">> -> + PostMsg = #{ + <<"path">> => << ProcID/binary, "/schedule">>, + <<"body">> => OnlyCommitted, + <<"method">> => <<"POST">> + }, + hb_http:post(Node, PostMsg, RemoteOpts); + <<"ao.TN.1">> -> + % Ensure that the message is signed with ANS-104. 
+``` + +### post_legacy_schedule + +```erlang +post_legacy_schedule(ProcID, OnlyCommitted, Node, Opts) -> + ?event({encoding_for_legacy_scheduler, {node, {string, Node}}}), + Encoded = + try + Item = + hb_message:convert( + OnlyCommitted, + <<"ans104@1.0">>, + Opts + ), + ?event( + {encoded_for_legacy_scheduler, + {item, Item}, + {exact, {explicit, Item}} + } + ), + {ok, ar_bundles:serialize(Item)} + catch + Class:Reason -> + {error, + #{ + <<"status">> => 422, + <<"body">> => + << + "Failed to encode message for legacy scheduler on ", + Node/binary, + ". Try different encoding?" + >>, + <<"class">> => Class, + <<"reason">> => + iolist_to_binary(io_lib:format("~p", [Reason])) + } + } + end, + case Encoded of + {error, EncodingErr} -> + ?event({could_not_encode_for_legacy_scheduler, {error, EncodingErr}}), + {error, #{ + <<"status">> => 422, + <<"body">> => + <<"Incorrect encoding. Scheduler has variant: ao.TN.1">> + } + }; + {ok, Body} -> + ?event({encoded_for_legacy_scheduler, {encoded, Body}}), + PostMsg = #{ + <<"path">> => P = <<"/?proc-id=", ProcID/binary>>, + <<"body">> => Body, + <<"method">> => <<"POST">> + }, + ?event({posting_to_remote_legacy_scheduler, + {node, {string, Node}}, + {path, {string, P}}, + {process_id, {string, ProcID}} + }), + LegacyOpts = Opts#{ protocol => http2 }, + case hb_http:post(Node, PostMsg, LegacyOpts) of + {ok, PostRes} -> + ?event({remote_schedule_result, PostRes}), + JSONRes = + hb_json:decode( + hb_ao:get(<<"body">>, PostRes, Opts) + ), + % Legacy SUs return only the ID of the assignment, so we need + % to read and return it. +``` + +### find_target_id + +Find the schedule ID from a given request. 
The precedence order for
+
+```erlang
+find_target_id(Msg1, Msg2, Opts) ->
+    TempOpts = Opts#{ hashpath => ignore },
+    Res = case hb_ao:resolve(Msg2, <<"target">>, TempOpts) of
+        {ok, Target} ->
+            % ID found at Msg2/target
+            Target;
+        _ ->
+            case hb_ao:resolve(Msg2, <<"type">>, TempOpts) of
+                {ok, <<"Process">>} ->
+                    % Msg2 is a Process, so the ID is at Msg2/id
+                    hb_message:id(Msg2, all, Opts);
+                _ ->
+                    case hb_ao:resolve(Msg1, <<"process">>, TempOpts) of
+                        {ok, Process} ->
+                            % ID found at Msg1/process/id
+                            hb_message:id(Process, all, Opts);
+                        _ ->
+                            % Does the message have a type of Process?
+                            case hb_ao:get(<<"type">>, Msg1, TempOpts) of
+                                <<"Process">> ->
+                                    % Yes, so try Msg1/id
+                                    hb_message:id(Msg1, all, Opts);
+                                _ ->
+                                    % No, so the ID is at Msg2/id
+                                    hb_message:id(Msg2, all, Opts)
+                            end
+                    end
+            end
+    end,
+    ?event({found_id, {id, Res}, {msg1, Msg1}, {msg2, Msg2}}),
+    Res.
+```
+
+### find_message_to_schedule
+
+Search the given base and request message pair to find the message to
+
+```erlang
+find_message_to_schedule(_Msg1, Msg2, Opts) ->
+    Subject =
+        hb_ao:get(
+            <<"subject">>,
+            Msg2,
+            not_found,
+            Opts#{ hashpath => ignore }
+        ),
+    case Subject of
+        <<"self">> -> Msg2;
+        not_found ->
+            hb_ao:get(<<"body">>, Msg2, Msg2, Opts#{ hashpath => ignore });
+        Subject ->
+            hb_ao:get(Subject, Msg2, Opts#{ hashpath => ignore })
+    end.
+```
+
+### generate_local_schedule
+
+Generate a `GET /schedule` response for a process.
+
+```erlang
+generate_local_schedule(Format, ProcID, From, To, Opts) ->
+    ?event(
+        {servicing_request_for_assignments,
+            {proc_id, ProcID},
+            {from, From},
+            {to, To}
+        }
+    ),
+    ?event(generating_schedule_from_local_server),
+    {Assignments, More} = get_local_assignments(ProcID, From, To, Opts),
+    ?event({got_assignments, length(Assignments), {more, More}}),
+    % Determine and apply the formatting function to use for generation
+    % of the response, based on the `Accept' header.
+``` + +### get_local_assignments + +Get the assignments for a process, and whether the request was truncated. + +```erlang +get_local_assignments(ProcID, From, undefined, Opts) -> + case dev_scheduler_cache:latest(ProcID, Opts) of + not_found -> + % No assignments in cache. +``` + +### get_local_assignments + +```erlang +get_local_assignments(ProcID, From, RequestedTo, Opts) -> + ?event({handling_req_to_get_assignments, ProcID, {from, From}, {to, RequestedTo}}), + ComputedTo = + case (RequestedTo - From) > ?MAX_ASSIGNMENT_QUERY_LEN of + true -> From + ?MAX_ASSIGNMENT_QUERY_LEN; + false -> RequestedTo + end, + { + read_local_assignments(ProcID, From, ComputedTo, Opts), + ComputedTo < RequestedTo + }. +``` + +### read_local_assignments + +Get the assignments for a process. + +```erlang +read_local_assignments(_ProcID, From, To, _Opts) when From > To -> + []; +``` + +### read_local_assignments + +Get the assignments for a process. + +```erlang +read_local_assignments(ProcID, CurrentSlot, To, Opts) -> + case dev_scheduler_cache:read(ProcID, CurrentSlot, Opts) of + not_found -> + % No assignment found in cache. +``` + +### checkpoint + +Returns the current state of the scheduler. +Generate a _transformed_ process message, not as they are generated + +```erlang +checkpoint(State) -> {ok, State}. +%%% Tests +``` + +### test_process + +Returns the current state of the scheduler. +Generate a _transformed_ process message, not as they are generated + +```erlang +test_process() -> test_process(#{ priv_wallet => hb:wallet()}). +``` + +### test_process + +Returns the current state of the scheduler. +Generate a _transformed_ process message, not as they are generated + +```erlang +test_process(#{ priv_wallet := Wallet}) -> + test_process(hb_util:human_id(ar_wallet:to_address(Wallet))); +``` + +### test_process + +Returns the current state of the scheduler. 
+Generate a _transformed_ process message, not as they are generated + +```erlang +test_process(Address) -> + #{ + <<"device">> => <<"scheduler@1.0">>, + <<"device-stack">> => [<<"cron@1.0">>, <<"wasm-64@1.0">>, <<"poda@1.0">>], + <<"image">> => <<"wasm-image-id">>, + <<"type">> => <<"Process">>, + <<"scheduler-location">> => Address, + <<"test-random-seed">> => rand:uniform(1337) + }. +``` + +### status_test + +```erlang +status_test() -> + start(), + ?assertMatch( + #{<<"processes">> := Processes, + <<"address">> := Address} + when is_list(Processes) and is_binary(Address), + hb_ao:get(status, test_process()) + ). +``` + +### register_new_process_test + +```erlang +register_new_process_test() -> + start(), + Opts = #{ priv_wallet => hb:wallet() }, + Msg1 = hb_message:commit(test_process(Opts), Opts), + ?event({test_registering_new_process, {msg, Msg1}}), + ?assertMatch({ok, _}, + hb_ao:resolve( + Msg1, + #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"schedule">>, + <<"body">> => Msg1 + }, + #{} + ) + ), + ?event({status_response, Msg1}), + Procs = hb_ao:get(<<"processes">>, hb_ao:get(status, Msg1)), + ?event({procs, Procs}), + ?assert( + lists:member( + hb_util:id(Msg1, all), + hb_ao:get(<<"processes">>, hb_ao:get(status, Msg1)) + ) + ). +``` + +### register_location_on_boot_test + +Test that a scheduler location is registered on boot. 
+ +```erlang +register_location_on_boot_test() -> + NotifiedPeerWallet = ar_wallet:new(), + RegisteringNodeWallet = ar_wallet:new(), + start(), + NotifiedPeer = + hb_http_server:start_node(#{ + priv_wallet => NotifiedPeerWallet, + store => [ + #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST/scheduler-location-notified">> + } + ] + }), + RegisteringNode = hb_http_server:start_node( + #{ + priv_wallet => RegisteringNodeWallet, + on => + #{ + <<"start">> => #{ + <<"device">> => <<"scheduler@1.0">>, + <<"path">> => <<"location">>, + <<"method">> => <<"POST">>, + <<"target">> => <<"self">>, + <<"require-codec">> => <<"ans104@1.0">>, + <<"url">> => <<"https://hyperbeam-test-ignore.com">>, + <<"hook">> => #{ + <<"result">> => <<"ignore">>, + <<"commit-request">> => true + } + } + }, + scheduler_location_notify_peers => [NotifiedPeer] + } + ), + {ok, CurrentLocation} = + hb_http:get( + RegisteringNode, + #{ + <<"method">> => <<"GET">>, + <<"path">> => <<"/~scheduler@1.0/location">>, + <<"address">> => + hb_util:human_id(ar_wallet:to_address(RegisteringNodeWallet)) + }, + #{} + ), + ?event({current_location, CurrentLocation}), + ?assertMatch( + #{ + <<"url">> := <<"https://hyperbeam-test-ignore.com">>, + <<"nonce">> := 0 + }, + hb_ao:get(<<"body">>, CurrentLocation, #{}) + ). 
+``` + +### schedule_message_and_get_slot_test + +```erlang +schedule_message_and_get_slot_test() -> + start(), + Msg1 = test_process(), + Msg2 = #{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + hb_message:commit(#{ + <<"type">> => <<"Message">>, + <<"test-key">> => <<"true">> + }, hb:wallet()) + }, + ?assertMatch({ok, _}, hb_ao:resolve(Msg1, Msg2, #{})), + ?assertMatch({ok, _}, hb_ao:resolve(Msg1, Msg2, #{})), + Msg3 = #{ + <<"path">> => <<"slot">>, + <<"method">> => <<"GET">>, + <<"process">> => hb_util:id(Msg1) + }, + ?event({pg, dev_scheduler_registry:get_processes()}), + ?event({getting_schedule, {msg, Msg3}}), + ?assertMatch({ok, #{ <<"current">> := CurrentSlot }} + when CurrentSlot > 0, + hb_ao:resolve(Msg1, Msg3, #{})). +``` + +### redirect_to_hint_test + +```erlang +redirect_to_hint_test() -> + start(), + RandAddr = hb_util:human_id(crypto:strong_rand_bytes(32)), + TestLoc = <<"http://test.computer">>, + Msg1 = test_process(<< RandAddr/binary, "?hint=", TestLoc/binary>>), + Msg2 = #{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => Msg1 + }, + ?assertMatch( + {ok, #{ <<"location">> := Location }} when is_binary(Location), + hb_ao:resolve( + Msg1, + Msg2, + #{ + scheduler_follow_hints => true, + scheduler_follow_redirects => false + } + ) + ). +``` + +### redirect_from_graphql_test_ + +```erlang +redirect_from_graphql_test_() -> + {timeout, 60, fun redirect_from_graphql/0}. 
+``` + +### redirect_from_graphql + +```erlang +redirect_from_graphql() -> + start(), + Opts = + #{ store => + [ + #{ <<"store-module">> => hb_store_fs, <<"name">> => <<"cache-mainnet">> }, + #{ <<"store-module">> => hb_store_gateway, <<"store">> => false } + ] + }, + {ok, Msg} = hb_cache:read(<<"0syT13r0s0tgPmIed95bJnuSqaD29HQNN8D3ElLSrsc">>, Opts), + ?assertMatch( + {ok, #{ <<"location">> := Location }} when is_binary(Location), + hb_ao:resolve( + Msg, + #{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + hb_message:commit(#{ + <<"type">> => <<"Message">>, + <<"target">> => + <<"0syT13r0s0tgPmIed95bJnuSqaD29HQNN8D3ElLSrsc">>, + <<"test-key">> => <<"Test-Val">> + }, + hb:wallet() + ) + }, + #{ + scheduler_follow_redirects => false + } + ) + ). +``` + +### get_local_schedule_test + +```erlang +get_local_schedule_test() -> + start(), + Msg1 = test_process(), + Msg2 = #{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + hb_message:commit(#{ + <<"type">> => <<"Message">>, + <<"test-key">> => <<"Test-Val">> + }, hb:wallet()) + }, + Msg3 = #{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + hb_message:commit(#{ + <<"type">> => <<"Message">>, + <<"test-key">> => <<"Test-Val-2">> + }, hb:wallet()) + }, + ?assertMatch({ok, _}, hb_ao:resolve(Msg1, Msg2, #{})), + ?assertMatch({ok, _}, hb_ao:resolve(Msg1, Msg3, #{})), + ?assertMatch( + {ok, _}, + hb_ao:resolve(Msg1, #{ + <<"method">> => <<"GET">>, + <<"path">> => <<"schedule">>, + <<"target">> => hb_util:id(Msg1) + }, + #{}) + ). +``` + +### http_init + +```erlang +http_init() -> http_init(#{}). 
+``` + +### http_init + +```erlang +http_init(Opts) -> + start(), + Wallet = ar_wallet:new(), + ExtendedOpts = Opts#{ + priv_wallet => Wallet, + store => [ + #{ + <<"store-module">> => hb_store_lmdb, + <<"name">> => <<"cache-mainnet/lmdb">> + }, + #{ <<"store-module">> => hb_store_gateway, <<"store">> => false } + ] + }, + Node = hb_http_server:start_node(ExtendedOpts), + {Node, ExtendedOpts}. +``` + +### register_scheduler_test + +```erlang +register_scheduler_test() -> + start(), + {Node, Wallet} = http_init(), + Msg1 = hb_message:commit(#{ + <<"path">> => <<"/~scheduler@1.0/location">>, + <<"url">> => <<"https://hyperbeam-test-ignore.com">>, + <<"method">> => <<"POST">>, + <<"nonce">> => 1, + <<"require-codec">> => <<"ans104@1.0">> + }, Wallet), + {ok, Res} = hb_http:post(Node, Msg1, #{}), + ?assertMatch(#{ <<"url">> := Location } when is_binary(Location), Res). +``` + +### http_post_schedule_sign + +```erlang +http_post_schedule_sign(Node, Msg, ProcessMsg, Wallet) -> + Msg1 = hb_message:commit(#{ + <<"path">> => <<"/~scheduler@1.0/schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + hb_message:commit( + Msg#{ + <<"target">> => + hb_util:human_id(hb_message:id(ProcessMsg, all)), + <<"type">> => <<"Message">> + }, + Wallet + ) + }, Wallet), + hb_http:post(Node, Msg1, #{}). +``` + +### http_get_slot + +```erlang +http_get_slot(N, PMsg) -> + ID = hb_message:id(PMsg, all), + Wallet = hb:wallet(), + {ok, _} = hb_http:get(N, hb_message:commit(#{ + <<"path">> => <<"/~scheduler@1.0/slot">>, + <<"method">> => <<"GET">>, + <<"target">> => ID + }, Wallet), #{}). +``` + +### http_get_schedule + +```erlang +http_get_schedule(N, PMsg, From, To) -> + http_get_schedule(N, PMsg, From, To, <<"application/http">>). 
+``` + +### http_get_schedule + +```erlang +http_get_schedule(N, PMsg, From, To, Format) -> + ID = hb_message:id(PMsg, all), + Wallet = hb:wallet(), + {ok, _} = hb_http:get(N, hb_message:commit(#{ + <<"path">> => <<"/~scheduler@1.0/schedule">>, + <<"method">> => <<"GET">>, + <<"target">> => hb_util:human_id(ID), + <<"from">> => From, + <<"to">> => To, + <<"accept">> => Format + }, Wallet), #{}). +``` + +### http_get_schedule_redirect_test_ + +```erlang +http_get_schedule_redirect_test_() -> + {timeout, 60, fun http_get_schedule_redirect/0}. +``` + +### http_get_schedule_redirect + +```erlang +http_get_schedule_redirect() -> + Opts = + #{ + store => + [ + #{ <<"store-module">> => hb_store_fs, <<"name">> => <<"cache-mainnet">> }, + #{ <<"store-module">> => hb_store_gateway, <<"opts">> => #{} } + ], + scheduler_follow_redirects => false + }, + {N, _Wallet} = http_init(Opts), + start(), + ProcID = <<"0syT13r0s0tgPmIed95bJnuSqaD29HQNN8D3ElLSrsc">>, + Res = hb_http:get(N, <<"/", ProcID/binary, "/schedule">>, Opts), + ?assertMatch({ok, #{ <<"location">> := Location }} when is_binary(Location), Res). +``` + +### http_post_schedule_test_ + +```erlang +http_post_schedule_test_() -> + {timeout, 60, fun http_post_schedule/0}. +``` + +### http_post_schedule + +```erlang +http_post_schedule() -> + {N, Opts} = http_init(), + PMsg = hb_message:commit(test_process(Opts), Opts), + Msg1 = hb_message:commit(#{ + <<"path">> => <<"/~scheduler@1.0/schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => PMsg + }, Opts), + {ok, _Res} = hb_http:post(N, Msg1, Opts), + {ok, Res2} = + http_post_schedule_sign( + N, + #{ <<"inner">> => <<"test-message">> }, + PMsg, + Opts + ), + ?assertEqual(<<"test-message">>, hb_ao:get(<<"body/inner">>, Res2, Opts)), + ?assertMatch({ok, #{ <<"current">> := 1 }}, http_get_slot(N, PMsg)). 
+``` + +### http_get_schedule_test_ + +```erlang +http_get_schedule_test_() -> + {timeout, 20, fun() -> + {Node, Opts} = http_init(), + PMsg = hb_message:commit(test_process(Opts), Opts), + Msg1 = hb_message:commit(#{ + <<"path">> => <<"/~scheduler@1.0/schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => PMsg + }, Opts), + Msg2 = hb_message:commit(#{ + <<"path">> => <<"/~scheduler@1.0/schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + hb_message:commit( + #{ + <<"target">> => + hb_util:human_id( + hb_message:id(PMsg, all, Opts) + ), + <<"body">> => <<"test-message">>, + <<"type">> => <<"Message">> + }, + Opts + ) + }, Opts), + {ok, _} = hb_http:post(Node, Msg1, Opts), + lists:foreach( + fun(_) -> + {ok, Res} = hb_http:post(Node, Msg2, Opts), + ?event(debug_scheduler_test, {res, Res}) + end, + lists:seq(1, 10) + ), + ?assertMatch({ok, #{ <<"current">> := 10 }}, http_get_slot(Node, PMsg)), + ?debug_wait(5000), + {ok, Schedule} = http_get_schedule(Node, PMsg, 0, 10), + Assignments = hb_ao:get(<<"assignments">>, Schedule, Opts), + ?assertEqual( + 12, % +1 for the hashpath + hb_maps:size(Assignments, Opts) + ) + end}. +``` + +### http_get_legacy_schedule_test_ + +```erlang +http_get_legacy_schedule_test_() -> + {timeout, 60, fun() -> + Target = <<"CtOVB2dBtyN_vw3BdzCOrvcQvd9Y1oUGT-zLit8E3qM">>, + {Node, Opts} = http_init(), + {ok, Res} = hb_http:get(Node, <<"/~scheduler@1.0/schedule&target=", Target/binary>>, Opts), + LoadedRes = hb_cache:ensure_all_loaded(Res, Opts), + ?assertMatch(#{ <<"assignments">> := As } when map_size(As) > 0, LoadedRes) + end}. +``` + +### http_get_legacy_slot_test_ + +```erlang +http_get_legacy_slot_test_() -> + {timeout, 60, fun() -> + Target = <<"CtOVB2dBtyN_vw3BdzCOrvcQvd9Y1oUGT-zLit8E3qM">>, + {Node, Opts} = http_init(), + Res = hb_http:get(Node, <<"/~scheduler@1.0/slot&target=", Target/binary>>, Opts), + ?assertMatch({ok, #{ <<"current">> := Slot }} when Slot > 0, Res) + end}. 
+``` + +### http_get_legacy_schedule_slot_range_test_ + +```erlang +http_get_legacy_schedule_slot_range_test_() -> + {timeout, 60, fun() -> + Target = <<"zrhm4OpfW85UXfLznhdD-kQ7XijXM-s2fAboha0V5GY">>, + {Node, Opts} = http_init(), + {ok, Res} = hb_http:get(Node, <<"/~scheduler@1.0/schedule&target=", Target/binary, + "&from=0&to=10">>, Opts), + LoadedRes = hb_cache:ensure_all_loaded(Res, Opts), + ?event({res, LoadedRes}), + ?assertMatch(#{ <<"assignments">> := As } when map_size(As) == 11, LoadedRes) + end}. +``` + +### http_get_legacy_schedule_as_aos2_test_ + +```erlang +http_get_legacy_schedule_as_aos2_test_() -> + {timeout, 60, fun() -> + Target = <<"CtOVB2dBtyN_vw3BdzCOrvcQvd9Y1oUGT-zLit8E3qM">>, + {Node, Opts} = http_init(), + {ok, Res} = + hb_http:get( + Node, + #{ + <<"path">> => <<"/~scheduler@1.0/schedule?target=", Target/binary>>, + <<"accept">> => <<"application/aos-2">>, + <<"method">> => <<"GET">> + }, + #{} + ), + Decoded = hb_json:decode(hb_ao:get(<<"body">>, Res, Opts)), + ?assertMatch(#{ <<"edges">> := As } when length(As) > 0, Decoded) + end}. +``` + +### http_post_legacy_schedule_test_ + +```erlang +http_post_legacy_schedule_test_() -> + {timeout, 60, fun() -> + {Node, Opts} = http_init(), + Target = <<"zrhm4OpfW85UXfLznhdD-kQ7XijXM-s2fAboha0V5GY">>, + Signed = + hb_message:commit( + #{ + <<"data-protocol">> => <<"ao">>, + <<"variant">> => <<"ao.TN.1">>, + <<"type">> => <<"Message">>, + <<"action">> => <<"ping">>, + <<"target">> => Target, + <<"test-from">> => hb_util:human_id(hb:address()) + }, + Opts, + <<"ans104@1.0">> + ), + WithMethodAndPath = + Signed#{ + <<"path">> => <<"/~scheduler@1.0/schedule">>, + <<"method">> => <<"POST">> + }, + ?event(debug_downgrade, {signed, Signed}), + {Status, Res} = hb_http:post(Node, WithMethodAndPath, Opts), + ?event(debug_downgrade, {status, Status}), + ?event({res, Res}), + ?assertMatch( + {ok, #{ <<"slot">> := Slot }} when Slot > 0, + {Status, Res} + ) + end}. 
+``` + +### http_get_json_schedule_test_ + +```erlang +http_get_json_schedule_test_() -> + {timeout, 60, fun() -> + {Node, Opts} = http_init(), + PMsg = hb_message:commit(test_process(Opts), Opts), + Msg1 = hb_message:commit(#{ + <<"path">> => <<"/~scheduler@1.0/schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => PMsg + }, Opts), + {ok, _} = hb_http:post(Node, Msg1, Opts), + Msg2 = hb_message:commit(#{ + <<"path">> => <<"/~scheduler@1.0/schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + hb_message:commit( + #{ + <<"inner">> => <<"test">>, + <<"target">> => hb_util:human_id(hb_message:id(PMsg, all)) + }, + Opts + ) + }, + Opts + ), + lists:foreach( + fun(_) -> {ok, _} = hb_http:post(Node, Msg2, Opts) end, + lists:seq(1, 10) + ), + ?assertMatch({ok, #{ <<"current">> := 10 }}, http_get_slot(Node, PMsg)), + {ok, Schedule} = http_get_schedule(Node, PMsg, 0, 10, <<"application/aos-2">>), + ?event({schedule, Schedule}), + JSON = hb_ao:get(<<"body">>, Schedule, Opts), + Assignments = hb_json:decode(JSON), + ?assertEqual( + 11, % +1 for the hashpath + length(hb_maps:get(<<"edges">>, Assignments)) + ) + end}. 
+``` + +### single_resolution + +```erlang +single_resolution(Opts) -> + start(), + BenchTime = 1, + Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), + Msg1 = test_process(Opts#{ priv_wallet => Wallet }), + ?event({benchmark_start, ?MODULE}), + MsgToSchedule = hb_message:commit(#{ + <<"type">> => <<"Message">>, + <<"test-key">> => <<"test-val">> + }, Opts), + Iterations = hb_test_utils:benchmark( + fun(_) -> + MsgX = #{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => MsgToSchedule + }, + ?assertMatch({ok, _}, hb_ao:resolve(Msg1, MsgX, Opts)) + end, + BenchTime + ), + ?event(benchmark, {scheduled, Iterations}), + Msg3 = #{ + <<"path">> => <<"slot">>, + <<"method">> => <<"GET">>, + <<"process">> => hb_util:human_id(hb_message:id(Msg1, all, Opts)) + }, + ?assertMatch({ok, #{ <<"current">> := CurrentSlot }} + when CurrentSlot == Iterations - 1, + hb_ao:resolve(Msg1, Msg3, Opts)), + ?event(bench, {res, Iterations - 1}), + hb_test_utils:benchmark_print( + <<"Scheduled through AO-Core:">>, + <<"messages">>, + Iterations, + BenchTime + ), + ?assert(Iterations > 3). 
+``` + +### many_clients + +```erlang +many_clients(Opts) -> + BenchTime = 1, + Processes = hb_opts:get(workers, 25, Opts), + {Node, Opts} = http_init(Opts), + PMsg = hb_message:commit(test_process(Opts), Opts), + Msg1 = hb_message:commit(#{ + <<"path">> => <<"/~scheduler@1.0/schedule">>, + <<"method">> => <<"POST">>, + <<"process">> => PMsg, + <<"body">> => hb_message:commit(#{ <<"inner">> => <<"test">> }, Opts) + }, Opts), + {ok, _} = hb_http:post(Node, Msg1, Opts), + Iterations = hb_test_utils:benchmark( + fun(X) -> + {ok, _} = hb_http:post(Node, Msg1, Opts), + ?event(bench, {iteration, X, self()}) + end, + BenchTime, + Processes + ), + ?event({iterations, Iterations}), + hb_format:eunit_print( + "Scheduled ~p messages with ~p workers through HTTP in ~ps (~.2f msg/s)", + [Iterations, Processes, BenchTime, Iterations / BenchTime] + ), + {ok, Res} = http_get_slot(Node, PMsg), + ?event(bench, {res, Res}), + ?assert(Iterations > 10). +``` + +### benchmark_suite_test_ + +```erlang +benchmark_suite_test_() -> + {timeout, 10, fun() -> + rand:seed(exsplus, erlang:timestamp()), + Port = 30000 + rand:uniform(10000), + Bench = [ + {benchmark, "benchmark", fun single_resolution/1}, + {multihttp_benchmark, "multihttp_benchmark", fun many_clients/1} + ], + filelib:ensure_dir( + binary_to_list(Base = <<"cache-TEST/run-">>) + ), + hb_test_utils:suite_with_opts(Bench, benchmark_suite(Port, Base)) + end}. 
+``` + +### benchmark_suite + +```erlang +benchmark_suite(Port, Base) -> + PortBin = integer_to_binary(Port), + [ + #{ + name => fs, + requires => [hb_store_fs], + opts => #{ + store => #{ <<"store-module">> => hb_store_fs, + <<"name">> => <> + }, + scheduling_mode => local_confirmation, + port => Port + }, + desc => <<"FS store, local conf.">> + }, + #{ + name => fs_aggressive, + requires => [hb_store_fs], + opts => #{ + store => #{ <<"store-module">> => hb_store_fs, + <<"name">> => <> + }, + scheduling_mode => aggressive, + port => Port + 1 + }, + desc => <<"FS store, aggressive conf.">> + }, + #{ + name => rocksdb, + requires => [hb_store_rocksdb], + opts => #{ + store => #{ <<"store-module">> => hb_store_rocksdb, + <<"name">> => <> + }, + scheduling_mode => local_confirmation, + port => Port + 2 + }, + desc => <<"RocksDB store, local conf.">> + }, + #{ + name => rocksdb_aggressive, + requires => [hb_store_rocksdb], + opts => #{ + store => #{ <<"store-module">> => hb_store_rocksdb, + <<"name">> => <> + }, + scheduling_mode => aggressive, + port => Port + 3 + }, + desc => <<"RocksDB store, aggressive conf.">> + }, + #{ + name => rocksdb_extreme_aggressive_h3, + requires => [http3], + opts => #{ + store => #{ <<"store-module">> => hb_store_rocksdb, + <<"name">> => + << + Base/binary, + "run-", + (integer_to_binary(Port+4))/binary + >> + }, + scheduling_mode => aggressive, + protocol => http3, + workers => 100 + }, + desc => <<"100xRocksDB store, aggressive conf, http/3.">> + } + ]. 
+``` + +--- + +*Generated from [dev_scheduler.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler.erl)* diff --git a/docs/book/src/dev_scheduler_cache.erl.md b/docs/book/src/dev_scheduler_cache.erl.md new file mode 100644 index 000000000..8ed7256cb --- /dev/null +++ b/docs/book/src/dev_scheduler_cache.erl.md @@ -0,0 +1,645 @@ +# dev_scheduler_cache + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_cache.erl) + +A module that provides a cache for scheduler assignments and locations. + +--- + +## Exported Functions + +- `latest/2` +- `list/2` +- `read_location/2` +- `read/3` +- `write_location/2` +- `write_spawn/2` +- `write/2` + +--- + +### opts + +A module that provides a cache for scheduler assignments and locations. +Merge the scheduler store with the main store. Used before writing + +```erlang +opts(Opts) -> + Opts#{ + store => + hb_opts:get( + scheduler_store, + hb_opts:get(store, no_viable_store, Opts), + Opts + ) + }. +``` + +### write + +Write an assignment message into the cache. + +```erlang +write(RawAssignment, RawOpts) -> + Assignment = hb_cache:ensure_all_loaded(RawAssignment, RawOpts), + Opts = opts(RawOpts), + Store = hb_opts:get(store, no_viable_store, Opts), + % Write the message into the main cache + ProcID = hb_ao:get(<<"process">>, Assignment, Opts), + Slot = hb_ao:get(<<"slot">>, Assignment, Opts), + ?event( + {writing_assignment, + {proc_id, ProcID}, + {slot, Slot}, + {assignment, Assignment} + } + ), + case hb_cache:write(Assignment, Opts) of + {ok, RootPath} -> + % Create symlinks from the message on the process and the + % slot on the process to the underlying data. +``` + +### write_spawn + +Write the initial assignment message to the cache. + +```erlang +write_spawn(RawInitMessage, Opts) -> + InitMessage = hb_cache:ensure_all_loaded(RawInitMessage, Opts), + hb_cache:write(InitMessage, opts(Opts)). +``` + +### read + +Get an assignment message from the cache. 
+ +```erlang +read(ProcID, Slot, Opts) when is_integer(Slot) -> + read(ProcID, hb_util:bin(Slot), Opts); +``` + +### read + +Get an assignment message from the cache. + +```erlang +read(ProcID, Slot, RawOpts) -> + Opts = opts(RawOpts), + Store = hb_opts:get(store, no_viable_store, Opts), + ResolvedPath = + P2 = hb_store:resolve( + Store, + P1 = hb_store:path(Store, [ + ?SCHEDULER_CACHE_PREFIX, + "assignments", + hb_util:human_id(ProcID), + Slot + ]) + ), + ?event( + {read_assignment, + {proc_id, ProcID}, + {slot, Slot}, + {store, Store} + } + ), + ?event({resolved_path, {p1, P1}, {p2, P2}, {resolved, ResolvedPath}}), + case hb_cache:read(ResolvedPath, Opts) of + {ok, Assignment} -> + % If the slot key is not present, the format of the assignment is + % AOS2, so we need to convert it to the canonical format. +``` + +### list + +Get the assignments for a process. + +```erlang +list(ProcID, RawOpts) -> + Opts = opts(RawOpts), + hb_cache:list_numbered( + hb_store:path(hb_opts:get(store, no_viable_store, Opts), [ + ?SCHEDULER_CACHE_PREFIX, + "assignments", + hb_util:human_id(ProcID) + ]), + Opts + ). +``` + +### latest + +Get the latest assignment from the cache. + +```erlang +latest(ProcID, RawOpts) -> + Opts = opts(RawOpts), + ?event({getting_assignments_from_cache, {proc_id, ProcID}, {opts, Opts}}), + case dev_scheduler_cache:list(ProcID, Opts) of + [] -> + ?event({no_assignments_in_cache, {proc_id, ProcID}}), + not_found; + Assignments -> + AssignmentNum = lists:max(Assignments), + ?event( + {found_assignment_from_cache, + {proc_id, ProcID}, + {assignment_num, AssignmentNum} + } + ), + {ok, Assignment} = dev_scheduler_cache:read( + ProcID, + AssignmentNum, + Opts + ), + { + AssignmentNum, + hb_ao:get( + <<"hash-chain">>, Assignment, #{ hashpath => ignore }) + } + end. +``` + +### read_location + +Read the latest known scheduler location for an address. 
+ +```erlang +read_location(Address, RawOpts) -> + Opts = opts(RawOpts), + Res = + hb_cache:read( + hb_store:path(hb_opts:get(store, no_viable_store, Opts), [ + ?SCHEDULER_CACHE_PREFIX, + "locations", + hb_util:human_id(Address) + ]), + Opts + ), + Event = + case Res of + {ok, _} -> found_in_store; + not_found -> not_found_in_store; + _ -> local_lookup_unexpected_result + end, + ?event(scheduler_location, {Event, {address, Address}, {res, Res}}), + Res. +``` + +### write_location + +Write the latest known scheduler location for an address. + +```erlang +write_location(LocationMsg, RawOpts) -> + Opts = opts(RawOpts), + Signers = hb_message:signers(LocationMsg, Opts), + ?event( + scheduler_location, + {caching_locally, + {signers, Signers}, + {location_msg, LocationMsg} + } + ), + case hb_cache:write(LocationMsg, Opts) of + {ok, RootPath} -> + lists:foreach( + fun(Signer) -> + hb_store:make_link( + hb_opts:get(store, no_viable_store, Opts), + RootPath, + hb_store:path( + hb_opts:get(store, no_viable_store, Opts), + [ + ?SCHEDULER_CACHE_PREFIX, + "locations", + hb_util:human_id(Signer) + ] + ) + ) + end, + Signers + ), + ok; + false -> + % The message is not valid, so we don't cache it. +``` + +### volatile_schedule_test + +Test that a volatile schedule is lost on restart. 
+ +```erlang +volatile_schedule_test() -> + VolStore = hb_test_utils:test_store(hb_store_fs, <<"volatile-sched">>), + NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"non-volatile-sched">>), + Opts = #{ + store => [NonVolStore], + scheduler_store => [VolStore] + }, + hb_store:start(VolStore), + hb_store:start(NonVolStore), + Assignment = #{ + <<"variant">> => <<"ao.N.1">>, + <<"process">> => ProcID = hb_util:human_id(crypto:strong_rand_bytes(32)), + <<"slot">> => 1, + <<"hash-chain">> => <<"test-hash-chain">> + }, + ?assertEqual(ok, write(Assignment, Opts)), + ?assertMatch({1, _}, latest(ProcID, Opts)), + ?assertEqual({ok, Assignment}, read(ProcID, 1, Opts)), + hb_store:stop(VolStore), + hb_store:reset(VolStore), + hb_store:start(VolStore), + ?assertMatch(not_found, latest(ProcID, Opts)), + ?assertMatch(not_found, read(ProcID, 1, Opts)). +``` + +### concurrent_scheduler_write_test + +Test concurrent writes to scheduler store from multiple processes. + +```erlang +concurrent_scheduler_write_test() -> + VolStore = hb_test_utils:test_store(hb_store_fs, <<"concurrent-vol">>), + NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"concurrent-nonvol">>), + Opts = #{ + store => [NonVolStore], + scheduler_store => [VolStore] + }, + hb_store:start(VolStore), + hb_store:start(NonVolStore), + Workers = 50, + ProcID = hb_util:human_id(crypto:strong_rand_bytes(32)), + Parent = self(), + lists:foreach(fun(Slot) -> + spawn_link(fun() -> + Assignment = #{ + <<"process">> => ProcID, + <<"slot">> => Slot, + <<"hash-chain">> => + <<"concurrent-test-", (integer_to_binary(Slot))/binary>> + }, + Result = write(Assignment, Opts), + Parent ! 
{write_result, Slot, Result} + end) + end, lists:seq(1, Workers)), + Results = + lists:map( + fun(Slot) -> + receive + {write_result, Slot, Result} -> + ?event(testing, {write_result, Slot, Result}), + Result + after 5000 -> + timeout + end + end, + lists:seq(1, Workers) + ), + ?event(testing, {concurrent_write_results, Results,Workers}), + ?assertEqual(lists:duplicate(Workers, ok), Results), + AllSlots = list(ProcID, Opts), + ?event(testing, {all_slots, AllSlots}), + ?assertEqual(Workers, length(AllSlots)), + ?assertEqual(lists:seq(1, Workers), lists:sort(AllSlots)). +``` + +### concurrent_read_write_test + +Test concurrent reads during writes to detect race conditions. + +```erlang +concurrent_read_write_test() -> + VolStore = hb_test_utils:test_store(hb_store_fs, <<"race-vol">>), + NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"race-nonvol">>), + Opts = #{ + store => [NonVolStore], + scheduler_store => [VolStore] + }, + hb_store:start(VolStore), + hb_store:start(NonVolStore), + ProcID = hb_util:human_id(crypto:strong_rand_bytes(32)), + Parent = self(), + ?event(testing, {concurrent_test_proc_id, ProcID}), + spawn_link(fun() -> + lists:foreach(fun(Slot) -> + Assignment = #{ + <<"variant">> => <<"ao.N.1">>, + <<"process">> => ProcID, + <<"slot">> => Slot, + <<"hash-chain">> => <<"race-test-", (integer_to_binary(Slot))/binary>> + }, + write(Assignment, Opts), + timer:sleep(1) + end, lists:seq(1, 100)), + ?event(testing, {writer_completed}), + Parent ! writer_done + end), + lists:foreach( + fun(ReaderNum) -> + spawn_link(fun() -> + ReadResults = lists:map(fun(Slot) -> + timer:sleep(rand:uniform(5)), + case read(ProcID, Slot, Opts) of + {ok, _} -> success; + not_found -> not_found + end + end, lists:seq(1, 100)), + SuccessCount = length([R || R <- ReadResults, R == success]), + ?event(testing, {reader_done, ReaderNum, SuccessCount}), + Parent ! 
{reader_done, ReaderNum, ReadResults} + end) + end, + lists:seq(1, 10) + ), + receive + writer_done -> ok + after 15000 -> + ?assert(false) + end, + AllReaderResults = lists:map(fun(ReaderNum) -> + receive + {reader_done, ReaderNum, Results} -> Results + after 5000 -> + ?assert(false), + [] + end + end, lists:seq(1, 10)), + FinalSlots = list(ProcID, Opts), + ?event(testing, {final_verification, {slots_found, length(FinalSlots)}}), + ?assertEqual(100, length(FinalSlots)), + ?assertEqual(lists:seq(1, 100), lists:sort(FinalSlots)), + TotalSuccessfulReads = lists:sum([ + length([R || R <- Results, R == success]) || Results <- AllReaderResults + ]), + ?event(testing, { + concurrent_read_stats, + {total_successful_reads, TotalSuccessfulReads} + }), + ?assert(TotalSuccessfulReads > 0). +``` + +### large_assignment_volume_test + +Test writing a large volume of assignments to stress memory. Helps + +```erlang +large_assignment_volume_test() -> + VolStore = hb_test_utils:test_store(hb_store_fs, <<"volume-vol">>), + NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"volume-nonvol">>), + Opts = #{ + store => [NonVolStore], + scheduler_store => [VolStore] + }, + hb_store:start(VolStore), + hb_store:start(NonVolStore), + VolumeSize = 1000, + ProcID = hb_util:human_id(crypto:strong_rand_bytes(32)), + StartTime = erlang:monotonic_time(millisecond), + lists:foreach( + fun(Slot) -> + Assignment = #{ + <<"variant">> => <<"ao.N.1">>, + <<"process">> => ProcID, + <<"slot">> => Slot, + <<"hash-chain">> => crypto:strong_rand_bytes(64) + }, + ?assertEqual(ok, write(Assignment, Opts)) + end, + lists:seq(1, VolumeSize) + ), + EndTime = erlang:monotonic_time(millisecond), + ?event(testing, {large_volume_write_time, EndTime - StartTime}), + AllSlots = list(ProcID, Opts), + ?assertEqual(VolumeSize, length(AllSlots)), + ?assertEqual(lists:seq(1, VolumeSize), lists:sort(AllSlots)), + ReadStartTime = erlang:monotonic_time(millisecond), + lists:foreach(fun(Slot) -> + ?assertMatch({ok, _}, 
read(ProcID, Slot, Opts)) + end, lists:seq(1, VolumeSize)), + ReadEndTime = erlang:monotonic_time(millisecond), + ?event(testing, {large_volume_read_time, ReadEndTime - ReadStartTime}). +``` + +### rapid_restart_test + +Test rapid store restarts under load. + +```erlang +rapid_restart_test() -> + VolStore = hb_test_utils:test_store(hb_store_fs, <<"restart-vol">>), + NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"restart-nonvol">>), + Opts = #{ + store => [NonVolStore], + scheduler_store => [VolStore] + }, + hb_store:start(VolStore), + hb_store:start(NonVolStore), + ProcID = hb_util:human_id(crypto:strong_rand_bytes(32)), + lists:foreach( + fun(Cycle) -> + lists:foreach( + fun(Slot) -> + Assignment = #{ + <<"variant">> => <<"ao.N.1">>, + <<"process">> => ProcID, + <<"slot">> => Slot + (Cycle * 10), + <<"hash-chain">> => + <<"restart-cycle-", (integer_to_binary(Cycle))/binary>> + }, + ?assertEqual(ok, write(Assignment, Opts)) + end, + lists:seq(1, 10) + ), + SlotsBeforeRestart = list(ProcID, Opts), + ?assertMatch([_|_], SlotsBeforeRestart), + ?event(testing, { + restart_cycle, Cycle, {slots_before, length(SlotsBeforeRestart)} + }), + hb_store:stop(VolStore), + timer:sleep(10), + hb_store:reset(VolStore), + hb_store:start(VolStore), + SlotsAfterRestart = list(ProcID, Opts), + ?assertEqual([], SlotsAfterRestart), + ?event({restart_verified, Cycle, {slots_after, length(SlotsAfterRestart)}}) + end, + lists:seq(1, 5) + ). +``` + +### mixed_store_reset_operations_test + +Test scheduler store behavior during reset store operations. 
+ +```erlang +mixed_store_reset_operations_test() -> + VolStore = hb_test_utils:test_store(hb_store_fs, <<"mixed-vol">>), + NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"mixed-nonvol">>), + Opts = #{ + store => [NonVolStore], + scheduler_store => [VolStore] + }, + hb_store:start(VolStore), + hb_store:start(NonVolStore), + ProcID = hb_util:human_id(crypto:strong_rand_bytes(32)), + Assignment1 = #{ + <<"variant">> => <<"ao.N.1">>, + <<"process">> => ProcID, + <<"slot">> => 1, + <<"hash-chain">> => <<"mixed-test-1">> + }, + ?assertEqual(ok, write(Assignment1, Opts)), + ?event(testing, {assignment_written, ProcID}), + hb_store:reset(NonVolStore), + ReadAfterNonVolReset = read(ProcID, 1, Opts), + ?assertMatch({ok, _}, ReadAfterNonVolReset), + ?event(testing, {after_nonvol_reset, ReadAfterNonVolReset}), + hb_store:reset(VolStore), + ReadAfterVolReset = read(ProcID, 1, Opts), + ?assertEqual(not_found, ReadAfterVolReset), + ?event(testing, {after_vol_reset, ReadAfterVolReset}). +``` + +### invalid_assignment_stress_test + +Test handling of invalid assignment data. 
+ +```erlang +invalid_assignment_stress_test() -> + VolStore = hb_test_utils:test_store(hb_store_fs, <<"invalid-vol">>), + NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"invalid-nonvol">>), + Opts = #{ + store => [NonVolStore], + scheduler_store => [VolStore] + }, + hb_store:start(VolStore), + hb_store:start(NonVolStore), + InvalidAssignments = [ + #{}, + #{<<"process">> => <<"invalid">>}, + #{<<"slot">> => 1}, + #{<<"process">> => <<>>, <<"slot">> => 1}, + #{<<"process">> => <<"valid">>, <<"slot">> => -1}, + #{<<"process">> => <<"valid">>, <<"slot">> => <<"not-integer">>} + ], + ?event(testing, {testing_invalid_assignments, length(InvalidAssignments)}), + Results = lists:map(fun(Assignment) -> + Result = try + write(Assignment, Opts) + catch + _:_ -> error + end, + ?assertNotEqual(ok, Result), + Result + end, InvalidAssignments), + ErrorCount = length([R || R <- Results, R == error]), + ?event( + {invalid_assignment_results, + {errors, ErrorCount}, + {total, length(InvalidAssignments)} + } + ), + ?assertEqual(6, ErrorCount). +``` + +### scheduler_location_stress_test + +Test scheduler location operations under stress. 
+ +```erlang +scheduler_location_stress_test() -> + VolStore = hb_test_utils:test_store(hb_store_fs, <<"location-vol">>), + NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"location-nonvol">>), + Wallet = ar_wallet:new(), + Opts = #{ + store => [NonVolStore], + scheduler_store => [VolStore], + priv_wallet => Wallet + }, + hb_store:start(VolStore), + hb_store:start(NonVolStore), + LocationCount = 10, + ?event(testing, {location_stress_test_starting, LocationCount}), + Results = + lists:map( + fun(N) -> + LocationMsg = #{ + <<"scheduler">> => + hb_util:human_id(ar_wallet:to_address(Wallet)), + <<"location">> => + << + "http://scheduler", + (integer_to_binary(N))/binary, + ".com" + >>, + <<"timestamp">> => erlang:system_time(millisecond), + <<"ttl">> => 3600000 + }, + Result = + try + write_location(LocationMsg, Opts) + catch + Res -> + ?event(testing, {location_write_error, {error, Res}}), + ok + end, + ?assert(Result == ok orelse element(1, Result) == error), + Result + end, + lists:seq(1, LocationCount) + ), + SuccessCount = length([R || R <- Results, R == ok]), + ?event( + {location_stress_results, + {successes, SuccessCount}, + {total, LocationCount} + } + ). +``` + +### volatile_store_corruption_test + +Test system behavior with corrupted data in volatile store. 
+ +```erlang +volatile_store_corruption_test() -> + VolStore = hb_test_utils:test_store(hb_store_fs, <<"corruption-vol">>), + NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"corruption-nonvol">>), + Opts = #{ + store => [NonVolStore], + scheduler_store => [VolStore] + }, + hb_store:start(VolStore), + hb_store:start(NonVolStore), + ProcID = hb_util:human_id(crypto:strong_rand_bytes(32)), + Assignment = #{ + <<"variant">> => <<"ao.N.1">>, + <<"process">> => ProcID, + <<"slot">> => 1, + <<"hash-chain">> => <<"corruption-test">> + }, + ?assertEqual(ok, write(Assignment, Opts)), + ReadBeforeCorruption = read(ProcID, 1, Opts), + ?assertMatch({ok, _}, ReadBeforeCorruption), + ?event(testing, {before_corruption, ReadBeforeCorruption}), + hb_store:reset(VolStore), + ?event(testing, {volatile_store_reset}), + ReadAfterCorruption = read(ProcID, 1, Opts), + SlotsAfterCorruption = list(ProcID, Opts), + LatestAfterCorruption = latest(ProcID, Opts), + ?assertEqual(not_found, ReadAfterCorruption), + ?assertEqual([], SlotsAfterCorruption), + ?assertEqual(not_found, LatestAfterCorruption), + ?event(testing, + { corruption_recovery_verified, + { read, ReadAfterCorruption }, + { list, length(SlotsAfterCorruption) }, + { latest, LatestAfterCorruption } +``` + +--- + +*Generated from [dev_scheduler_cache.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_cache.erl)* diff --git a/docs/book/src/dev_scheduler_formats.erl.md b/docs/book/src/dev_scheduler_formats.erl.md new file mode 100644 index 000000000..33793cd1d --- /dev/null +++ b/docs/book/src/dev_scheduler_formats.erl.md @@ -0,0 +1,309 @@ +# dev_scheduler_formats + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_formats.erl) + +This module is used by dev_scheduler in order to produce outputs that +are compatible with various forms of AO clients. 
It features two main formats: +- `application/json` +- `application/http` +The `application/json` format is a legacy format that is not recommended for +new integrations of the AO protocol. + +--- + +## Exported Functions + +- `aos2_normalize_types/1` +- `aos2_to_assignment/2` +- `aos2_to_assignments/3` +- `assignments_to_aos2/4` +- `assignments_to_bundle/4` + +--- + +### assignments_to_bundle + +This module is used by dev_scheduler in order to produce outputs that +Generate a `GET /schedule` response for a process as HTTP-sig bundles. + +```erlang +assignments_to_bundle(ProcID, Assignments, More, Opts) -> + TimeInfo = ar_timestamp:get(), + assignments_to_bundle(ProcID, Assignments, More, TimeInfo, Opts). +``` + +### assignments_to_bundle + +```erlang +assignments_to_bundle(ProcID, Assignments, More, TimeInfo, RawOpts) -> + Opts = format_opts(RawOpts), + {Timestamp, Height, Hash} = TimeInfo, + {ok, #{ + <<"type">> => <<"schedule">>, + <<"process">> => hb_util:human_id(ProcID), + <<"continues">> => hb_util:atom(More), + <<"timestamp">> => hb_util:int(Timestamp), + <<"block-height">> => hb_util:int(Height), + <<"block-hash">> => hb_util:human_id(Hash), + <<"assignments">> => + hb_maps:from_list( + lists:map( + fun(Assignment) -> + { + hb_ao:get( + <<"slot">>, + Assignment, + Opts#{ hashpath => ignore } + ), + Assignment + } + end, + Assignments + ) + ) + }}. 
+``` + +### assignments_to_aos2 + +```erlang +assignments_to_aos2(ProcID, Assignments, More, RawOpts) when is_map(Assignments) -> + assignments_to_aos2( + ProcID, + hb_util:message_to_ordered_list(Assignments), + More, + format_opts(RawOpts) + ); +``` + +### assignments_to_aos2 + +```erlang +assignments_to_aos2(ProcID, Assignments, More, RawOpts) -> + Opts = format_opts(RawOpts), + {Timestamp, Height, Hash} = ar_timestamp:get(), + BodyStruct = + #{ + <<"page_info">> => + #{ + <<"process">> => hb_util:human_id(ProcID), + <<"has_next_page">> => More, + <<"timestamp">> => list_to_binary(integer_to_list(Timestamp)), + <<"block-height">> => list_to_binary(integer_to_list(Height)), + <<"block-hash">> => hb_util:human_id(Hash) + }, + <<"edges">> => + lists:map( + fun(Assignment) -> + #{ + <<"cursor">> => cursor(Assignment, Opts), + <<"node">> => assignment_to_aos2(Assignment, Opts) + } + end, + Assignments + ) + }, + Encoded = hb_json:encode(BodyStruct), + ?event({body_struct, BodyStruct}), + ?event({encoded, {explicit, Encoded}}), + {ok, + #{ + <<"content-type">> => <<"application/json">>, + <<"body">> => Encoded + } + }. +``` + +### cursor + +Generate a cursor for an assignment. This should be the slot number, at +Convert an assignment to an AOS2-compatible JSON structure. + +```erlang +cursor(Assignment, RawOpts) -> + Opts = format_opts(RawOpts), + hb_ao:get(<<"slot">>, Assignment, Opts). +``` + +### assignment_to_aos2 + +Generate a cursor for an assignment. This should be the slot number, at +Convert an assignment to an AOS2-compatible JSON structure. + +```erlang +assignment_to_aos2(Assignment, RawOpts) -> + Opts = format_opts(RawOpts), + Message = hb_ao:get(<<"body">>, Assignment, Opts), + AssignmentWithoutBody = hb_maps:without([<<"body">>], Assignment, Opts), + #{ + <<"message">> => + dev_json_iface:message_to_json_struct(Message, Opts), + <<"assignment">> => + dev_json_iface:message_to_json_struct(AssignmentWithoutBody, Opts) + }. 
+``` + +### aos2_to_assignments + +Convert an AOS2-style JSON structure to a normalized HyperBEAM + +```erlang +aos2_to_assignments(ProcID, Body, RawOpts) -> + Opts = format_opts(RawOpts), + Assignments = hb_maps:get(<<"edges">>, Body, Opts, Opts), + ?event({raw_assignments, Assignments}), + ParsedAssignments = + lists:map( + fun(A) -> aos2_to_assignment(A, Opts) end, + Assignments + ), + ?event({parsed_assignments, ParsedAssignments}), + TimeInfo = + case ParsedAssignments of + [] -> {0, 0, hb_util:encode(<<0:256>>)}; + _ -> + Last = lists:last(ParsedAssignments), + { + hb_ao:get(<<"timestamp">>, Last, Opts), + hb_ao:get(<<"block-height">>, Last, Opts), + hb_ao:get(<<"block-hash">>, Last, Opts) + } + end, + assignments_to_bundle(ProcID, ParsedAssignments, false, TimeInfo, Opts). +``` + +### aos2_to_assignment + +Create and normalize an assignment from an AOS2-style JSON structure. +The `hb_gateway_client` module expects all JSON structures to at least + +```erlang +aos2_to_assignment(A, RawOpts) -> + Opts = format_opts(RawOpts), + % Unwrap the node if it is provided + Node = hb_maps:get(<<"node">>, A, A, Opts), + ?event({node, Node}), + {ok, Assignment} = + hb_gateway_client:result_to_message( + aos2_normalize_data(hb_maps:get(<<"assignment">>, Node, undefined, Opts)), + Opts + ), + NormalizedAssignment = aos2_normalize_types(Assignment), + {ok, Message} = + case hb_maps:get(<<"message">>, Node, undefined, Opts) of + null -> + MessageID = hb_maps:get(<<"message">>, Assignment, undefined, Opts), + ?event(error, {scheduler_did_not_provide_message, MessageID}), + case hb_cache:read(MessageID, Opts) of + {ok, Msg} -> {ok, Msg}; + {error, _} -> + throw({error, + {message_not_given_by_scheduler_or_cache, + MessageID} + } + ) + end; + Body -> + hb_gateway_client:result_to_message( + aos2_normalize_data(Body), + Opts + ) + end, + NormalizedMessage = aos2_normalize_types(Message), + ?event({message, Message}), + NormalizedAssignment#{ <<"body">> => NormalizedMessage }. 
+``` + +### aos2_normalize_data + +Create and normalize an assignment from an AOS2-style JSON structure. +The `hb_gateway_client` module expects all JSON structures to at least + +```erlang +aos2_normalize_data(JSONStruct) -> + case JSONStruct of + #{<<"data">> := _} -> JSONStruct; + _ -> JSONStruct#{ <<"data">> => <<>> } + end. +``` + +### aos2_normalize_types + +Normalize an AOS2 formatted message to ensure that all field NAMES and + +```erlang +aos2_normalize_types(Msg = #{ <<"timestamp">> := TS }) when is_binary(TS) -> + aos2_normalize_types(Msg#{ <<"timestamp">> => hb_util:int(TS) }); +``` + +### aos2_normalize_types + +Normalize an AOS2 formatted message to ensure that all field NAMES and + +```erlang +aos2_normalize_types(Msg = #{ <<"nonce">> := Nonce }) + when is_binary(Nonce) and not is_map_key(<<"slot">>, Msg) -> + aos2_normalize_types( + Msg#{ <<"slot">> => hb_util:int(Nonce) } + ); +``` + +### aos2_normalize_types + +Normalize an AOS2 formatted message to ensure that all field NAMES and + +```erlang +aos2_normalize_types(Msg = #{ <<"epoch">> := DS }) when is_binary(DS) -> + aos2_normalize_types(Msg#{ <<"epoch">> => hb_util:int(DS) }); +``` + +### aos2_normalize_types + +Normalize an AOS2 formatted message to ensure that all field NAMES and + +```erlang +aos2_normalize_types(Msg = #{ <<"slot">> := Slot }) when is_binary(Slot) -> + aos2_normalize_types(Msg#{ <<"slot">> => hb_util:int(Slot) }); +``` + +### aos2_normalize_types + +Normalize an AOS2 formatted message to ensure that all field NAMES and + +```erlang +aos2_normalize_types(Msg) when not is_map_key(<<"block-hash">>, Msg) -> + ?event({missing_block_hash, Msg}), + aos2_normalize_types(Msg#{ <<"block-hash">> => hb_util:encode(<<0:256>>) }); +``` + +### aos2_normalize_types + +Normalize an AOS2 formatted message to ensure that all field NAMES and + +```erlang +aos2_normalize_types(Msg) -> + ?event( + { + aos2_normalized_types, + {msg, Msg}, + {anchor, hb_ao:get(<<"anchor">>, Msg, <<>>, #{})} + } + ), 
+ Msg. +``` + +### format_opts + +For all scheduler format operations, we do not calculate hashpaths, + +```erlang +format_opts(Opts) -> + Opts#{ + hashpath => ignore, + cache_control => [<<"no-cache">>, <<"no-store">>], + await_inprogress => false +``` + +--- + +*Generated from [dev_scheduler_formats.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_formats.erl)* diff --git a/docs/book/src/dev_scheduler_registry.erl.md b/docs/book/src/dev_scheduler_registry.erl.md new file mode 100644 index 000000000..566936c31 --- /dev/null +++ b/docs/book/src/dev_scheduler_registry.erl.md @@ -0,0 +1,186 @@ +# dev_scheduler_registry + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_registry.erl) + +A simple registry for local services in AO, using pg. Currently, +only SU processes are supported. + +--- + +## Exported Functions + +- `find/1` +- `find/2` +- `find/3` +- `get_processes/0` +- `get_wallet/0` +- `start/0` + +--- + +### start + +```erlang +start() -> + hb_name:start(), + ok. +``` + +### get_wallet + +```erlang +get_wallet() -> + % TODO: We might want to use a different wallet per SU later. +``` + +### find + +Find a process associated with the processor ID in the local registry +Find a process associated with the processor ID in the local registry + +```erlang +find(ProcID) -> find(ProcID, false). +``` + +### find + +Find a process associated with the processor ID in the local registry +Find a process associated with the processor ID in the local registry + +```erlang +find(ProcID, ProcMsgOrFalse) -> + find(ProcID, ProcMsgOrFalse, #{ priv_wallet => hb:wallet() }). +``` + +### find + +Same as `find/2` but with additional options passed when spawning a + +```erlang +find(ProcID, ProcMsgOrFalse, Opts) -> + case hb_name:lookup({<<"scheduler@1.0">>, ProcID}) of + undefined -> maybe_new_proc(ProcID, ProcMsgOrFalse, Opts); + Pid -> Pid + end. 
+``` + +### get_processes + +Return a list of all currently registered ProcID. + +```erlang +get_processes() -> + ?event({getting_processes, hb_name:all()}), + [ ProcID || {{<<"scheduler@1.0">>, ProcID}, _} <- hb_name:all() ]. +``` + +### maybe_new_proc + +Return a list of all currently registered ProcID. + +```erlang +maybe_new_proc(_ProcID, false, _Opts) -> not_found; +``` + +### maybe_new_proc + +Return a list of all currently registered ProcID. + +```erlang +maybe_new_proc(ProcID, ProcMsg, Opts) -> + dev_scheduler_server:start(ProcID, ProcMsg, Opts). +``` + +### test_opts + +```erlang +test_opts() -> + #{ + store => hb_test_utils:test_store(), + priv_wallet => hb:wallet() + }. +``` + +### generate_test_procs + +```erlang +generate_test_procs(Opts) -> + [ + hb_message:commit( + #{ + <<"type">> => <<"Process">>, + <<"image">> => <<0:(1024*32)>> + }, + Opts + ), + hb_message:commit( + #{ + <<"type">> => <<"Process">>, + <<"image">> => <<0:(1024*32)>> + }, + Opts + ) + ]. +``` + +### find_non_existent_process_test + +```erlang +find_non_existent_process_test() -> + Opts = test_opts(), + [Proc1, _Proc2] = generate_test_procs(Opts), + start(), + ?assertEqual(not_found, ?MODULE:find(hb_message:id(Proc1, all))). +``` + +### create_and_find_process_test + +```erlang +create_and_find_process_test() -> + Opts = test_opts(), + [Proc1, _Proc2] = generate_test_procs(Opts), + ID = hb_message:id(Proc1, all, Opts), + start(), + Pid1 = ?MODULE:find(ID, Proc1), + ?assert(is_pid(Pid1)), + ?assertEqual(Pid1, ?MODULE:find(ID, Proc1)). 
+``` + +### create_multiple_processes_test + +```erlang +create_multiple_processes_test() -> + Opts = test_opts(), + [Proc1, Proc2] = generate_test_procs(Opts), + start(), + ID1 = hb_message:id(Proc1, all, Opts), + ID2 = hb_message:id(Proc2, all, Opts), + Pid1 = ?MODULE:find(ID1, Proc1), + Pid2 = ?MODULE:find(ID2, Proc2), + ?assert(is_pid(Pid1)), + ?assert(is_pid(Pid2)), + ?assertNotEqual(Pid1, Pid2), + ?assertEqual(Pid1, ?MODULE:find(ID1, Proc1)), + ?assertEqual(Pid2, ?MODULE:find(ID2, Proc2)). +``` + +### get_all_processes_test + +```erlang +get_all_processes_test() -> + Opts = test_opts(), + [Proc1, Proc2] = generate_test_procs(Opts), + start(), + ID1 = hb_message:id(Proc1, all, Opts), + ID2 = hb_message:id(Proc2, all, Opts), + ?MODULE:find(ID1, Proc1), + ?MODULE:find(ID2, Proc2), + Processes = ?MODULE:get_processes(), + ?assert(length(Processes) >= 2), + ?event({processes, Processes}), + ?assert(lists:member(ID1, Processes)), +``` + +--- + +*Generated from [dev_scheduler_registry.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_registry.erl)* diff --git a/docs/book/src/dev_scheduler_server.erl.md b/docs/book/src/dev_scheduler_server.erl.md new file mode 100644 index 000000000..20a84c69d --- /dev/null +++ b/docs/book/src/dev_scheduler_server.erl.md @@ -0,0 +1,238 @@ +# dev_scheduler_server + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_server.erl) + +A long-lived server that schedules messages for a process. +It acts as a deliberate 'bottleneck' to prevent the server accidentally +assigning multiple messages to the same slot. + +--- + +## Exported Functions + +- `info/1` +- `schedule/2` +- `start/3` +- `stop/1` + +--- + +### start + +A long-lived server that schedules messages for a process. +Start a scheduling server for a given computation. 
+ +```erlang +start(ProcID, Proc, Opts) -> + ?event(scheduling, {starting_scheduling_server, {proc_id, ProcID}}), + spawn_link( + fun() -> + % Before we start, register the scheduler name. +``` + +### commitment_wallets + +Determine the appropriate list of keys to use to commit assignments for + +```erlang +commitment_wallets(ProcMsg, Opts) -> + SchedulerVal = + hb_ao:get_first( + [ + {ProcMsg, <<"scheduler">>}, + {ProcMsg, <<"scheduler-location">>} + ], + [], + Opts + ), + lists:filtermap( + fun(Scheduler) -> + case hb_opts:as(Scheduler, Opts) of + {ok, #{ priv_wallet := Wallet }} -> {true, Wallet}; + _ -> false + end + end, + dev_scheduler:parse_schedulers(SchedulerVal) + ). +``` + +### schedule + +Call the appropriate scheduling server to assign a message. + +```erlang +schedule(AOProcID, Message) when is_binary(AOProcID) -> + schedule(dev_scheduler_registry:find(AOProcID), Message); +``` + +### schedule + +Call the appropriate scheduling server to assign a message. + +```erlang +schedule(ErlangProcID, Message) -> + ?event( + {scheduling_message, + {proc_id, ErlangProcID}, + {message, Message}, + {is_alive, is_process_alive(ErlangProcID)} + } + ), + AbortTime = scheduler_time() + ?DEFAULT_TIMEOUT, + ErlangProcID ! {schedule, Message, self(), AbortTime}, + receive + {scheduled, Message, Assignment} -> + Assignment + after ?DEFAULT_TIMEOUT -> + throw({scheduler_timeout, {proc_id, ErlangProcID}, {message, Message}}) + end. +``` + +### info + +Get the current slot from the scheduling server. + +```erlang +info(ProcID) -> + ?event({getting_info, {proc_id, ProcID}}), + ProcID ! {info, self()}, + receive {info, Info} -> Info end. +``` + +### stop + +```erlang +stop(ProcID) -> + ?event({stopping_scheduling_server, {proc_id, ProcID}}), + ProcID ! stop. +``` + +### server + +The main loop of the server. 
Simply waits for messages to assign and + +```erlang +server(State) -> + receive + {schedule, Message, Reply, AbortTime} -> + case SchedTime = scheduler_time() > AbortTime of + true -> + % Ignore scheduling requests if they are too old. The + % `abort-time' signals to us that the client has already + % given up on the request, so in order to maintain + % predictability we ignore it. +``` + +### assign + +Assign a message to the next slot. + +```erlang +assign(State, Message, ReplyPID) -> + try + do_assign(State, Message, ReplyPID) + catch + _Class:Reason:Stack -> + ?event({error_scheduling, Reason, Stack}), + State + end. +``` + +### do_assign + +Generate and store the actual assignment message. + +```erlang +do_assign(State, Message, ReplyPID) -> + % Ensure that only committed keys from the message are included in the + % assignment. +``` + +### commit_assignment + +Commit to the assignment using all of our appropriate wallets. + +```erlang +commit_assignment(BaseAssignment, State) -> + Wallets = maps:get(wallets, State), + Opts = maps:get(opts, State), + lists:foldr( + fun(Wallet, Assignment) -> + hb_message:commit(Assignment, Opts#{ priv_wallet => Wallet }) + end, + BaseAssignment, + Wallets + ). +``` + +### maybe_inform_recipient + +Potentially inform the caller that the assignment has been scheduled. + +```erlang +maybe_inform_recipient(Mode, ReplyPID, Message, Assignment, State) -> + case maps:get(mode, State) of + Mode -> ReplyPID ! {scheduled, Message, Assignment}; + _ -> ok + end. +``` + +### next_hashchain + +Create the next element in a chain of hashes that links this and prior + +```erlang +next_hashchain(HashChain, Message, Opts) -> + ?event({creating_next_hashchain, {hash_chain, HashChain}, {message, Message}}), + ID = hb_message:id(Message, all, Opts), + crypto:hash( + sha256, + << HashChain/binary, ID/binary >> + ). +``` + +### scheduler_time + +Return the current time in milliseconds. 
+ +```erlang +scheduler_time() -> + erlang:system_time(millisecond). +``` + +### new_proc_test + +Test the basic functionality of the server. + +```erlang +new_proc_test() -> + Wallet = ar_wallet:new(), + SignedItem = hb_message:commit( + #{ <<"data">> => <<"test">>, <<"random-key">> => rand:uniform(10000) }, + #{ priv_wallet => Wallet } + ), + SignedItem2 = hb_message:commit( + #{ <<"data">> => <<"test2">> }, + #{ priv_wallet => Wallet } + ), + SignedItem3 = hb_message:commit( + #{ + <<"data">> => <<"test2">>, + <<"deep-key">> => + #{ <<"data">> => <<"test3">> } + }, + #{ priv_wallet => Wallet } + ), + dev_scheduler_registry:find(hb_message:id(SignedItem, all), SignedItem), + schedule(ID = hb_message:id(SignedItem, all), SignedItem), + schedule(ID, SignedItem2), + schedule(ID, SignedItem3), + ?assertMatch( + #{ current := 2 }, + dev_scheduler_server:info(dev_scheduler_registry:find(ID)) + ). +``` + +--- + +*Generated from [dev_scheduler_server.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_server.erl)* diff --git a/docs/book/src/dev_secret.erl.md b/docs/book/src/dev_secret.erl.md new file mode 100644 index 000000000..70846ff82 --- /dev/null +++ b/docs/book/src/dev_secret.erl.md @@ -0,0 +1,845 @@ +# dev_secret + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_secret.erl) + +A device that allows a node to create, export, and commit messages with +secrets that are stored on the node itself. Users of this device must specify +an `access-control` message which requests are validated against before +access to secrets is granted. +This device is intended for use in situations in which the node is trusted +by the user, for example if it is running on their own machine or in a +TEE-protected environment that they deem to be secure. +# Authentication Flow +Each secret is associated with an `access-control` message and a list of +`controllers` that may access it. 
The `access-control` system is pluggable +-- users may configure their messages to call any AO-Core device that is +executable on the host node. The default `access-control` message uses the +`~cookie@1.0` device's `generate` and `verify` keys to authenticate users. +During secret generation: +1. This device creates the secret and determines its `committer` address. +2. The device invokes the caller's `access-control` message with the `commit` + path and the `keyid` in the request. +3. The `access-control` message sets up authentication (e.g., creates cookies, + secrets) and returns a response, containing a commitment with a `keyid` + field. This `keyid` is used to identify the user's 'access secret' which + grants them the ability to use the device's 'hidden' secret in the future. +4. This device stores both the secret and the initialized `access-control` + message, as well as its other metadata. +5. This device returns the initialized `access-control` message with the + secret's `keyid` added to the `body` field. +During secret operations (commit, export, etc.): +1. This device retrieves the stored `access-control` message for the + secret either from persistent storage or from the node message's private + element. The keyid of the `access secret` is either provided by the + user in the request, or is determined from a provided `secret` parameter + in the request. +2. This device calls the `access-control` message with path `verify` and + the user's request. +3. The `access-control` message verifies the request (e.g., checks cookies, + provided authentication credentials, etc.). +4. If verification passes, the device performs the requested operation. +5. If verification fails, a 400 error is returned. +# Access Control Message Requirements +Access control messages are fully customizable by callers, but must support +two paths: +`/commit`: Called during secret generation to bind the `access-control` + template message to the given `keyid` (secret reference). 
+ - Input: Request message containing `keyid` field with the secret's `keyid` + in the `body` field. + - Output: Response message with authentication setup (cookies, tokens, etc.). + This message will be used as the `Base` message for the `verify` + path. +`/verify`: Called before allowing an operation that requires access to a + secret to proceed. + - Base: The initialized `access-control` message from the `commit` path. + - Request: Caller's request message with authentication credentials. + - Output: `false` if an error has occurred. If the request is valid, the + `access-control` message should return either `true` or a modification + of the request message which will be used for any subsequent + operations. +The default `access-control` message is `~cookie@1.0`, which uses HTTP +cookies with secrets to authenticate users. +# Secret Generation Parameters +The following parameters are supported by the `generate` key: +``` +/generate + - `access-control` (optional): The `access-control` message to use. + Defaults to `#{<<"device">> => <<"cookie@1.0">>}`. + - `keyid` (optional): The `keyid` of the secret to generate. If not + provided, the secret's address will be used as the name. + - `persist` (optional): How the node should persist the secret. Options: + - `client`: The secret is generated on the server, but not persisted. + The full secret key is returned for the user to store. + - `in-memory`: The wallet is generated on the server and persisted only + in local memory, never written to disk. + - `non-volatile`: The wallet is persisted to non-volatile storage on + the node. The store used by this option is segmented from + the node's main storage, configurable via the `priv_store` + node message option. + - `controllers` (optional): A list of controllers that may access the + secret. Defaults to the node's `wallet_admin` option if set, + or its operator address if not. 
+ - `required-controllers` (optional): The number of controllers that must + sign the secret for it to be valid. Defaults to `1`. + The response will contain authentication setup (such as cookies) from the + `access-control` message, plus the secret's `keyid` in the `body` field. + The secret's key is not returned to the user unless the `persist` option + is set to `client`. If it is, the `~cookie@1.0` device will be employed + to set the user's cookie with the secret. +/import + Parameters: + - `key` (optional): The JSON-encoded secret to import. + - `cookie` (optional): A structured-fields cookie containing a map with + a `key` field which is a JSON-encoded secret. + - `access-control` (optional): The `access-control` message to use. + - `persist` (optional): How the node should persist the secret. The + supported options are as with the `generate` key. + Imports a secret for hosting from the user. Executes as `generate` does, + except that it expects the key to store to be provided either directly + via the `key` parameter as a `keyid` field in the cookie Structured-Fields + map. Support for loading the key from the cookie is provided such that + a previously-generated secret by the user can have its persistence mode + changed. +/list + Parameters: + - `keyids` (optional): A list of `keyid`s to list. If not provided, + all secrets will be listed via the `keyid` that is must be provided + in order to access them. + Lists all hosted secrets on the node by the `keyid` that is used to + access them. If `keyids` is provided, only the secrets with those + `keyid`s will be listed. +/commit + Parameters: + - `keyid` (optional): The `keyid` of the secret to commit with. + - Authentication credentials as required by the `access-control` message. + Commits the given message using the specified secret after authentication. + If no `keyid` parameter is provided, the request's authentication data + (such as cookies) must contain secret identification. 
+/export + Parameters: + - `keyids` (optional): A list of `keyid`s to export, or `all` to + export all secrets for which the request passes authentication. + Exports a given secret or set of secrets. If multiple secrets are + requested, the result is a message with form `keyid => #{ `key` => + JSON-encoded secret, `access-control` => `access-control` message, + `controllers` => [address, ...], `required-controllers` => integer, + `persist` => `client` | `in-memory` | `non-volatile` }'. + A secret will be exported if: + - The given request passes each requested secret's `access-control` + message; or + - The request passes each requested secret's `controllers` parameter + checks. +/sync + Parameters: + - `node`: The peer node to pull secrets from. + - `as` (optional): The identity it should use when signing its request + to the remote peer. + - `keyids` (optional): A list of `keyid`s to export, or `all` to load + every available secret. Defaults to `all`. + Attempts to download all (or a given subset of) secrets from the given + node and import them. If the `keyids` parameter is provided, only the + secrets with those `keyid`s will be imported. The `as` parameter is + used to inform the node which key it should use to sign its request to + the remote peer, such that its request validates against the secret's + `access-control` messages on the remote peer. +''' + +--- + +## Exported Functions + +- `commit/3` +- `export/3` +- `generate/3` +- `import/3` +- `list/3` +- `sync/3` + +--- + +### generate + +A device that allows a node to create, export, and commit messages with +Generate a new wallet for a user and register it on the node. If the + +```erlang +generate(Base, Request, Opts) -> + case request_to_wallets(Base, Request, Opts) of + [] -> + % No wallets found, create a new one. +``` + +### import + +Import a wallet for hosting on the node. 
Expects the keys to be either + +```erlang +import(Base, Request, Opts) -> + Wallets = + case hb_maps:find(<<"key">>, Request, Opts) of + {ok, Keys} when is_list(Keys) -> + [ wallet_from_key(Key) || Key <- Keys ]; + {ok, Key} -> + [ wallet_from_key(hb_escape:decode_quotes(Key)) ]; + error -> + request_to_wallets(Base, Request, Opts) + end, + case Wallets of + [] -> + {error, <<"No viable wallets found to import.">>}; + Wallets -> + import_wallets(Wallets, Base, Request, Opts) + end. +``` + +### import_wallets + +Register a series of wallets, returning a summary message with the + +```erlang +import_wallets(Wallets, Base, Request, Opts) -> + Res = + lists:foldl( + fun(Wallet, Acc) -> + case register_wallet(Wallet, Base, Request, Opts) of + {ok, RegRes} -> + % Merge the private element of the registration response + % into the accumulator. +``` + +### wallet_from_key + +Transform a wallet key serialized form into a wallet. + +```erlang +wallet_from_key(Key) when is_binary(Key) -> + ar_wallet:from_json(Key); +``` + +### wallet_from_key + +Transform a wallet key serialized form into a wallet. + +```erlang +wallet_from_key(Key) -> + Key. +``` + +### register_wallet + +Register a wallet on the node. + +```erlang +register_wallet(Wallet, Base, Request, Opts) -> + % Find the wallet's address. +``` + +### persist_registered_wallet + +Persist a wallet and return the auth response. Optionally takes a + +```erlang +persist_registered_wallet(WalletDetails, Opts) -> + persist_registered_wallet(WalletDetails, #{}, Opts). +``` + +### persist_registered_wallet + +```erlang +persist_registered_wallet(WalletDetails, RespBase, Opts) -> + % Add the wallet address as the body of the response. +``` + +### list + +List all hosted wallets + +```erlang +list(_Base, _Request, Opts) -> + {ok, list_wallets(Opts)}. +``` + +### commit + +Sign a message with a wallet. 
+ +```erlang +commit(Base, Request, Opts) -> + ?event({commit_invoked, {base, Base}, {request, Request}}), + case request_to_wallets(Base, Request, Opts) of + [] -> {error, <<"No wallets found to sign with.">>}; + WalletDetailsList -> + ?event( + {commit_signing, + {request, Request}, + {wallet_list, WalletDetailsList} + } + ), + { + ok, + lists:foldl( + fun(WalletDetails, Acc) -> + ?event( + {invoking_commit_message, + {message, Acc}, + {wallet, WalletDetails} + } + ), + commit_message(Acc, WalletDetails, Opts) + end, + Base, + WalletDetailsList + ) + } + end. +``` + +### request_to_wallets + +Take a request and return the wallets it references. Performs validation + +```erlang +request_to_wallets(Base, Request, Opts) -> + % Get the wallet references or keys from the request or cookie. +``` + +### load_and_verify + +Load a wallet from a keyid and verify we have the authority to access it. + +```erlang +load_and_verify({wallet, WalletKey}, _Base, _Request, _Opts) -> + % Return the wallet key. +``` + +### load_and_verify + +```erlang +load_and_verify({secret, KeyID, _}, _Base, Request, Opts) -> + % Get the wallet from the node's options. +``` + +### verify_controllers + +Validate if a calling message has the required `controllers` for the + +```erlang +verify_controllers(WalletDetails, Request, Opts) -> + RequiredControllers = + hb_util:int(hb_maps:get(<<"required-controllers">>, WalletDetails, 1, Opts)), + Controllers = + parse_controllers( + hb_maps:get(<<"controllers">>, WalletDetails, [], Opts), + Opts + ), + PresentControllers = + lists:filter( + fun(Signer) -> + lists:member(Signer, Controllers) + end, + hb_message:signers(Request, Opts) + ), + length(PresentControllers) >= RequiredControllers. +``` + +### verify_auth + +Verify a wallet for a given request. 
+ +```erlang +verify_auth(WalletDetails, Req, Opts) -> + AuthBase = hb_maps:get(<<"access-control">>, WalletDetails, #{}, Opts), + AuthRequest = + Req#{ + <<"path">> => <<"verify">>, + <<"committer">> => + hb_maps:get(<<"committer">>, WalletDetails, undefined, Opts) + }, + ?event({verify_wallet, {auth_base, AuthBase}, {request, AuthRequest}}), + hb_ao:resolve(AuthBase, AuthRequest, Opts). +``` + +### wallets_from_cookie + +Parse cookie from a message to extract wallets. + +```erlang +wallets_from_cookie(Msg, Opts) -> + % Parse the cookie as a Structured-Fields map. +``` + +### commit_message + +Sign a message using hb_message:commit, taking either a wallet as a + +```erlang +commit_message(Message, NonMap, Opts) when not is_map(NonMap) -> + commit_message(Message, #{ <<"wallet">> => NonMap }, Opts); +``` + +### commit_message + +Sign a message using hb_message:commit, taking either a wallet as a + +```erlang +commit_message(Message, #{ <<"wallet">> := Key }, Opts) when is_binary(Key) -> + commit_message(Message, ar_wallet:from_json(Key), Opts); +``` + +### commit_message + +Sign a message using hb_message:commit, taking either a wallet as a + +```erlang +commit_message(Message, #{ <<"wallet">> := Key }, Opts) -> + ?event({committing_with_proxy, {message, Message}, {wallet, Key}}), + hb_message:commit(Message, Opts#{ priv_wallet => Key }). +``` + +### export + +Export wallets from a request. 
The request should contain a source of + +```erlang +export(Base, Request, Opts) -> + PrivOpts = priv_store_opts(Opts), + ModReq = + case hb_ao:get(<<"keyids">>, Request, not_found, Opts) of + <<"all">> -> + AllLocalWallets = list_wallets(Opts), + Request#{ <<"keyids">> => AllLocalWallets }; + _ -> Request + end, + ?event({export, {base, Base}, {request, ModReq}}), + case request_to_wallets(Base, ModReq, Opts) of + [] -> {error, <<"No wallets found to export.">>}; + Wallets -> + { + ok, + lists:map( + fun(Wallet) -> + Loaded = hb_cache:ensure_all_loaded(Wallet, PrivOpts), + ?event({exported, {wallet, Loaded}}), + Loaded + end, + Wallets + ) + } + end. +``` + +### sync + +Sync wallets from a remote node + +```erlang +sync(_Base, Request, Opts) -> + case hb_ao:get(<<"node">>, Request, undefined, Opts) of + undefined -> + {error, <<"Node not specified.">>}; + Node -> + Wallets = hb_maps:get(<<"keyids">>, Request, <<"all">>, Opts), + SignAsOpts = + case hb_ao:get(<<"as">>, Request, undefined, Opts) of + undefined -> Opts; + SignAs -> hb_opts:as(SignAs, Opts) + end, + ExportRequest = + (hb_message:commit( + #{ <<"keyids">> => Wallets }, + SignAsOpts + ))#{ <<"path">> => <<"/~secret@1.0/export">> }, + ?event({sync, {export_req, ExportRequest}}), + case hb_http:get(Node, ExportRequest, SignAsOpts) of + {ok, ExportResponse} -> + ExportedWallets = export_response_to_list(ExportResponse, #{}), + ?event({sync, {received_wallets, ExportedWallets}}), + % Import each wallet. Ignore wallet imports that fail. +``` + +### secrets_to_keyids + +Convert a key to a wallet reference. + +```erlang +secrets_to_keyids(Secrets) when is_list(Secrets) -> + [ hd(secrets_to_keyids(Secret)) || Secret <- Secrets ]; +``` + +### secrets_to_keyids + +Convert a key to a wallet reference. 
+Parse the exportable setting for a wallet and return a list of addresses + +```erlang +secrets_to_keyids(Secret) when is_binary(Secret) -> + ?event({secrets_to_keyids, {secret, Secret}}), + KeyID = dev_codec_httpsig_keyid:secret_key_to_committer(Secret), + [ {secret, <<"secret:", KeyID/binary>>, Secret} ]. +``` + +### parse_controllers + +Convert a key to a wallet reference. +Parse the exportable setting for a wallet and return a list of addresses + +```erlang +parse_controllers(default, Opts) -> + case hb_opts:get(wallet_admin, undefined, Opts) of + undefined -> + case hb_opts:get(operator, undefined, Opts) of + undefined -> + [hb_util:human_id(hb_opts:get(priv_wallet, undefined, Opts))]; + Op -> [hb_util:human_id(Op)] + end; + Admin -> [Admin] + end; +``` + +### parse_controllers + +Convert a key to a wallet reference. +Parse the exportable setting for a wallet and return a list of addresses + +```erlang +parse_controllers(true, Opts) -> parse_controllers(default, Opts); +``` + +### parse_controllers + +Convert a key to a wallet reference. +Parse the exportable setting for a wallet and return a list of addresses + +```erlang +parse_controllers(false, _Opts) -> []; +``` + +### parse_controllers + +Convert a key to a wallet reference. +Parse the exportable setting for a wallet and return a list of addresses + +```erlang +parse_controllers(Addresses, _Opts) when is_list(Addresses) -> Addresses; +``` + +### parse_controllers + +Convert a key to a wallet reference. +Parse the exportable setting for a wallet and return a list of addresses +Store a wallet in the appropriate location. + +```erlang +parse_controllers(Address, _Opts) when is_binary(Address) -> [Address]. +``` + +### store_wallet + +Convert a key to a wallet reference. +Parse the exportable setting for a wallet and return a list of addresses +Store a wallet in the appropriate location. 
+ +```erlang +store_wallet(in_memory, KeyID, Details, Opts) -> + % Get existing wallets + CurrentWallets = hb_opts:get(priv_wallet_hosted, #{}, Opts), + % Add new wallet + UpdatedWallets = CurrentWallets#{ KeyID => Details }, + ?event({wallet_store, {updated_wallets, UpdatedWallets}}), + % Update the node's options with the new wallets. +``` + +### store_wallet + +```erlang +store_wallet(non_volatile, KeyID, Details, Opts) -> + % Find the private store of the node. +``` + +### find_wallet + +Find the wallet by name or address in the node's options. + +```erlang +find_wallet(KeyID, Opts) -> + case find_wallet(in_memory, KeyID, Opts) of + not_found -> find_wallet(non_volatile, KeyID, Opts); + Wallet -> Wallet + end. +``` + +### find_wallet + +Loop over the wallets and find the reference to the wallet. + +```erlang +find_wallet(in_memory, KeyID, Opts) -> + Wallets = hb_opts:get(priv_wallet_hosted, #{}, Opts), + ?event({find_wallet, {keyid, KeyID}, {wallets, Wallets}}), + case hb_maps:find(KeyID, Wallets, Opts) of + {ok, Wallet} -> Wallet; + error -> not_found + end; +``` + +### find_wallet + +Loop over the wallets and find the reference to the wallet. + +```erlang +find_wallet(non_volatile, KeyID, Opts) -> + PrivOpts = priv_store_opts(Opts), + Store = hb_opts:get(priv_store, undefined, PrivOpts), + Resolved = hb_store:resolve(Store, <<"wallet@1.0/", KeyID/binary>>), + case hb_cache:read(Resolved, PrivOpts) of + {ok, Wallet} -> + WalletDetails = hb_maps:get(KeyID, Wallet, not_found, PrivOpts), + hb_cache:ensure_all_loaded(WalletDetails, PrivOpts); + _ -> not_found + end. +``` + +### list_wallets + +Generate a list of all hosted wallets. + +```erlang +list_wallets(Opts) -> + list_wallets(in_memory, Opts) ++ list_wallets(non_volatile, Opts). 
+``` + +### list_wallets + +```erlang +list_wallets(in_memory, Opts) -> + hb_maps:keys(hb_opts:get(priv_wallet_hosted, #{}, Opts)); +``` + +### list_wallets + +Generate a new `Opts` message with the `priv_store` as the only `store` + +```erlang +list_wallets(non_volatile, Opts) -> + PrivOpts = priv_store_opts(Opts), + hb_cache:ensure_all_loaded(hb_cache:list(<<"wallet@1.0/">>, PrivOpts), PrivOpts). +``` + +### priv_store_opts + +Generate a new `Opts` message with the `priv_store` as the only `store` + +```erlang +priv_store_opts(Opts) -> + hb_private:opts(Opts). +``` + +### export_response_to_list + +Convert an export response into a list of wallet details. This is + +```erlang +export_response_to_list(ExportResponse, Opts) -> + hb_util:numbered_keys_to_list(ExportResponse, Opts). +``` + +### addresses_to_binary + +Convert a list of addresses to a binary string. If the input is a + +```erlang +addresses_to_binary(Addresses) when is_list(Addresses) -> + hb_util:bin(string:join( + lists:map(fun hb_util:list/1, Addresses), + ", " + )); +``` + +### addresses_to_binary + +Convert a list of addresses to a binary string. If the input is a + +```erlang +addresses_to_binary(Address) when is_binary(Address) -> + Address. +``` + +### binary_to_addresses + +Convert a binary string to a list of addresses. If the input is a + +```erlang +binary_to_addresses(AddressesBin) when is_binary(AddressesBin) -> + binary:split(AddressesBin, <<",">>, [global]); +``` + +### binary_to_addresses + +Convert a binary string to a list of addresses. If the input is a + +```erlang +binary_to_addresses(Addresses) when is_list(Addresses) -> + Addresses. +``` + +### test_wallet_generate_and_verify + +Helper function to test wallet generation and verification flow. 
+ +```erlang +test_wallet_generate_and_verify(GeneratePath, ExpectedName, CommitParams) -> + Node = hb_http_server:start_node(#{ + priv_wallet => ar_wallet:new() + }), + % Generate wallet with specified parameters + {ok, GenResponse} = hb_http:get(Node, GeneratePath, #{}), + % Should get wallet name in body, wallet-address, and auth cookie + ?assertMatch(#{<<"body">> := _}, GenResponse), + WalletAddr = maps:get(<<"wallet-address">>, GenResponse), + case ExpectedName of + undefined -> + % For unnamed wallets, just check it's a non-empty binary + ?assert(is_binary(WalletAddr) andalso byte_size(WalletAddr) > 0); + _ -> + % For named wallets, check exact match + ?assertEqual(ExpectedName, WalletAddr) + end, + ?assertMatch(#{ <<"priv">> := #{ <<"cookie">> := _ } }, GenResponse), + #{ <<"priv">> := Priv } = GenResponse, + % Now verify by signing a message + TestMessage = + maps:merge( + #{ + <<"device">> => <<"secret@1.0">>, + <<"path">> => <<"commit">>, + <<"body">> => <<"Test message">>, + <<"priv">> => Priv + }, + CommitParams + ), + ?event({signing_with_cookie, {test_message, TestMessage}}), + {ok, SignedMessage} = hb_http:post(Node, TestMessage, #{}), + % Should return signed message with correct signer + ?assertMatch(#{ <<"body">> := <<"Test message">> }, SignedMessage), + ?assert(hb_message:signers(SignedMessage, #{}) =:= [WalletAddr]). +``` + +### client_persist_generate_and_verify_test + +```erlang +client_persist_generate_and_verify_test() -> + test_wallet_generate_and_verify( + <<"/~secret@1.0/generate?persist=client">>, + undefined, + #{} + ). +``` + +### cookie_wallet_generate_and_verify_test + +```erlang +cookie_wallet_generate_and_verify_test() -> + test_wallet_generate_and_verify( + <<"/~secret@1.0/generate?persist=in-memory">>, + undefined, + #{} + ). 
+``` + +### non_volatile_persist_generate_and_verify_test + +```erlang +non_volatile_persist_generate_and_verify_test() -> + test_wallet_generate_and_verify( + <<"/~secret@1.0/generate?persist=non-volatile">>, + undefined, + #{} + ). +``` + +### import_wallet_with_key_test + +```erlang +import_wallet_with_key_test() -> + Node = hb_http_server:start_node(#{ + priv_wallet => ar_wallet:new() + }), + % Create a test wallet key to import (in real scenario from user). +``` + +### list_wallets_test + +```erlang +list_wallets_test() -> + Node = hb_http_server:start_node(#{ + priv_wallet => ar_wallet:new() + }), + % Generate some wallets first. +``` + +### commit_with_cookie_wallet_test + +```erlang +commit_with_cookie_wallet_test() -> + Node = hb_http_server:start_node(#{ + priv_wallet => ar_wallet:new() + }), + % Generate a client wallet to get a cookie with full wallet key. +``` + +### export_wallet_test + +```erlang +export_wallet_test() -> + Node = hb_http_server:start_node(#{}), + % Generate a wallet to export. +``` + +### export_non_volatile_wallet_test + +```erlang +export_non_volatile_wallet_test() -> + Node = hb_http_server:start_node(#{ + priv_wallet => ar_wallet:new() + }), + % Generate a wallet to export. +``` + +### export_individual_batch_wallets_test + +```erlang +export_individual_batch_wallets_test() -> + Node = + hb_http_server:start_node( + AdminOpts = + #{ + priv_wallet => AdminWallet = ar_wallet:new() + } + ), + % Generate multiple wallets and collect auth cookies. +``` + +### export_batch_all_wallets_test + +```erlang +export_batch_all_wallets_test() -> + % Remove all previous cached wallets. +``` + +### sync_wallets_test + +```erlang +sync_wallets_test() -> + % Remove all previous cached wallets. +``` + +### sync_non_volatile_wallets_test + +```erlang +sync_non_volatile_wallets_test() -> + % Remove all the previous cached wallets. 
+``` + +--- + +*Generated from [dev_secret.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_secret.erl)* diff --git a/docs/book/src/dev_simple_pay.erl.md b/docs/book/src/dev_simple_pay.erl.md new file mode 100644 index 000000000..6a2dc03ec --- /dev/null +++ b/docs/book/src/dev_simple_pay.erl.md @@ -0,0 +1,408 @@ +# dev_simple_pay + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_simple_pay.erl) + +A simple device that allows the operator to specify a price for a +request and then charge the user for it, on a per route and optionally +per message basis. +The device's pricing rules are as follows: +1. If the request is from the operator, the cost is 0. +2. If the request matches one of the `router_opts/offered` routes, the + explicit price of the route is used. +3. Else, the price is calculated by counting the number of messages in the + request, and multiplying by the `simple_pay_price` node option, plus the + price of the apply subrequest if applicable. Subrequests are priced by + recursively calling `estimate/3` upon them. In the case of an `apply@1.0` + subrequest, the two initiating apply messages are not counted towards the + message count price. +The device's ledger is stored in the node message at `simple_pay_ledger`, +and can be topped-up by either the operator, or an external device. The +price is specified in the node message at `simple_pay_price`. +This device acts as both a pricing device and a ledger device, by p4's +definition. 
+ +--- + +## Exported Functions + +- `balance/3` +- `charge/3` +- `estimate/3` +- `topup/3` + +--- + +### estimate + +A simple device that allows the operator to specify a price for a +Estimate the cost of the request, using the rules outlined in the + +```erlang +estimate(_Base, EstimateReq, NodeMsg) -> + Req = hb_ao:get(<<"request">>, EstimateReq, NodeMsg#{ hashpath => ignore }), + case is_operator(Req, NodeMsg) of + true -> + ?event(payment, + {estimate_preprocessing, caller_is_operator} + ), + {ok, 0}; + false -> + ?event(payment, {starting_estimate, {req, Req}}), + ReqSequence = hb_singleton:from(Req, NodeMsg), + ?event(payment, + {estimating_cost, + {singleton, Req}, + {request_sequence, ReqSequence} + } + ), + % Get the user's request to match against router registration options + case price_from_routes(Req, NodeMsg) of + no_matches -> + {ok, ApplyPrice, SeqWithoutApply} = apply_price(ReqSequence, NodeMsg), + MessageCountPrice = price_from_count(SeqWithoutApply, NodeMsg), + Price = MessageCountPrice + ApplyPrice, + ?event(payment, + {calculated_generic_route_price, + {price, Price}, + {message_count_price, MessageCountPrice}, + {apply_price, ApplyPrice} + }), + {ok, Price}; + Price -> + ?event(payment, + {calculated_specific_route_price, + {price, Price} + } + ), + {ok, Price} + end + end. 
+``` + +### apply_price + +If the request is for the `apply@1.0` device, we should price the + +```erlang +apply_price([{as, Device, Msg} | Rest], NodeMsg) -> + apply_price([Msg#{ <<"device">> => Device } | Rest], NodeMsg); +``` + +### apply_price + +If the request is for the `apply@1.0` device, we should price the + +```erlang +apply_price( + [Req = #{ <<"device">> := <<"apply@1.0">> }, #{ <<"path">> := Path } | Rest], + NodeMsg + ) -> + UserPath = hb_maps:get(Path, Req, <<"">>, NodeMsg), + UserMessage = + case hb_maps:find(<<"source">>, Req, NodeMsg) of + {ok, Source} -> hb_maps:get(Source, Req, Req, NodeMsg); + error -> Req + end, + UserRequest = + hb_maps:without( + [<<"device">>], + UserMessage#{ <<"path">> => UserPath } + ), + ?event(payment, {estimating_price_of_subrequest, {req, UserRequest}}), + {ok, Price} = estimate(#{}, #{ <<"request">> => UserRequest }, NodeMsg), + ?event(payment, {price_of_apply_subrequest, {price, Price}}), + {ok, Price, Rest}; +``` + +### apply_price + +If the request is for the `apply@1.0` device, we should price the + +```erlang +apply_price(Seq, _) -> + {ok, 0, Seq}. +``` + +### price_from_routes + +Calculate the price of a request based on the offered routes, if + +```erlang +price_from_routes(UserRequest, NodeMsg) -> + RouterOpts = hb_opts:get(<<"router_opts">>, #{}, NodeMsg), + Routes = hb_maps:get(<<"offered">>, RouterOpts, [], NodeMsg), + MatchRes = + dev_router:match( + #{ <<"routes">> => Routes }, + UserRequest, + NodeMsg + ), + case MatchRes of + {ok, OfferedRoute} -> + Price = hb_maps:get(<<"price">>, OfferedRoute, 0, NodeMsg), + ?event(payment, {price_from_routes, {price, Price}}), + Price; + _ -> + no_matches + end. 
+``` + +### price_from_count + +Calculate the price of a request based on the number of messages in + +```erlang +price_from_count(Messages, NodeMsg) -> + Price = + hb_util:int(hb_opts:get(simple_pay_price, 1, NodeMsg)) + * length(Messages), + ?event(payment, {price_from_count, {price, Price}, {count, length(Messages)}}), + Price. +``` + +### charge + +Preprocess a request by checking the ledger and charging the user. We + +```erlang +charge(_, RawReq, NodeMsg) -> + ?event(payment, {charge, RawReq}), + Req = hb_ao:get(<<"request">>, RawReq, NodeMsg#{ hashpath => ignore }), + case hb_message:signers(Req, NodeMsg) of + [] -> + ?event(payment, {charge, {error, <<"No signers">>}}), + {ok, false}; + [Signer] -> + UserBalance = get_balance(Signer, NodeMsg), + Price = hb_ao:get(<<"quantity">>, RawReq, 0, NodeMsg), + ?event(payment, + {charge, + {user, Signer}, + {balance, UserBalance}, + {price, Price} + }), + {ok, _} = + set_balance( + Signer, + NewBalance = UserBalance - Price, + NodeMsg + ), + case NewBalance >= 0 of + true -> + {ok, true}; + false -> + ?event(payment, + {charge, + {user, Signer}, + {balance, UserBalance}, + {price, Price} + } + ), + {error, #{ + <<"status">> => 402, + <<"body">> => <<"Insufficient funds. " + "User balance before charge: ", + (hb_util:bin(UserBalance))/binary, + ". Price of request: ", + (hb_util:bin(Price))/binary, + ". New balance: ", + (hb_util:bin(NewBalance))/binary, + ".">> + }} + end; + MultipleSigners -> + ?event(payment, {charge, {error_multiple_signers, MultipleSigners}}), + {error, #{ + <<"status">> => 400, + <<"body">> => <<"Multiple signers in charge.">> + }} + end. +``` + +### balance + +Get the balance of a user in the ledger. 
+ +```erlang +balance(_, RawReq, NodeMsg) -> + Target = + case hb_ao:get(<<"request">>, RawReq, NodeMsg#{ hashpath => ignore }) of + not_found -> + case hb_message:signers(RawReq, NodeMsg) of + [] -> hb_ao:get(<<"target">>, RawReq, undefined, NodeMsg); + [Signer] -> Signer + end; + Req -> hd(hb_message:signers(Req, NodeMsg)) + end, + {ok, get_balance(Target, NodeMsg)}. +``` + +### set_balance + +Adjust a user's balance, normalizing their wallet ID first. + +```erlang +set_balance(Signer, Amount, NodeMsg) -> + NormSigner = hb_util:human_id(Signer), + Ledger = hb_opts:get(simple_pay_ledger, #{}, NodeMsg), + ?event(payment, + {modifying_balance, + {user, NormSigner}, + {amount, Amount}, + {ledger_before, Ledger} + } + ), + hb_http_server:set_opts( + #{}, + NewMsg = NodeMsg#{ + simple_pay_ledger => + hb_ao:set( + Ledger, + NormSigner, + Amount, + NodeMsg + ) + } + ), + {ok, NewMsg}. +``` + +### get_balance + +Get the balance of a user in the ledger. + +```erlang +get_balance(Signer, NodeMsg) -> + NormSigner = hb_util:human_id(Signer), + Ledger = hb_opts:get(simple_pay_ledger, #{}, NodeMsg), + hb_ao:get(NormSigner, Ledger, 0, NodeMsg). +``` + +### topup + +Top up the user's balance in the ledger. + +```erlang +topup(_, Req, NodeMsg) -> + ?event({topup, {req, Req}, {node_msg, NodeMsg}}), + case is_operator(Req, NodeMsg) of + false -> {error, <<"Unauthorized">>}; + true -> + Amount = hb_ao:get(<<"amount">>, Req, 0, NodeMsg), + Recipient = hb_ao:get(<<"recipient">>, Req, undefined, NodeMsg), + CurrentBalance = get_balance(Recipient, NodeMsg), + ?event(payment, + {topup, + {amount, Amount}, + {recipient, Recipient}, + {balance, CurrentBalance}, + {expected_new_balance, CurrentBalance + Amount} + }), + {ok, NewNodeMsg} = + set_balance( + Recipient, + CurrentBalance + Amount, + NodeMsg + ), + % Briefly wait for the ledger to be updated. +``` + +### is_operator + +Check if the request is from the operator. 
+ +```erlang +is_operator(Req, NodeMsg) -> + is_operator(Req, NodeMsg, hb_opts:get(operator, undefined, NodeMsg)). +``` + +### is_operator + +```erlang +is_operator(Req, NodeMsg, OperatorAddr) when ?IS_ID(OperatorAddr) -> + Signers = hb_message:signers(Req, NodeMsg), + HumanOperatorAddr = hb_util:human_id(OperatorAddr), + lists:any( + fun(Signer) -> + HumanOperatorAddr =:= hb_util:human_id(Signer) + end, + Signers + ); +``` + +### is_operator + +```erlang +is_operator(_, _, _) -> + false. +``` + +### test_opts + +```erlang +test_opts(Ledger) -> + Wallet = ar_wallet:new(), + Address = hb_util:human_id(ar_wallet:to_address(Wallet)), + ProcessorMsg = + #{ + <<"device">> => <<"p4@1.0">>, + <<"ledger-device">> => <<"simple-pay@1.0">>, + <<"pricing-device">> => <<"simple-pay@1.0">> + }, + { + Address, + Wallet, + #{ + simple_pay_ledger => Ledger, + simple_pay_price => 10, + operator => Address, + on => #{ + <<"request">> => ProcessorMsg, + <<"response">> => ProcessorMsg + } + } + }. +``` + +### get_balance_and_top_up_test + +```erlang +get_balance_and_top_up_test() -> + ClientWallet = ar_wallet:new(), + ClientAddress = hb_util:human_id(ar_wallet:to_address(ClientWallet)), + {HostAddress, HostWallet, Opts} = test_opts(#{ ClientAddress => 100 }), + Node = hb_http_server:start_node(Opts), + ?event({host_address, HostAddress}), + ?event({client_address, ClientAddress}), + {ok, Res} = + hb_http:get( + Node, + Req = hb_message:commit( + #{<<"path">> => <<"/~simple-pay@1.0/balance">>}, + Opts#{ priv_wallet => ClientWallet } + ), + Opts + ), + ?event({req_signers, hb_message:signers(Req, Opts)}), + % Balance is given during the request, before the charge is made, so we + % should expect to see the original balance. 
+``` + +### apply_price_test + +```erlang +apply_price_test() -> + ClientWallet = ar_wallet:new(), + ClientAddress = hb_util:human_id(ar_wallet:to_address(ClientWallet)), + ClientOpts = #{ priv_wallet => ClientWallet }, + {HostAddress, _HostWallet, Opts} = + test_opts(#{ ClientAddress => 100 }), + Node = hb_http_server:start_node(Opts), + ?event({host_address, HostAddress}), + ?event({client_address, ClientAddress}), + % The balance should now be 80, as the check will have charged us 20. +``` + +--- + +*Generated from [dev_simple_pay.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_simple_pay.erl)* diff --git a/docs/book/src/dev_snp.erl.md b/docs/book/src/dev_snp.erl.md new file mode 100644 index 000000000..e9f8c3518 --- /dev/null +++ b/docs/book/src/dev_snp.erl.md @@ -0,0 +1,693 @@ +# dev_snp + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_snp.erl) + +This device provides an interface for validating and generating AMD SEV-SNP +commitment reports. +AMD SEV-SNP (Secure Encrypted Virtualization - Secure Nested Paging) is a +hardware-based security technology that provides confidential computing +capabilities. This module handles the cryptographic validation of attestation +reports and the generation of commitment reports for trusted execution environments. +The device supports two main operations: +1. Verification of remote node attestation reports with comprehensive validation +2. Generation of local attestation reports for proving node identity and software integrity + +--- + +## Exported Functions + +- `generate/3` +- `verify/3` + +--- + +### verify + +This device provides an interface for validating and generating AMD SEV-SNP +Verify an AMD SEV-SNP commitment report message. + +```erlang +-spec verify(M1 :: term(), M2 :: term(), NodeOpts :: map()) -> + {ok, binary()} | {error, term()}. 
+``` + +```erlang +verify(M1, M2, NodeOpts) -> + ?event(snp_verify, verify_called), + maybe + {ok, {Msg, Address, NodeMsgID, ReportJSON, MsgWithJSONReport}} + ?= extract_and_normalize_message(M2, NodeOpts), + % Perform all validation steps + {ok, NonceResult} ?= verify_nonce(Address, NodeMsgID, Msg, NodeOpts), + {ok, SigResult} ?= + verify_signature_and_address( + MsgWithJSONReport, + Address, + NodeOpts + ), + {ok, DebugResult} ?= verify_debug_disabled(Msg), + {ok, TrustedResult} ?= verify_trusted_software(M1, Msg, NodeOpts), + {ok, MeasurementResult} ?= verify_measurement(Msg, ReportJSON, NodeOpts), + {ok, ReportResult} ?= verify_report_integrity(ReportJSON), + Valid = lists:all( + fun(Bool) -> Bool end, + [ + NonceResult, + SigResult, + DebugResult, + TrustedResult, + MeasurementResult, + ReportResult + ] + ), + ?event({final_validation_result, Valid}), + {ok, hb_util:bin(Valid)} + else + {error, Reason} -> {error, Reason} + end. +``` + +### generate + +Generate an AMD SEV-SNP commitment report and emit it as a message. + +```erlang +-spec generate(M1 :: term(), M2 :: term(), Opts :: map()) -> + {ok, map()} | {error, term()}. 
+``` + +```erlang +generate(_M1, _M2, Opts) -> + maybe + LoadedOpts = hb_cache:ensure_all_loaded(Opts, Opts), + ?event({generate_opts, {explicit, LoadedOpts}}), + % Validate wallet availability + {ok, ValidWallet} ?= + case hb_opts:get(priv_wallet, no_viable_wallet, LoadedOpts) of + no_viable_wallet -> {error, no_wallet_available}; + Wallet -> {ok, Wallet} + end, + % Generate address and node message components + Address = hb_util:human_id(ar_wallet:to_address(ValidWallet)), + NodeMsg = hb_private:reset(LoadedOpts), + {ok, PublicNodeMsgID} ?= dev_message:id( + NodeMsg, + #{ <<"committers">> => <<"none">> }, + LoadedOpts + ), + RawPublicNodeMsgID = hb_util:native_id(PublicNodeMsgID), + ?event({snp_node_msg, NodeMsg}), + % Generate the commitment report components + ?event({snp_address, byte_size(Address)}), + ReportData = generate_nonce(Address, RawPublicNodeMsgID), + ?event({snp_report_data, byte_size(ReportData)}), + % Extract local hashes + {ok, ValidLocalHashes} ?= + case hb_opts:get(snp_trusted, [#{}], LoadedOpts) of + [] -> {error, no_trusted_configs}; + [FirstConfig | _] -> {ok, FirstConfig}; + _ -> {error, invalid_trusted_configs_format} + end, + ?event(snp_local_hashes, {explicit, ValidLocalHashes}), + % Generate the hardware attestation report + {ok, ReportJSON} ?= case get(mock_snp_nif_enabled) of + true -> + % Return mocked response for testing + MockResponse = get(mock_snp_nif_response), + {ok, MockResponse}; + _ -> + % Call actual NIF function + dev_snp_nif:generate_attestation_report( + ReportData, + ?REPORT_DATA_VERSION + ) + end, + ?event({snp_report_json, ReportJSON}), + ?event({snp_report_generated, {nonce, ReportData}, {report, ReportJSON}}), + % Package the complete report message + ReportMsg = #{ + <<"local-hashes">> => ValidLocalHashes, + <<"nonce">> => hb_util:encode(ReportData), + <<"address">> => Address, + <<"node-message">> => NodeMsg, + <<"report">> => ReportJSON + }, + ?event({snp_report_msg, ReportMsg}), + {ok, ReportMsg} + else + 
{error, Reason} -> {error, Reason}; + Error -> {error, Error} + end. +``` + +### extract_and_normalize_message + +Extract and normalize the SNP commitment message from the input. + +```erlang +-spec extract_and_normalize_message(M2 :: term(), NodeOpts :: map()) -> + {ok, {map(), binary(), binary(), binary(), map()}} | {error, term()}. +``` + +```erlang +extract_and_normalize_message(M2, NodeOpts) -> + maybe + % Search for a `body' key in the message, and if found use it as the source + % of the report. If not found, use the message itself as the source. +``` + +### extract_node_message_id + +Extract the node message ID from the SNP message. + +```erlang +-spec extract_node_message_id(Msg :: map(), NodeOpts :: map()) -> + {ok, binary()} | {error, missing_node_msg_id}. +``` + +```erlang +extract_node_message_id(Msg, NodeOpts) -> + case {hb_ao:get(<<"node-message">>, Msg, NodeOpts#{ hashpath => ignore }), + hb_ao:get(<<"node-message-id">>, Msg, NodeOpts)} of + {undefined, undefined} -> + {error, missing_node_msg_id}; + {undefined, ID} -> + {ok, ID}; + {NodeMsg, _} -> + dev_message:id(NodeMsg, #{}, NodeOpts) + end. +``` + +### verify_nonce + +Verify that the nonce in the report matches the expected value. + +```erlang +-spec verify_nonce(Address :: binary(), NodeMsgID :: binary(), + Msg :: map(), NodeOpts :: map()) -> {ok, true} | {error, nonce_mismatch}. +``` + +```erlang +verify_nonce(Address, NodeMsgID, Msg, NodeOpts) -> + Nonce = hb_util:decode(hb_ao:get(<<"nonce">>, Msg, NodeOpts)), + ?event({snp_nonce, Nonce}), + NonceMatches = report_data_matches(Address, NodeMsgID, Nonce), + ?event({nonce_matches, NonceMatches}), + case NonceMatches of + true -> {ok, true}; + false -> {error, nonce_mismatch} + end. +``` + +### verify_signature_and_address + +Verify that the message signature and signing address are valid. 
+ +```erlang +-spec verify_signature_and_address(MsgWithJSONReport :: map(), + Address :: binary(), NodeOpts :: map()) -> + {ok, true} | {error, signature_or_address_invalid}. +``` + +```erlang +verify_signature_and_address(MsgWithJSONReport, Address, NodeOpts) -> + Signers = hb_message:signers(MsgWithJSONReport, NodeOpts), + ?event({snp_signers, {explicit, Signers}}), + SigIsValid = hb_message:verify(MsgWithJSONReport, Signers), + ?event({snp_sig_is_valid, SigIsValid}), + AddressIsValid = lists:member(Address, Signers), + ?event({address_is_valid, AddressIsValid, {signer, Signers}, {address, Address}}), + case SigIsValid andalso AddressIsValid of + true -> {ok, true}; + false -> {error, signature_or_address_invalid} + end. +``` + +### verify_trusted_software + +Verify that the software configuration is trusted. + +```erlang +-spec verify_trusted_software(M1 :: term(), Msg :: map(), NodeOpts :: map()) -> + {ok, true} | {error, untrusted_software}. +``` + +```erlang +verify_trusted_software(M1, Msg, NodeOpts) -> + {ok, IsTrustedSoftware} = execute_is_trusted(M1, Msg, NodeOpts), + ?event({trusted_software, IsTrustedSoftware}), + case IsTrustedSoftware of + true -> {ok, true}; + false -> {error, untrusted_software} + end. +``` + +### verify_measurement + +Verify that the measurement in the SNP report is valid. + +```erlang +-spec verify_measurement(Msg :: map(), ReportJSON :: binary(), + NodeOpts :: map()) -> {ok, true} | {error, measurement_invalid}. 
+``` + +```erlang +verify_measurement(Msg, ReportJSON, NodeOpts) -> + Args = extract_measurement_args(Msg, NodeOpts), + ?event({args, { explicit, Args}}), + {ok, Expected} = dev_snp_nif:compute_launch_digest(Args), + ExpectedBin = list_to_binary(Expected), + ?event({expected_measurement, {explicit, Expected}}), + Measurement = hb_ao:get(<<"measurement">>, Msg, NodeOpts), + ?event({measurement, {explicit,Measurement}}), + {Status, MeasurementIsValid} = + dev_snp_nif:verify_measurement( + ReportJSON, + ExpectedBin + ), + ?event({status, Status}), + ?event({measurement_is_valid, MeasurementIsValid}), + case MeasurementIsValid of + true -> {ok, true}; + false -> {error, measurement_invalid} + end. +``` + +### verify_report_integrity + +Verify the integrity of the SNP report's digital signature. + +```erlang +-spec verify_report_integrity(ReportJSON :: binary()) -> + {ok, true} | {error, report_signature_invalid}. +``` + +```erlang +verify_report_integrity(ReportJSON) -> + {ok, ReportIsValid} = dev_snp_nif:verify_signature(ReportJSON), + ?event({report_is_valid, ReportIsValid}), + case ReportIsValid of + true -> {ok, true}; + false -> {error, report_signature_invalid} + end. +``` + +### execute_is_trusted + +Validate that all software hashes match trusted configurations. + +```erlang +-spec execute_is_trusted(M1 :: term(), Msg :: map(), NodeOpts :: map()) -> + {ok, boolean()}. +``` + +```erlang +execute_is_trusted(_M1, Msg, NodeOpts) -> + FilteredLocalHashes = get_filtered_local_hashes(Msg, NodeOpts), + TrustedSoftware = hb_opts:get(snp_trusted, [#{}], NodeOpts), + ?event({trusted_software, {explicit, TrustedSoftware}}), + IsTrusted = + is_software_trusted( + FilteredLocalHashes, + TrustedSoftware, + NodeOpts + ), + ?event({is_all_software_trusted, IsTrusted}), + {ok, IsTrusted}. +``` + +### report_data_matches + +Validate that the report data matches the expected nonce. 
+ +```erlang +-spec report_data_matches(Address :: binary(), NodeMsgID :: binary(), + ReportData :: binary()) -> boolean(). +``` + +```erlang +report_data_matches(Address, NodeMsgID, ReportData) -> + ?event({generated_nonce, {explicit, generate_nonce(Address, NodeMsgID)}}), + ?event({expected_nonce, {explicit, ReportData}}), + generate_nonce(Address, NodeMsgID) == ReportData. +``` + +### get_test_hashes + +```erlang +-spec generate_nonce(RawAddress :: binary(), RawNodeMsgID :: binary()) -> binary(). +generate_nonce(RawAddress, RawNodeMsgID) -> + Address = hb_util:native_id(RawAddress), + NodeMsgID = hb_util:native_id(RawNodeMsgID), + << Address/binary, NodeMsgID/binary >>. +``` + +```erlang +get_test_hashes() -> + #{ + <<"vcpus">> => ?TEST_VCPUS_COUNT, + <<"vcpu_type">> => ?TEST_VCPU_TYPE, + <<"vmm_type">> => ?TEST_VMM_TYPE, + <<"guest_features">> => ?TEST_GUEST_FEATURES, + <<"firmware">> => ?TEST_FIRMWARE_HASH, + <<"kernel">> => ?TEST_KERNEL_HASH, + <<"initrd">> => ?TEST_INITRD_HASH, + <<"append">> => ?TEST_APPEND_HASH + }. +``` + +### setup_test_nodes + +```erlang +setup_test_nodes() -> + ProxyWallet = hb:wallet(<<"test/admissible-report-wallet.json">>), + ProxyOpts = #{ + store => hb_opts:get(store), + priv_wallet => ProxyWallet + }, + _ReportNode = hb_http_server:start_node(ProxyOpts), + VerifyingNode = hb_http_server:start_node(#{ + priv_wallet => ar_wallet:new(), + store => hb_opts:get(store), + snp_trusted => [ + #{ + <<"vcpus">> => ?TEST_VCPUS_COUNT, + <<"vcpu_type">> => ?TEST_VCPU_TYPE, + <<"vmm_type">> => ?TEST_VMM_TYPE, + <<"guest_features">> => ?TEST_GUEST_FEATURES, + <<"firmware">> => ?TEST_FIRMWARE_HASH, + <<"kernel">> => ?TEST_KERNEL_HASH, + <<"initrd">> => ?TEST_INITRD_HASH, + <<"append">> => ?TEST_APPEND_HASH + } + ], + snp_enforced_keys => [ + vcpu_type, vmm_type, guest_features, + firmware, kernel, initrd, append + ] + }), + {ProxyOpts, VerifyingNode}. 
+``` + +### execute_is_trusted_exact_match_should_fail_test + +```erlang +-spec load_test_report_data() -> binary(). +load_test_report_data() -> + TestFile = <<"test/admissible-report.json">>, + case file:read_file(TestFile) of + {ok, Data} -> + Data; + {error, enoent} -> + throw({error, {file_not_found, TestFile}}); + {error, Reason} -> + throw({error, {file_read_error, TestFile, Reason}}) + end. +``` + +```erlang +execute_is_trusted_exact_match_should_fail_test() -> + % Test case: Exact match with trusted software should fail when vcpus differ + Msg = #{ + <<"local-hashes">> => (get_test_hashes())#{ + <<"vcpus">> => 16 + } + }, + NodeOpts = #{ + snp_trusted => [get_test_hashes()], + snp_enforced_keys => [ + vcpus, vcpu_type, vmm_type, guest_features, + firmware, kernel, initrd, append + ] + }, + {ok, Result} = execute_is_trusted(#{}, Msg, NodeOpts), + ?assertEqual(false, Result). +``` + +### execute_is_trusted_subset_match_should_pass_test + +```erlang +execute_is_trusted_subset_match_should_pass_test() -> + % Test case: Match with subset of keys in trusted software should pass + Msg = #{ + <<"local-hashes">> => (get_test_hashes())#{ + <<"vcpus">> => 16 + } + }, + NodeOpts = #{ + snp_trusted => [get_test_hashes()], + snp_enforced_keys => [ + vcpu_type, vmm_type, guest_features, + firmware, kernel, initrd, append + ] + }, + {ok, Result} = execute_is_trusted(#{}, Msg, NodeOpts), + ?assertEqual(true, Result). +``` + +### verify_test + +```erlang +verify_test() -> + % Note: If this test fails, it may be because the unsigned ID of the node + % message in `test/admissible-report.eterm` has changed. If the format ever + % changes, this value will need to be updated. Recalculate the unsigned ID + % of the `Request/node-message' field, decode `Request/address', concatenate + % the two, and encode. The result will be the new `Request/nonce' value. +``` + +### generate_success_test + +Test successful report generation with valid configuration. 
+ +```erlang +generate_success_test() -> + % Set up test configuration + TestWallet = ar_wallet:new(), + TestOpts = #{ + priv_wallet => TestWallet, + snp_trusted => [#{ + <<"vcpus">> => ?TEST_VCPUS_COUNT, + <<"vcpu_type">> => ?TEST_VCPU_TYPE, + <<"firmware">> => ?TEST_FIRMWARE_HASH, + <<"kernel">> => ?TEST_KERNEL_HASH + }] + }, + % Load test report data from file + TestReportJSON = load_test_report_data(), + % Mock the NIF function to return test data + ok = mock_snp_nif(TestReportJSON), + try + % Call generate function + {ok, Result} = generate(#{}, #{}, TestOpts), + % Verify the result structure + ?assert(is_map(Result)), + ?assert(maps:is_key(<<"local-hashes">>, Result)), + ?assert(maps:is_key(<<"nonce">>, Result)), + ?assert(maps:is_key(<<"address">>, Result)), + ?assert(maps:is_key(<<"node-message">>, Result)), + ?assert(maps:is_key(<<"report">>, Result)), + % Verify the report content + ?assertEqual(TestReportJSON, maps:get(<<"report">>, Result)), + % Verify local hashes match the first trusted config + ExpectedHashes = maps:get(<<"local-hashes">>, Result), + ?assertEqual(?TEST_VCPUS_COUNT, maps:get(<<"vcpus">>, ExpectedHashes)), + ?assertEqual(?TEST_VCPU_TYPE, maps:get(<<"vcpu_type">>, ExpectedHashes)), + % Verify nonce is properly encoded + Nonce = maps:get(<<"nonce">>, Result), + ?assert(is_binary(Nonce)), + ?assert(byte_size(Nonce) > 0), + % Verify address is present and properly formatted + Address = maps:get(<<"address">>, Result), + ?assert(is_binary(Address)), + ?assert(byte_size(Address) > 0) + after + % Clean up mock + unmock_snp_nif() + end. +``` + +### generate_missing_wallet_test + +Test error handling when wallet is missing. 
+ +```erlang +generate_missing_wallet_test() -> + TestOpts = #{ + % No priv_wallet provided + snp_trusted => [#{ <<"firmware">> => ?TEST_FIRMWARE_HASH }] + }, + % Mock the NIF function (shouldn't be called) + ok = mock_snp_nif(<<"dummy_report">>), + try + % Call generate function - should fail + Result = generate(#{}, #{}, TestOpts), + ?assertMatch({error, no_wallet_available}, Result) + after + unmock_snp_nif() + end. +``` + +### generate_missing_trusted_configs_test + +Test error handling when trusted configurations are missing. + +```erlang +generate_missing_trusted_configs_test() -> + TestWallet = ar_wallet:new(), + TestOpts = #{ + priv_wallet => TestWallet, + snp_trusted => [] % Empty trusted configs + }, + % Mock the NIF function (shouldn't be called) + ok = mock_snp_nif(<<"dummy_report">>), + try + % Call generate function - should fail + Result = generate(#{}, #{}, TestOpts), + ?assertMatch({error, no_trusted_configs}, Result) + after + unmock_snp_nif() + end. +``` + +### verify_mock_generate_success_test_ + +Test successful round-trip: generate then verify with same configuration. + +```erlang +verify_mock_generate_success_test_() -> + { timeout, 30, fun verify_mock_generate_success/0 }. 
+``` + +### verify_mock_generate_success + +```erlang +verify_mock_generate_success() -> + % Set up test configuration + TestWallet = ar_wallet:new(), + TestTrustedConfig = #{ + <<"vcpus">> => 32, + <<"vcpu_type">> => ?TEST_VCPU_TYPE, + <<"vmm_type">> => ?TEST_VMM_TYPE, + <<"guest_features">> => ?TEST_GUEST_FEATURES, + <<"firmware">> => ?TEST_FIRMWARE_HASH, + <<"kernel">> => ?TEST_KERNEL_HASH, + <<"initrd">> => ?TEST_INITRD_HASH, + <<"append">> => ?TEST_APPEND_HASH + }, + GenerateOpts = #{ + priv_wallet => TestWallet, + snp_trusted => [TestTrustedConfig] + }, + % Load test report data and set up mock + TestReportJSON = load_test_report_data(), + ok = mock_snp_nif(TestReportJSON), + try + % Step 1: Generate a test report using mocked SNP + {ok, GeneratedMsg} = generate(#{}, #{}, GenerateOpts), + % Verify the generated message structure + ?assert(is_map(GeneratedMsg)), + ?assert(maps:is_key(<<"report">>, GeneratedMsg)), + ?assert(maps:is_key(<<"address">>, GeneratedMsg)), + ?assert(maps:is_key(<<"nonce">>, GeneratedMsg)), + % Step 2: Set up verification options with the same trusted config + VerifyOpts = #{ + snp_trusted => [TestTrustedConfig], + snp_enforced_keys => [vcpu_type, vmm_type, guest_features, + firmware, kernel, initrd, append] + }, + % Step 3: Verify the generated report + {ok, VerifyResult} = + verify( + #{}, + hb_message:commit(GeneratedMsg, GenerateOpts), + VerifyOpts + ), + % Step 4: Assert that verification succeeds + ?assertEqual(<<"true">>, VerifyResult), + % Additional validation: verify specific fields + ReportData = maps:get(<<"report">>, GeneratedMsg), + ?assertEqual(TestReportJSON, ReportData), + LocalHashes = maps:get(<<"local-hashes">>, GeneratedMsg), + ?assertEqual(TestTrustedConfig, LocalHashes) + after + % Clean up mock + unmock_snp_nif() + end. +``` + +### verify_mock_generate_wrong_config_test_ + +Test verification failure when using wrong trusted configuration. 
+ +```erlang +verify_mock_generate_wrong_config_test_() -> + { timeout, 30, fun verify_mock_generate_wrong_config/0 }. +``` + +### verify_mock_generate_wrong_config + +```erlang +verify_mock_generate_wrong_config() -> + % Set up test configuration for generation + TestWallet = ar_wallet:new(), + GenerateTrustedConfig = #{ + <<"vcpus">> => ?TEST_VCPUS_COUNT, + <<"vcpu_type">> => ?TEST_VCPU_TYPE, + <<"vmm_type">> => ?TEST_VMM_TYPE, + <<"guest_features">> => ?TEST_GUEST_FEATURES, + <<"firmware">> => ?TEST_FIRMWARE_HASH, + <<"kernel">> => ?TEST_KERNEL_HASH, + <<"initrd">> => ?TEST_INITRD_HASH, + <<"append">> => ?TEST_APPEND_HASH + }, + GenerateOpts = #{ + priv_wallet => TestWallet, + snp_trusted => [GenerateTrustedConfig] + }, + % Load test report data and set up mock + TestReportJSON = load_test_report_data(), + ok = mock_snp_nif(TestReportJSON), + try + % Step 1: Generate a test report + {ok, GeneratedMsg} = generate(#{}, #{}, GenerateOpts), + % Step 2: Set up verification with DIFFERENT trusted config + WrongTrustedConfig = #{ + <<"vcpus">> => 32, % Different from generation config + <<"vcpu_type">> => 3, % Different from generation config + <<"firmware">> => <<"different_firmware_hash">>, + <<"kernel">> => <<"different_kernel_hash">> + }, + VerifyOpts = #{ + snp_trusted => [WrongTrustedConfig], + snp_enforced_keys => [vcpus, vcpu_type, firmware, kernel] + }, + % Step 3: Verify the generated report with wrong config + VerifyResult = + verify( + #{}, + hb_message:commit(GeneratedMsg, GenerateOpts), + VerifyOpts + ), + ?event({verify_result, {explicit, VerifyResult}}), + % Step 4: Assert that verification fails (either as error or false result) + case VerifyResult of + {ok, <<"false">>} -> + % Verification completed but returned false (all validations ran) + ok; + {error, _Reason} -> + % Verification failed early (expected for wrong config) + ok; + Other -> + % Unexpected result - should fail the test + ?assertEqual({ok, <<"false">>}, Other) + end + after + % Clean up 
mock + unmock_snp_nif() + end. +``` + +--- + +*Generated from [dev_snp.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_snp.erl)* diff --git a/docs/book/src/dev_snp_nif.erl.md b/docs/book/src/dev_snp_nif.erl.md new file mode 100644 index 000000000..99cb197b8 --- /dev/null +++ b/docs/book/src/dev_snp_nif.erl.md @@ -0,0 +1,134 @@ +# dev_snp_nif + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_snp_nif.erl) + +## Exported Functions + +- `check_snp_support/0` +- `compute_launch_digest/1` +- `generate_attestation_report/2` +- `verify_measurement/2` +- `verify_signature/1` + +--- + +### check_snp_support + +```erlang +check_snp_support() -> + ?NOT_LOADED. +``` + +### generate_attestation_report + +```erlang +generate_attestation_report(_UniqueData, _VMPL) -> + ?NOT_LOADED. +``` + +### compute_launch_digest + +```erlang +compute_launch_digest(_Args) -> + ?NOT_LOADED. +``` + +### verify_measurement + +```erlang +verify_measurement(_Report, _Expected) -> + ?NOT_LOADED. +``` + +### verify_signature + +```erlang +verify_signature(_Report) -> + ?NOT_LOADED. +``` + +### init + +```erlang +init() -> + ?load_nif_from_crate(dev_snp_nif, 0). +``` + +### not_loaded + +```erlang +not_loaded(Line) -> + erlang:nif_error({not_loaded, [{module, ?MODULE}, {line, Line}]}). +``` + +### generate_attestation_report_test + +```erlang +generate_attestation_report_test() -> + %% Call check_support() to determine if SNP is supported + case dev_snp_nif:check_snp_support() of + {ok, true} -> + %% SNP is supported, generate unique data and test commitment report + UniqueData = crypto:strong_rand_bytes(64), + VMPL = 1, + ?assertEqual( + {ok, UniqueData}, + dev_snp_nif:generate_attestation_report(UniqueData, VMPL) + ); + {ok, false} -> + %% SNP is not supported, log event and assert NIF not loaded + ?event("SNP not supported on machine, skipping test..."), + ?assertEqual(ok, ok) + end. 
+``` + +### compute_launch_digest_test + +```erlang +compute_launch_digest_test() -> + %% Define the data structure + ArgsMap = #{ + vcpus => 32, + vcpu_type => 5, + vmm_type => 1, + guest_features => 16#1, + firmware => "b8c5d4082d5738db6b0fb0294174992738645df70c44cdecf7fad3a62244b788e7e408c582ee48a74b289f3acec78510", + kernel => "69d0cd7d13858e4fcef6bc7797aebd258730f215bc5642c4ad8e4b893cc67576", + initrd => "02e28b6c718bf0a5260d6f34d3c8fe0d71bf5f02af13e1bc695c6bc162120da1", + append => "56e1e5190622c8c6b9daa4fe3ad83f3831c305bb736735bf795b284cb462c9e7" + }, + ?event(ArgsMap), + %% Call the NIF + {ok, Result} = dev_snp_nif:compute_launch_digest(ArgsMap), + %% Expected result + EncTestVector = + <<"wmSDSQYuzE2M3rQcourJnDJHgalADM8TBev3gyjM5ObRNOn8oglvVznFbaWhajU_">>, + ?assertMatch(EncTestVector, hb_util:encode(Result)). +``` + +### verify_measurement_test + +```erlang +verify_measurement_test() -> + %% Define a mock report (JSON string) as binary + {ok, MockReport} = file:read_file("test/snp-measurement.json"), + %% Define the expected measurement (binary) + ExpectedMeasurement = <<94,87,4,197,20,11,255,129,179,197,146,104,8,212,152,248,110,11,60,246,82,254,24,55,201,47,157,229,163,82,108,66,191,138,241,229,40,144,133,170,116,109,17,62,20,241,144,119>>, + %% Call the NIF + Result = dev_snp_nif:verify_measurement(MockReport, ExpectedMeasurement), + ?assertMatch({ok, true}, Result). +``` + +### verify_signature_test + +```erlang +verify_signature_test() -> + %% Define a mock report (JSON string) as binary + {ok, MockAttestation} = file:read_file("test/snp-attestation.json"), + Result = dev_snp_nif:verify_signature(MockAttestation), + ?assertMatch({ok, true}, Result). 
+``` + +--- + +*Generated from [dev_snp_nif.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_snp_nif.erl)* diff --git a/docs/book/src/dev_stack.erl.md b/docs/book/src/dev_stack.erl.md new file mode 100644 index 000000000..9424529e2 --- /dev/null +++ b/docs/book/src/dev_stack.erl.md @@ -0,0 +1,774 @@ +# dev_stack + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_stack.erl) + +A device that contains a stack of other devices, and manages their +execution. It can run in two modes: fold (the default), and map. +In fold mode, it runs upon input messages in the order of their keys. A +stack maintains and passes forward a state (expressed as a message) as it +progresses through devices. +For example, a stack of devices as follows: +
+Device -> Stack
+Device-Stack/1/Name -> Add-One-Device
+Device-Stack/2/Name -> Add-Two-Device
+
+When called with the message: +
+#{ Path = "FuncName", binary => `<<"0">>` }
+
+Will produce the output: +
+#{ Path = "FuncName", binary => `<<"3">>` }
+{ok, #{ bin => `<<"3">>` }}
+
+In map mode, the stack will run over all the devices in the stack, and +combine their results into a single message. Each of the devices' +output values have a key that is the device's name in the `Device-Stack` +(its number if the stack is a list). +You can switch between fold and map modes by setting the `Mode` key in the +`Msg2` to either `Fold` or `Map`, or set it globally for the stack by +setting the `Mode` key in the `Msg1` message. The key in `Msg2` takes +precedence over the key in `Msg1`. +The key that is called upon the device stack is the same key that is used +upon the devices that are contained within it. For example, in the above +scenario we resolve FuncName on the stack, leading FuncName to be called on +Add-One-Device and Add-Two-Device. +A device stack responds to special statuses upon responses as follows: + `skip`: Skips the rest of the device stack for the current pass. + `pass`: Causes the stack to increment its pass number and re-execute + the stack from the first device, maintaining the state + accumulated so far. Only available in fold mode. +In all cases, the device stack will return the accumulated state to the +caller as the result of the call to the stack. +The dev_stack adds additional metadata to the message in order to track +the state of its execution as it progresses through devices. These keys +are as follows: + `Stack-Pass`: The number of times the stack has reset and re-executed + from the first device for the current message. + `Input-Prefix`: The prefix that the device should use for its outputs + and inputs. + `Output-Prefix`: The device that was previously executed. +All counters used by the stack are initialized to 1. +Additionally, as implemented in HyperBEAM, the device stack will honor a +number of options that are passed to it as keys in the message. Each of +these options is also passed through to the devices contained within the +stack during execution. 
These options include: + `Error-Strategy`: Determines how the stack handles errors from devices. + See `maybe_error/5` for more information. + `Allow-Multipass`: Determines whether the stack is allowed to automatically + re-execute from the first device when the `pass` tag is returned. See + `maybe_pass/3` for more information. +Under-the-hood, dev_stack uses a `default` handler to resolve all calls to +devices, aside `set/2` which it calls itself to mutate the message's `device` +key in order to change which device is currently being executed. This method +allows dev_stack to ensure that the message's HashPath is always correct, +even as it delegates calls to other devices. An example flow for a `dev_stack` +execution is as follows: +
+	/Msg1/AlicesExcitingKey ->
+		dev_stack:execute ->
+			/Msg1/Set?device=/Device-Stack/1 ->
+			/Msg2/AlicesExcitingKey ->
+			/Msg3/Set?device=/Device-Stack/2 ->
+			/Msg4/AlicesExcitingKey
+			... ->
+			/MsgN/Set?device=[This-Device] ->
+		returns {ok, /MsgN+1} ->
+	/MsgN+1
+
+In this example, the `device` key is mutated a number of times, but the +resulting HashPath remains correct and verifiable. + +--- + +## Exported Functions + +- `generate_append_device/1` +- `info/2` +- `input_prefix/3` +- `output_prefix/3` +- `prefix/3` +- `router/4` + +--- + +### info + +A device that contains a stack of other devices, and manages their + +```erlang +info(Msg, Opts) -> + hb_maps:merge( + #{ + handler => fun router/4, + excludes => [<<"set">>, <<"keys">>] + }, + case hb_maps:get(<<"stack-keys">>, Msg, not_found, Opts) of + not_found -> #{}; + StackKeys -> #{ exports => StackKeys } + end + ). +``` + +### prefix + +Return the default prefix for the stack. +Return the input prefix for the stack. + +```erlang +prefix(Msg1, _Msg2, Opts) -> + hb_ao:get(<<"output-prefix">>, {as, dev_message, Msg1}, <<"">>, Opts). +``` + +### input_prefix + +Return the default prefix for the stack. +Return the input prefix for the stack. +Return the output prefix for the stack. + +```erlang +input_prefix(Msg1, _Msg2, Opts) -> + hb_ao:get(<<"input-prefix">>, {as, dev_message, Msg1}, <<"">>, Opts). +``` + +### output_prefix + +Return the default prefix for the stack. +Return the input prefix for the stack. +Return the output prefix for the stack. +The device stack key router. Sends the request to `resolve_stack`, + +```erlang +output_prefix(Msg1, _Msg2, Opts) -> + hb_ao:get(<<"output-prefix">>, {as, dev_message, Msg1}, <<"">>, Opts). +``` + +### router + +Return the default prefix for the stack. +Return the input prefix for the stack. +Return the output prefix for the stack. +The device stack key router. Sends the request to `resolve_stack`, + +```erlang +router(<<"keys">>, Message1, Message2, Opts) -> + ?event({keys_called, {msg1, Message1}, {msg2, Message2}}), + dev_message:keys(Message1, Opts); +``` + +### router + +Return the default prefix for the stack. +Return the input prefix for the stack. +Return the output prefix for the stack. +The device stack key router. 
Sends the request to `resolve_stack`, + +```erlang +router(Key, Message1, Message2, Opts) -> + case hb_path:matches(Key, <<"transform">>) of + true -> transformer_message(Message1, Opts); + false -> router(Message1, Message2, Opts) + end. +``` + +### router + +```erlang +router(Message1, Message2, Opts) -> + ?event({router_called, {msg1, Message1}, {msg2, Message2}}), + Mode = + case hb_ao:get(<<"mode">>, Message2, not_found, Opts) of + not_found -> + hb_ao:get( + <<"mode">>, + {as, dev_message, Message1}, + <<"Fold">>, + Opts + ); + Msg2Mode -> Msg2Mode + end, + case Mode of + <<"Fold">> -> resolve_fold(Message1, Message2, Opts); + <<"Map">> -> resolve_map(Message1, Message2, Opts) + end. +``` + +### transformer_message + +Return a message which, when given a key, will transform the message + +```erlang +transformer_message(Msg1, Opts) -> + ?event({creating_transformer, {for, Msg1}}), + BaseInfo = info(Msg1, Opts), + {ok, + Msg1#{ + <<"device">> => #{ + info => + fun() -> + hb_maps:merge( + BaseInfo, + #{ + handler => + fun(Key, MsgX1) -> + transform(MsgX1, Key, Opts) + end + }, + Opts + ) + end, + <<"type">> => <<"stack-transformer">> + } + } + }. +``` + +### transform + +Return Message1, transformed such that the device named `Key` from the + +```erlang +transform(Msg1, Key, Opts) -> + % Get the device stack message from Msg1. +``` + +### resolve_fold + +The main device stack execution engine. 
See the moduledoc for more + +```erlang +resolve_fold(Message1, Message2, Opts) -> + {ok, InitDevMsg} = dev_message:get(<<"device">>, Message1, Opts), + StartingPassValue = + hb_ao:get(<<"pass">>, {as, dev_message, Message1}, unset, Opts), + PreparedMessage = hb_ao:set(Message1, <<"pass">>, 1, Opts), + case resolve_fold(PreparedMessage, Message2, 1, Opts) of + {ok, Raw} when not is_map(Raw) -> + {ok, Raw}; + {ok, Result} -> + dev_message:set( + Result, + #{ + <<"device">> => InitDevMsg, + <<"input-prefix">> => + hb_ao:get( + <<"previous-input-prefix">>, + {as, dev_message, Result}, + undefined, + Opts + ), + <<"output-prefix">> => + hb_ao:get( + <<"previous-output-prefix">>, + {as, dev_message, Result}, + undefined, + Opts + ), + <<"device-key">> => unset, + <<"device-stack-previous">> => unset, + <<"pass">> => StartingPassValue + }, + Opts + ); + Else -> + Else + end. +``` + +### resolve_fold + +```erlang +resolve_fold(Message1, Message2, DevNum, Opts) -> + case transform(Message1, DevNum, Opts) of + {ok, Message3} -> + ?event({stack_execute, DevNum, {msg1, Message3}, {msg2, Message2}}), + case hb_ao:resolve(Message3, Message2, Opts) of + {ok, Message4} when is_map(Message4) -> + ?event({result, ok, DevNum, Message4}), + resolve_fold(Message4, Message2, DevNum + 1, Opts); + {error, not_found} -> + ?event({skipping_device, not_found, DevNum, Message3}), + resolve_fold(Message3, Message2, DevNum + 1, Opts); + {ok, RawResult} -> + ?event({returning_raw_result, RawResult}), + {ok, RawResult}; + {skip, Message4} when is_map(Message4) -> + ?event({result, skip, DevNum, Message4}), + {ok, Message4}; + {pass, Message4} when is_map(Message4) -> + ?event({result, pass, {dev, DevNum}, Message4}), + resolve_fold( + increment_pass(Message4, Opts), + Message2, + 1, + Opts + ); + {error, Info} -> + ?event({result, error, {dev, DevNum}, Info}), + maybe_error(Message1, Message2, DevNum, Info, Opts); + Unexpected -> + ?event({result, unexpected, {dev, DevNum}, Unexpected}), + 
maybe_error( + Message1, + Message2, + DevNum, + {unexpected_result, Unexpected}, + Opts + ) + end; + not_found -> + ?event({execution_complete, DevNum, Message1}), + {ok, Message1} + end. +``` + +### resolve_map + +Map over the devices in the stack, accumulating the output in a single + +```erlang +resolve_map(Message1, Message2, Opts) -> + ?event({resolving_map, {msg1, Message1}, {msg2, Message2}}), + DevKeys = + hb_ao:get( + <<"device-stack">>, + {as, dev_message, Message1}, + Opts + ), + Res = {ok, + hb_maps:filtermap( + fun(Key, _Dev) -> + {ok, OrigWithDev} = transform(Message1, Key, Opts), + case hb_ao:resolve(OrigWithDev, Message2, Opts) of + {ok, Value} -> {true, Value}; + _ -> false + end + end, + hb_maps:without(?AO_CORE_KEYS, hb_ao:normalize_keys(DevKeys, Opts), Opts), + Opts + ) + }, + Res. +``` + +### increment_pass + +Helper to increment the pass number. + +```erlang +increment_pass(Message, Opts) -> + hb_ao:set( + Message, + #{ <<"pass">> => hb_ao:get(<<"pass">>, {as, dev_message, Message}, 1, Opts) + 1 }, + Opts + ). +``` + +### maybe_error + +```erlang +maybe_error(Message1, Message2, DevNum, Info, Opts) -> + case hb_opts:get(error_strategy, throw, Opts) of + stop -> + {error, {stack_call_failed, Message1, Message2, DevNum, Info}}; + throw -> + erlang:raise( + error, + {device_failed, + {dev_num, DevNum}, + {msg1, Message1}, + {msg2, Message2}, + {info, Info} + }, + [] + ) + end. +``` + +### generate_append_device + +```erlang +generate_append_device(Separator) -> + generate_append_device(Separator, ok). +``` + +### generate_append_device + +```erlang +generate_append_device(Separator, Status) -> + #{ + append => + fun(M1 = #{ <<"pass">> := 3 }, _) -> + % Stop after 3 passes. 
+``` + +### transform_internal_call_device_test + +Test that the transform function can be called correctly internally + +```erlang +transform_internal_call_device_test() -> + AppendDev = generate_append_device(<<"_">>), + Msg1 = + #{ + <<"device">> => <<"stack@1.0">>, + <<"device-stack">> => + #{ + <<"1">> => AppendDev, + <<"2">> => <<"message@1.0">> + } + }, + ?assertMatch( + <<"message@1.0">>, + hb_ao:get( + <<"device">>, + element(2, transform(Msg1, <<"2">>, #{})) + ) + ). +``` + +### transform_external_call_device_test + +Ensure we can generate a transformer message that can be called to + +```erlang +transform_external_call_device_test() -> + Msg1 = #{ + <<"device">> => <<"stack@1.0">>, + <<"device-stack">> => + #{ + <<"make-cool">> => + #{ + info => + fun() -> + #{ + handler => + fun(<<"keys">>, MsgX1) -> + ?event({test_dev_keys_called, MsgX1}), + {ok, hb_maps:keys(MsgX1, #{})}; + (Key, MsgX1) -> + {ok, Value} = + dev_message:get(Key, MsgX1, #{}), + dev_message:set( + MsgX1, + #{ Key => + << Value/binary, "-Cool">> + }, + #{} + ) + end + } + end, + <<"suffix">> => <<"-Cool">> + } + }, + <<"value">> => <<"Super">> + }, + ?assertMatch( + {ok, #{ <<"value">> := <<"Super-Cool">> }}, + hb_ao:resolve(Msg1, #{ + <<"path">> => <<"/transform/make-cool/value">> + }, #{}) + ). +``` + +### example_device_for_stack_test + +```erlang +example_device_for_stack_test() -> + % Test the example device that we use for later stack tests, such that + % we know that an error later is actually from the stack, and not from + % the example device. 
+``` + +### simple_stack_execute_test + +```erlang +simple_stack_execute_test() -> + Msg = #{ + <<"device">> => <<"stack@1.0">>, + <<"device-stack">> => + #{ + <<"1">> => generate_append_device(<<"!D1!">>), + <<"2">> => generate_append_device(<<"_D2_">>) + }, + <<"result">> => <<"INIT">> + }, + ?event({stack_executing, test, {explicit, Msg}}), + ?assertMatch( + {ok, #{ <<"result">> := <<"INIT!D1!2_D2_2">> }}, + hb_ao:resolve(Msg, #{ <<"path">> => <<"append">>, <<"bin">> => <<"2">> }, #{}) + ). +``` + +### many_devices_test + +```erlang +many_devices_test() -> + Msg = #{ + <<"device">> => <<"stack@1.0">>, + <<"device-stack">> => + #{ + <<"1">> => generate_append_device(<<"+D1">>), + <<"2">> => generate_append_device(<<"+D2">>), + <<"3">> => generate_append_device(<<"+D3">>), + <<"4">> => generate_append_device(<<"+D4">>), + <<"5">> => generate_append_device(<<"+D5">>), + <<"6">> => generate_append_device(<<"+D6">>), + <<"7">> => generate_append_device(<<"+D7">>), + <<"8">> => generate_append_device(<<"+D8">>) + }, + <<"result">> => <<"INIT">> + }, + ?assertMatch( + {ok, + #{ + <<"result">> := + <<"INIT+D12+D22+D32+D42+D52+D62+D72+D82">> + } + }, + hb_ao:resolve(Msg, #{ <<"path">> => <<"append">>, <<"bin">> => <<"2">> }, #{}) + ). +``` + +### benchmark_test + +```erlang +benchmark_test() -> + BenchTime = 0.3, + Msg = #{ + <<"device">> => <<"stack@1.0">>, + <<"device-stack">> => + #{ + <<"1">> => generate_append_device(<<"+D1">>), + <<"2">> => generate_append_device(<<"+D2">>), + <<"3">> => generate_append_device(<<"+D3">>), + <<"4">> => generate_append_device(<<"+D4">>), + <<"5">> => generate_append_device(<<"+D5">>) + }, + <<"result">> => <<"INIT">> + }, + Iterations = + hb_test_utils:benchmark( + fun() -> + hb_ao:resolve(Msg, + #{ + <<"path">> => <<"append">>, + <<"bin">> => <<"2">> + }, + #{} + ), + {count, 5} + end, + BenchTime + ), + hb_test_utils:benchmark_print( + <<"Stack:">>, + <<"resolutions">>, + Iterations, + BenchTime + ), + ?assert(Iterations >= 10). 
+```
+
+### test_prefix_msg
+
+```erlang
+test_prefix_msg() ->
+    Dev = #{
+        prefix_set =>
+            fun(M1, M2, Opts) ->
+                In = input_prefix(M1, M2, Opts),
+                Out = output_prefix(M1, M2, Opts),
+                Key = hb_ao:get(<<"key">>, M2, Opts),
+                Value = hb_ao:get(<<In/binary, Key/binary>>, M2, Opts),
+                ?event({setting, {inp, In}, {outp, Out}, {key, Key}, {value, Value}}),
+                {ok, hb_ao:set(
+                    M1,
+                    <<Out/binary, Key/binary>>,
+                    Value,
+                    Opts
+                )}
+            end
+    },
+    #{
+        <<"device">> => <<"stack@1.0">>,
+        <<"device-stack">> => #{ <<"1">> => Dev, <<"2">> => Dev }
+    }.
+```
+
+### no_prefix_test
+
+```erlang
+no_prefix_test() ->
+    Msg2 =
+        #{
+            <<"path">> => <<"prefix_set">>,
+            <<"key">> => <<"example">>,
+            <<"example">> => 1
+        },
+    {ok, Ex1Msg3} = hb_ao:resolve(test_prefix_msg(), Msg2, #{}),
+    ?event({ex1, Ex1Msg3}),
+    ?assertMatch(1, hb_ao:get(<<"example">>, Ex1Msg3, #{})).
+```
+
+### output_prefix_test
+
+```erlang
+output_prefix_test() ->
+    Msg1 =
+        (test_prefix_msg())#{
+            <<"output-prefixes">> => #{ <<"1">> => <<"out1/">>, <<"2">> => <<"out2/">> }
+        },
+    Msg2 =
+        #{
+            <<"path">> => <<"prefix_set">>,
+            <<"key">> => <<"example">>,
+            <<"example">> => 1
+        },
+    {ok, Ex2Msg3} = hb_ao:resolve(Msg1, Msg2, #{}),
+    ?assertMatch(1,
+        hb_ao:get(<<"out1/example">>, {as, dev_message, Ex2Msg3}, #{})),
+    ?assertMatch(1,
+        hb_ao:get(<<"out2/example">>, {as, dev_message, Ex2Msg3}, #{})).
+```
+
+### input_and_output_prefixes_test
+
+```erlang
+input_and_output_prefixes_test() ->
+    Msg1 =
+        (test_prefix_msg())#{
+            <<"input-prefixes">> => #{ 1 => <<"in1/">>, 2 => <<"in2/">> },
+            <<"output-prefixes">> => #{ 1 => <<"out1/">>, 2 => <<"out2/">> }
+        },
+    Msg2 =
+        #{
+            <<"path">> => <<"prefix_set">>,
+            <<"key">> => <<"example">>,
+            <<"in1">> => #{ <<"example">> => 1 },
+            <<"in2">> => #{ <<"example">> => 2 }
+        },
+    {ok, Msg3} = hb_ao:resolve(Msg1, Msg2, #{}),
+    ?assertMatch(1,
+        hb_ao:get(<<"out1/example">>, {as, dev_message, Msg3}, #{})),
+    ?assertMatch(2,
+        hb_ao:get(<<"out2/example">>, {as, dev_message, Msg3}, #{})).
+``` + +### input_output_prefixes_passthrough_test + +```erlang +input_output_prefixes_passthrough_test() -> + Msg1 = + (test_prefix_msg())#{ + <<"output-prefix">> => <<"combined-out/">>, + <<"input-prefix">> => <<"combined-in/">> + }, + Msg2 = + #{ + <<"path">> => <<"prefix_set">>, + <<"key">> => <<"example">>, + <<"combined-in">> => #{ <<"example">> => 1 } + }, + {ok, Ex2Msg3} = hb_ao:resolve(Msg1, Msg2, #{}), + ?assertMatch(1, + hb_ao:get( + <<"combined-out/example">>, + {as, dev_message, Ex2Msg3}, + #{} + ) + ). +``` + +### reinvocation_test + +```erlang +reinvocation_test() -> + Msg = #{ + <<"device">> => <<"stack@1.0">>, + <<"device-stack">> => + #{ + <<"1">> => generate_append_device(<<"+D1">>), + <<"2">> => generate_append_device(<<"+D2">>) + }, + <<"result">> => <<"INIT">> + }, + Res1 = hb_ao:resolve(Msg, #{ <<"path">> => <<"append">>, <<"bin">> => <<"2">> }, #{}), + ?assertMatch( + {ok, #{ <<"result">> := <<"INIT+D12+D22">> }}, + Res1 + ), + {ok, Msg2} = Res1, + Res2 = hb_ao:resolve(Msg2, #{ <<"path">> => <<"append">>, <<"bin">> => <<"3">> }, #{}), + ?assertMatch( + {ok, #{ <<"result">> := <<"INIT+D12+D22+D13+D23">> }}, + Res2 + ). +``` + +### skip_test + +```erlang +skip_test() -> + Msg1 = #{ + <<"device">> => <<"stack@1.0">>, + <<"device-stack">> => + #{ + <<"1">> => generate_append_device(<<"+D1">>, skip), + <<"2">> => generate_append_device(<<"+D2">>) + }, + <<"result">> => <<"INIT">> + }, + ?assertMatch( + {ok, #{ <<"result">> := <<"INIT+D12">> }}, + hb_ao:resolve( + Msg1, + #{ <<"path">> => <<"append">>, <<"bin">> => <<"2">> }, + #{} + ) + ). +``` + +### pass_test + +```erlang +pass_test() -> + % The append device will return `ok' after 2 passes, so this test + % recursively calls the device by forcing its response to be `pass' + % until that happens. +``` + +### not_found_test + +```erlang +not_found_test() -> + % Ensure that devices not exposing a key are safely skipped. 
+``` + +### simple_map_test + +```erlang +simple_map_test() -> + Msg = #{ + <<"device">> => <<"stack@1.0">>, + <<"device-stack">> => + #{ + <<"1">> => generate_append_device(<<"+D1">>), + <<"2">> => generate_append_device(<<"+D2">>) + }, + <<"result">> => <<"INIT">> + }, + {ok, Msg3} = + hb_ao:resolve( + Msg, + #{ <<"path">> => <<"append">>, <<"mode">> => <<"Map">>, <<"bin">> => <<"/">> }, + #{} + ), + ?assertMatch(<<"INIT+D1/">>, hb_ao:get(<<"1/result">>, Msg3, #{})), +``` + +--- + +*Generated from [dev_stack.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_stack.erl)* diff --git a/docs/book/src/dev_test.erl.md b/docs/book/src/dev_test.erl.md new file mode 100644 index 000000000..068f15399 --- /dev/null +++ b/docs/book/src/dev_test.erl.md @@ -0,0 +1,333 @@ +# dev_test + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_test.erl) + +A simple test device for AO-Core, so that we can test the functionality that +depends on using Erlang's module system. +NOTE: This device is labelled `test-device/1.0` to avoid conflicts with +other testing functionality -- care should equally be taken to avoid +using the `test` key in other settings. + +--- + +## Exported Functions + +- `compute/3` +- `delay/3` +- `increment_counter/3` +- `index/3` +- `info/1` +- `info/3` +- `init/3` +- `load/3` +- `mul/2` +- `postprocess/3` +- `restore/3` +- `snapshot/3` +- `test_func/1` +- `update_state/3` + +--- + +### info + +Exports a default_handler function that can be used to test the + +```erlang +info(_) -> + #{ + <<"default">> => dev_message, + handlers => #{ + <<"info">> => fun info/3, + <<"update_state">> => fun update_state/3, + <<"increment_counter">> => fun increment_counter/3 + } + }. +``` + +### info + +Exports a default_handler function that can be used to test the +Example index handler. 
+ +```erlang +info(_Msg1, _Msg2, _Opts) -> + InfoBody = #{ + <<"description">> => <<"Test device for testing the AO-Core framework">>, + <<"version">> => <<"1.0">>, + <<"paths">> => #{ + <<"info">> => <<"Get device info">>, + <<"test_func">> => <<"Test function">>, + <<"compute">> => <<"Compute function">>, + <<"init">> => <<"Initialize function">>, + <<"restore">> => <<"Restore function">>, + <<"mul">> => <<"Multiply function">>, + <<"snapshot">> => <<"Snapshot function">>, + <<"response">> => <<"Response function">>, + <<"update_state">> => <<"Update state function">> + } + }, + {ok, #{<<"status">> => 200, <<"body">> => InfoBody}}. +``` + +### index + +Exports a default_handler function that can be used to test the +Example index handler. + +```erlang +index(Msg, _Req, Opts) -> + Name = hb_ao:get(<<"name">>, Msg, <<"turtles">>, Opts), + {ok, + #{ + <<"content-type">> => <<"text/html">>, + <<"body">> => <<"i like ", Name/binary, "!">> + } + }. +``` + +### load + +Return a message with the device set to this module. + +```erlang +load(Base, _, _Opts) -> + {ok, Base#{ <<"device">> => <<"test-device@1.0">> }}. +``` + +### test_func + +Return a message with the device set to this module. +Example implementation of a `compute` handler. Makes a running list of + +```erlang +test_func(_) -> + {ok, <<"GOOD_FUNCTION">>}. +``` + +### compute + +Return a message with the device set to this module. +Example implementation of a `compute` handler. Makes a running list of + +```erlang +compute(Msg1, Msg2, Opts) -> + AssignmentSlot = hb_ao:get(<<"slot">>, Msg2, Opts), + Seen = hb_ao:get(<<"already-seen">>, Msg1, Opts), + ?event({compute_called, {msg1, Msg1}, {msg2, Msg2}, {opts, Opts}}), + {ok, + hb_ao:set( + Msg1, + #{ + <<"random-key">> => <<"random-value">>, + <<"results">> => + #{ <<"assignment-slot">> => AssignmentSlot }, + <<"already-seen">> => [AssignmentSlot | Seen] + }, + Opts + ) + }. +``` + +### init + +Example `init/3` handler. 
Sets the `Already-Seen` key to an empty list. +Example `restore/3` handler. Sets the hidden key `Test/Started` to the + +```erlang +init(Msg, _Msg2, Opts) -> + ?event({init_called_on_dev_test, Msg}), + {ok, hb_ao:set(Msg, #{ <<"already-seen">> => [] }, Opts)}. +``` + +### restore + +Example `init/3` handler. Sets the `Already-Seen` key to an empty list. +Example `restore/3` handler. Sets the hidden key `Test/Started` to the + +```erlang +restore(Msg, _Msg2, Opts) -> + ?event({restore_called_on_dev_test, Msg}), + case hb_ao:get(<<"already-seen">>, Msg, Opts) of + not_found -> + ?event({restore_not_found, Msg}), + {error, <<"No viable state to restore.">>}; + AlreadySeen -> + ?event({restore_found, AlreadySeen}), + {ok, + hb_private:set( + Msg, + #{ <<"test-key/started-state">> => AlreadySeen }, + Opts + ) + } + end. +``` + +### mul + +Example implementation of an `imported` function for a WASM +Do nothing when asked to snapshot. + +```erlang +mul(Msg1, Msg2) -> + ?event(mul_called), + State = hb_ao:get(<<"state">>, Msg1, #{ hashpath => ignore }), + [Arg1, Arg2] = hb_ao:get(<<"args">>, Msg2, #{ hashpath => ignore }), + ?event({mul_called, {state, State}, {args, [Arg1, Arg2]}}), + {ok, #{ <<"state">> => State, <<"results">> => [Arg1 * Arg2] }}. +``` + +### snapshot + +Example implementation of an `imported` function for a WASM +Do nothing when asked to snapshot. + +```erlang +snapshot(Msg1, Msg2, _Opts) -> + ?event({snapshot_called, {msg1, Msg1}, {msg2, Msg2}}), + {ok, #{}}. +``` + +### postprocess + +Set the `postprocessor-called` key to true in the HTTP server. + +```erlang +postprocess(_Msg, #{ <<"body">> := Msgs }, Opts) -> + ?event({postprocess_called, Opts}), + hb_http_server:set_opts(Opts#{ <<"postprocessor-called">> => true }), + {ok, Msgs}. +``` + +### update_state + +Find a test worker's PID and send it an update message. 
+ +```erlang +update_state(_Msg, Msg2, _Opts) -> + case hb_ao:get(<<"test-id">>, Msg2) of + not_found -> + {error, <<"No test ID found in message.">>}; + ID -> + LookupResult = hb_name:lookup({<<"test">>, ID}), + case LookupResult of + undefined -> + {error, <<"No test worker found.">>}; + Pid -> + Pid ! {update, Msg2}, + {ok, Pid} + end + end. +``` + +### increment_counter + +Find a test worker's PID and send it an increment message. + +```erlang +increment_counter(_Msg1, Msg2, _Opts) -> + case hb_ao:get(<<"test-id">>, Msg2) of + not_found -> + {error, <<"No test ID found in message.">>}; + ID -> + LookupResult = hb_name:lookup({<<"test">>, ID}), + case LookupResult of + undefined -> + {error, <<"No test worker found for increment.">>}; + Pid when is_pid(Pid) -> + Pid ! {increment}, + {ok, Pid}; + _ -> % Handle case where registered value isn't a PID + {error, <<"Invalid registration found for test worker.">>} + end + end. +``` + +### delay + +Does nothing, just sleeps `Req/duration or 750` ms and returns the + +```erlang +delay(Msg1, Req, Opts) -> + Duration = + hb_ao:get_first( + [ + {Msg1, <<"duration">>}, + {Req, <<"duration">>} + ], + 750, + Opts + ), + ?event(delay, {delay, {sleeping, Duration}}), + timer:sleep(Duration), + ?event({delay, waking}), + Return = + case hb_ao:get(<<"return">>, Msg1, Opts) of + not_found -> + hb_ao:get(<<"body">>, Req, #{ <<"result">> => <<"slept">> }, Opts); + ReturnMsgs -> + ReturnMsgs + end, + ?event(delay, {returning, Return}), + {ok, Return}. +``` + +### device_with_function_key_module_test + +Tests the resolution of a default function. + +```erlang +device_with_function_key_module_test() -> + Msg = + #{ + <<"device">> => <<"test-device@1.0">> + }, + ?assertEqual( + {ok, <<"GOOD_FUNCTION">>}, + hb_ao:resolve(Msg, test_func, #{}) + ). 
+``` + +### compute_test + +```erlang +compute_test() -> + Msg0 = #{ <<"device">> => <<"test-device@1.0">> }, + {ok, Msg1} = hb_ao:resolve(Msg0, init, #{}), + Msg2 = + hb_ao:set( + #{ <<"path">> => <<"compute">> }, + #{ + <<"slot">> => 1, + <<"body/number">> => 1337 + }, + #{} + ), + {ok, Msg3} = hb_ao:resolve(Msg1, Msg2, #{}), + ?assertEqual(1, hb_ao:get(<<"results/assignment-slot">>, Msg3, #{})), + Msg4 = + hb_ao:set( + #{ <<"path">> => <<"compute">> }, + #{ + <<"slot">> => 2, + <<"body/number">> => 9001 + }, + #{} + ), + {ok, Msg5} = hb_ao:resolve(Msg3, Msg4, #{}), + ?assertEqual(2, hb_ao:get(<<"results/assignment-slot">>, Msg5, #{})), + ?assertEqual([2, 1], hb_ao:get(<<"already-seen">>, Msg5, #{})). +``` + +### restore_test + +```erlang +restore_test() -> + Msg1 = #{ <<"device">> => <<"test-device@1.0">>, <<"already-seen">> => [1] }, + {ok, Msg3} = hb_ao:resolve(Msg1, <<"restore">>, #{}), +``` + +--- + +*Generated from [dev_test.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_test.erl)* diff --git a/docs/book/src/dev_volume.erl.md b/docs/book/src/dev_volume.erl.md new file mode 100644 index 000000000..778c8e528 --- /dev/null +++ b/docs/book/src/dev_volume.erl.md @@ -0,0 +1,576 @@ +# dev_volume + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_volume.erl) + +Secure Volume Management for HyperBEAM Nodes +This module handles encrypted storage operations for HyperBEAM, +providing a robust and secure approach to data persistence. It manages +the complete lifecycle of encrypted volumes from detection to creation, +formatting, and mounting. +Key responsibilities: +- Volume detection and initialization +- Encrypted partition creation and formatting +- Secure mounting using cryptographic keys +- Store path reconfiguration to use mounted volumes +- Automatic handling of various system states + (new device, existing partition, etc.) 
+The primary entry point is the `mount/3` function, which orchestrates +the entire process based on the provided configuration parameters. This +module works alongside `hb_volume` which provides the low-level +operations for device manipulation. +Security considerations: +- Ensures data at rest is protected through LUKS encryption +- Provides proper volume sanitization and secure mounting +- IMPORTANT: This module only applies configuration set in node options + and does NOT accept disk operations via HTTP requests. It cannot + format arbitrary disks as all operations are safeguarded by host + operating system permissions enforced upon the HyperBEAM environment. + +--- + +## Exported Functions + +- `info/1` +- `info/3` +- `mount/3` +- `public_key/3` + +--- + +### info + +Secure Volume Management for HyperBEAM Nodes +Exported function for getting device info, controls which functions + +```erlang +info(_) -> + ?event(debug_volume, {info, entry, device_info_requested}), + #{ exports => [info, mount, public_key] }. +``` + +### info + +HTTP info response providing information about this device +Handles the complete process of secure encrypted volume mounting. + +```erlang +-spec mount(term(), term(), map()) -> + {ok, binary()} | {error, binary()}. 
+``` + +```erlang +info(_Msg1, _Msg2, _Opts) -> + ?event(debug_volume, {info, http_request, starting}), + InfoBody = #{ + <<"description">> => + <<"Secure Volume Management for HyperBEAM Nodes">>, + <<"version">> => <<"1.0">>, + <<"api">> => #{ + <<"info">> => #{ + <<"description">> => <<"Get device info">> + }, + <<"mount">> => #{ + <<"description">> => <<"Mount an encrypted volume">>, + <<"required_node_opts">> => #{ + <<"priv_volume_key">> => <<"The encryption key">>, + <<"volume_device">> => <<"The base device path">>, + <<"volume_partition">> => <<"The partition path">>, + <<"volume_partition_type">> => <<"The partition type">>, + <<"volume_name">> => + <<"The name for the encrypted volume">>, + <<"volume_mount_point">> => + <<"Where to mount the volume">>, + <<"volume_store_path">> => + <<"The store path on the volume">> + } + }, + <<"public_key">> => #{ + <<"description">> => + <<"Get the node's public key for encrypted key exchange">> + } + } + }, + ?event(debug_volume, {info, http_response, success}), + {ok, #{<<"status">> => 200, <<"body">> => InfoBody}}. +%% +%% +%% +``` + +### mount + +HTTP info response providing information about this device +Handles the complete process of secure encrypted volume mounting. + +```erlang +-spec mount(term(), term(), map()) -> + {ok, binary()} | {error, binary()}. 
+``` + +```erlang +mount(_M1, _M2, Opts) -> + ?event(debug_volume, {mount, entry, starting}), + % Check if an encrypted key was sent in the request + EncryptedKey = hb_opts:get(priv_volume_key, not_found, Opts), + % Determine if we need to decrypt a key or use one from config + SkipDecryption = hb_opts:get(volume_skip_decryption, + <<"false">>, Opts), + Key = case SkipDecryption of + <<"true">> -> + ?event(debug_mount, {mount, skip_decryption, true}), + EncryptedKey; + _ -> + ?event(debug_volume, {decrypt_volume_key}), + case decrypt_volume_key(EncryptedKey, Opts) of + {ok, DecryptedKey} -> DecryptedKey; + {error, DecryptError} -> + ?event(debug_mount, + {mount, key_decrypt_error, DecryptError} + ), + not_found + end + end, + Device = hb_opts:get(volume_device, not_found, Opts), + Partition = hb_opts:get(volume_partition, not_found, Opts), + PartitionType = hb_opts:get(volume_partition_type, not_found, Opts), + VolumeName = hb_opts:get(volume_name, not_found, Opts), + MountPoint = hb_opts:get(volume_mount_point, not_found, Opts), + StorePath = hb_opts:get(volume_store_path, not_found, Opts), + ?event(debug_volume, + {mount, options_extracted, + { + device, Device, partition, Partition, + partition_type, PartitionType, volume_name, VolumeName, + mount_point, MountPoint, store_path, StorePath + } + } + ), + % Check for missing required node options + case hb_opts:check_required_opts([ + {<<"priv_volume_key">>, Key}, + {<<"volume_device">>, Device}, + {<<"volume_partition">>, Partition}, + {<<"volume_partition_type">>, PartitionType}, + {<<"volume_name">>, VolumeName}, + {<<"volume_mount_point">>, MountPoint}, + {<<"volume_store_path">>, StorePath} + ], Opts) of + {ok, _} -> + check_base_device( + Device, Partition, PartitionType, VolumeName, + MountPoint, StorePath, Key, Opts + ); + {error, ErrorMsg} -> + ?event(debug_volume, {mount, required_opts_error, ErrorMsg}), + {error, ErrorMsg} + end. 
+``` + +### public_key + +Returns the node's public key for secure key exchange. + +```erlang +-spec public_key(term(), term(), map()) -> + {ok, map()} | {error, binary()}. +``` + +```erlang +public_key(_M1, _M2, Opts) -> + % Retrieve the node's wallet + case hb_opts:get(priv_wallet, undefined, Opts) of + undefined -> + % Node doesn't have a wallet yet + ?event(debug_volume, + {public_key, wallet_error, no_wallet_found} + ), + {error, <<"Node wallet not available">>}; + {{_KeyType, _Priv, Pub}, _PubKey} -> + ?event(debug_volume, + {public_key, wallet_found, key_conversion_starting} + ), + % Convert to a standard RSA format (PKCS#1 or X.509) + RsaPubKey = #'RSAPublicKey'{ + publicExponent = 65537, % Common RSA exponent + modulus = crypto:bytes_to_integer(Pub) + }, + % Convert to DER format + DerEncoded = public_key:der_encode('RSAPublicKey', RsaPubKey), + % Base64 encode for transmission + Base64Key = base64:encode(DerEncoded), + ?event(debug_volume, {public_key, success, key_encoded}), + {ok, #{ + <<"status">> => 200, + <<"public_key">> => Base64Key, + <<"message">> => + <<"Use this public key to encrypt your volume key">> + }} + end. +``` + +### decrypt_volume_key + +Decrypts an encrypted volume key using the node's private key. + +```erlang +-spec decrypt_volume_key(binary(), map()) -> + {ok, binary()} | {error, binary()}. 
+``` + +```erlang +decrypt_volume_key(EncryptedKeyBase64, Opts) -> + % Decode the encrypted key + try + EncryptedKey = base64:decode(EncryptedKeyBase64), + ?event(debug_volume, + {decrypt_volume_key, base64_decoded, success} + ), + % Retrieve the node's wallet with private key + case hb_opts:get(priv_wallet, undefined, Opts) of + undefined -> + ?event(debug_volume, + {decrypt_volume_key, wallet_error, no_wallet} + ), + {error, <<"Node wallet not available for decryption">>}; + {{_KeyType = {rsa, E}, Priv, Pub}, _PubKey} -> + ?event(debug_volume, + {decrypt_volume_key, wallet_found, creating_private_key} + ), + % Create RSA private key record for decryption + RsaPrivKey = #'RSAPrivateKey'{ + publicExponent = E, + modulus = crypto:bytes_to_integer(Pub), + privateExponent = crypto:bytes_to_integer(Priv) + }, + % Decrypt the key + DecryptedKey = + public_key:decrypt_private( + EncryptedKey, + RsaPrivKey + ), + ?event(debug_volume, + {decrypt_volume_key, decryption_success, key_decrypted} + ), + {ok, DecryptedKey} + end + catch + _:Error -> + ?event(debug_volume, + {decrypt_volume_key, decryption_error, Error} + ), + {error, <<"Failed to decrypt volume key">>} + end. +``` + +### check_base_device + +Check if the base device exists and if it does, check if the + +```erlang +-spec check_base_device( + term(), term(), term(), term(), term(), term(), term(), map() +) -> {ok, binary()} | {error, binary()}. 
+``` + +```erlang +check_base_device( + Device, Partition, PartitionType, VolumeName, MountPoint, StorePath, + Key, Opts +) -> + ?event(debug_volume, + {check_base_device, entry, {checking_device, Device}} + ), + case hb_volume:check_for_device(Device) of + false -> + % Base device doesn't exist + ?event(debug_volume, + {check_base_device, device_not_found, Device} + ), + {error, <<"Base device not found">>}; + true -> + ?event(debug_volume, + {check_base_device, device_found, + {proceeding_to_partition_check, Device} + } + ), + check_partition( + Device, Partition, PartitionType, VolumeName, + MountPoint, StorePath, Key, Opts + ) + end. +``` + +### check_partition + +Check if the partition exists. If it does, attempt to mount it. + +```erlang +-spec check_partition( + term(), term(), term(), term(), term(), term(), term(), map() +) -> {ok, binary()} | {error, binary()}. +``` + +```erlang +check_partition( + Device, Partition, PartitionType, VolumeName, MountPoint, StorePath, + Key, Opts +) -> + ?event(debug_volume, + {check_partition, entry, {checking_partition, Partition}} + ), + case hb_volume:check_for_device(Partition) of + true -> + ?event(debug_volume, + {check_partition, partition_exists, + {mounting_existing, Partition} + } + ), + % Partition exists, try mounting it + mount_existing_partition( + Partition, Key, MountPoint, VolumeName, StorePath, Opts + ); + false -> + ?event(debug_volume, + {check_partition, partition_not_exists, + {creating_new, Partition} + } + ), + % Partition doesn't exist, create it + create_and_mount_partition( + Device, Partition, PartitionType, Key, + MountPoint, VolumeName, StorePath, Opts + ) + end. +``` + +### mount_existing_partition + +Mount an existing partition. + +```erlang +-spec mount_existing_partition( + term(), term(), term(), term(), term(), map() +) -> {ok, binary()} | {error, binary()}. 
+``` + +```erlang +mount_existing_partition( + Partition, Key, MountPoint, VolumeName, StorePath, Opts +) -> + ?event(debug_volume, + {mount_existing_partition, entry, + {attempting_mount, Partition, MountPoint} + } + ), + case hb_volume:mount_disk(Partition, Key, MountPoint, VolumeName) of + {ok, MountResult} -> + ?event(debug_volume, + {mount_existing_partition, mount_success, MountResult} + ), + update_store_path(StorePath, Opts); + {error, MountError} -> + ?event(debug_volume, + {mount_existing_partition, mount_error, + {error, MountError} + } + ), + {error, <<"Failed to mount volume">>} + end. +``` + +### create_and_mount_partition + +Create, format and mount a new partition. + +```erlang +-spec create_and_mount_partition( + term(), term(), term(), term(), term(), term(), term(), map() +) -> {ok, binary()} | {error, binary()}. +``` + +```erlang +create_and_mount_partition( + Device, Partition, PartitionType, Key, + MountPoint, VolumeName, StorePath, Opts +) -> + ?event(debug_volume, + {create_and_mount_partition, entry, + {creating_partition, Device, PartitionType} + } + ), + case hb_volume:create_partition(Device, PartitionType) of + {ok, PartitionResult} -> + ?event(debug_volume, + {create_and_mount_partition, partition_created, + PartitionResult + } + ), + format_and_mount( + Partition, Key, MountPoint, VolumeName, StorePath, Opts + ); + {error, PartitionError} -> + ?event(debug_volume, + {create_and_mount_partition, partition_error, + {error, PartitionError} + } + ), + {error, <<"Failed to create partition">>} + end. +``` + +### format_and_mount + +Format and mount a newly created partition. + +```erlang +-spec format_and_mount( + term(), term(), term(), term(), term(), map() +) -> {ok, binary()} | {error, binary()}. 
+``` + +```erlang +format_and_mount( + Partition, Key, MountPoint, VolumeName, StorePath, Opts +) -> + ?event(debug_volume, + {format_and_mount, entry, {formatting_partition, Partition}} + ), + case hb_volume:format_disk(Partition, Key) of + {ok, FormatResult} -> + ?event(debug_volume, + {format_and_mount, format_success, + {result, FormatResult} + } + ), + mount_formatted_partition( + Partition, Key, MountPoint, VolumeName, StorePath, Opts + ); + {error, FormatError} -> + ?event(debug_volume, + {format_and_mount, format_error, + {error, FormatError} + } + ), + {error, <<"Failed to format disk">>} + end. +``` + +### mount_formatted_partition + +Mount a newly formatted partition. + +```erlang +-spec mount_formatted_partition( + term(), term(), term(), term(), term(), map() +) -> {ok, binary()} | {error, binary()}. +``` + +```erlang +mount_formatted_partition( + Partition, Key, MountPoint, VolumeName, StorePath, Opts +) -> + ?event(debug_volume, + {mount_formatted_partition, entry, + {mounting_formatted, Partition, MountPoint} + } + ), + case hb_volume:mount_disk(Partition, Key, MountPoint, VolumeName) of + {ok, RetryMountResult} -> + ?event(debug_volume, + {mount_formatted_partition, mount_success, + {result, RetryMountResult} + } + ), + update_store_path(StorePath, Opts); + {error, RetryMountError} -> + ?event(debug_volume, + {mount_formatted_partition, mount_error, + {error, RetryMountError} + } + ), + {error, <<"Failed to mount newly formatted volume">>} + end. +``` + +### update_store_path + +Update the store path to use the mounted volume. + +```erlang +-spec update_store_path(term(), map()) -> + {ok, binary()} | {error, binary()}. 
+``` + +```erlang +update_store_path(StorePath, Opts) -> + ?event(debug_volume, + {update_store_path, entry, {updating_store, StorePath}} + ), + CurrentStore = hb_opts:get(store, [], Opts), + ?event(debug_volume, + {update_store_path, current_store, CurrentStore} + ), + case hb_volume:change_node_store(StorePath, CurrentStore) of + {ok, #{<<"store">> := NewStore} = StoreResult} -> + ?event(debug_volume, + {update_store_path, store_change_success, + {result, StoreResult} + } + ), + update_node_config(StorePath, NewStore, Opts); + {error, StoreError} -> + ?event(debug_volume, + {update_store_path, store_change_error, + {error, StoreError} + } + ), + {error, <<"Failed to update store">>} + end. +``` + +### update_node_config + +Update the node's configuration with the new store. + +```erlang +-spec update_node_config(term(), term(), map()) -> + {ok, binary()} | {error, binary()}. +``` + +```erlang +update_node_config(StorePath, NewStore, Opts) -> + ?event(debug_volume, + {update_node_config, entry, + {updating_config, StorePath, NewStore} + } + ), + GenesisWasmDBDir = + hb_opts:get( + genesis_wasm_db_dir, + "cache-mainnet/genesis-wasm", + Opts + ), + ?event(debug_volume, + {update_node_config, genesis_dir, GenesisWasmDBDir} + ), + BinaryGenesisWasmDBDir = list_to_binary(GenesisWasmDBDir), + FullGenesisPath = + <>, + ?event(debug_volume, + {update_node_config, full_path_created, FullGenesisPath} + ), + ok = + hb_http_server:set_opts( + Opts#{ + store => NewStore, + genesis_wasm_db_dir => FullGenesisPath + } + ), + ?event(debug_volume, + {update_node_config, config_updated, success} + ), +``` + +--- + +*Generated from [dev_volume.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_volume.erl)* diff --git a/docs/book/src/dev_wasi.erl.md b/docs/book/src/dev_wasi.erl.md new file mode 100644 index 000000000..2281d3110 --- /dev/null +++ b/docs/book/src/dev_wasi.erl.md @@ -0,0 +1,364 @@ +# dev_wasi + +[View source on 
GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_wasi.erl) + +A virtual filesystem device. +Implements a file-system-as-map structure, which is traversible externally. +Each file is a binary and each directory is an AO-Core message. +Additionally, this module adds a series of WASI-preview-1 compatible +functions for accessing the filesystem as imported functions by WASM +modules. + +--- + +## Exported Functions + +- `clock_time_get/3` +- `compute/1` +- `fd_read/3` +- `fd_write/3` +- `init/3` +- `path_open/3` +- `stdout/1` + +--- + +### init + +A virtual filesystem device. +On-boot, initialize the virtual file system with: + +```erlang +init(M1, _M2, Opts) -> + ?event(running_init), + MsgWithLib = + hb_ao:set( + M1, + #{ + <<"wasm/stdlib/wasi_snapshot_preview1">> => + #{ <<"device">> => <<"wasi@1.0">>} + }, + Opts + ), + MsgWithFDs = + hb_ao:set( + MsgWithLib, + <<"file-descriptors">>, + ?INIT_FDS, + Opts + ), + CompleteMsg = + hb_ao:set( + MsgWithFDs, + <<"vfs">>, + ?INIT_VFS, + Opts + ), + {ok, CompleteMsg}. +``` + +### compute + +```erlang +compute(Msg1) -> + {ok, Msg1}. +``` + +### stdout + +Return the stdout buffer from a state message. +Adds a file descriptor to the state message. + +```erlang +stdout(M) -> + hb_ao:get(<<"vfs/dev/stdout">>, M). +%path_open(M, Instance, [FDPtr, LookupFlag, PathPtr|_]) -> +``` + +### path_open + +Return the stdout buffer from a state message. +Adds a file descriptor to the state message. 
+ +```erlang +path_open(Msg1, Msg2, Opts) -> + FDs = hb_ao:get(<<"file-descriptors">>, Msg1, Opts), + Instance = hb_private:get(<<"instance">>, Msg1, Opts), + [FDPtr, LookupFlag, PathPtr|_] = hb_ao:get(<<"args">>, Msg2, Opts), + ?event({path_open, FDPtr, LookupFlag, PathPtr}), + Path = hb_beamr_io:read_string(Instance, PathPtr), + ?event({path_open, Path}), + FD = #{ + <<"index">> := Index + } = + case hb_ao:get(<<"vfs/", Path/binary>>, Msg1, Opts) of + not_found -> + #{ + <<"index">> => length(hb_ao:keys(FDs)) + 1, + <<"filename">> => Path, + <<"offset">> => 0 + }; + F -> F + end, + { + ok, + #{ + <<"state">> => + hb_ao:set( + Msg1, + <<"vfs/", Path/binary>>, + FD + ), + <<"results">> => [0, Index] + } + }. +``` + +### fd_write + +WASM stdlib implementation of `fd_write`, using the WASI-p1 standard + +```erlang +fd_write(Msg1, Msg2, Opts) -> + State = hb_ao:get(<<"state">>, Msg1, Opts), + Instance = hb_private:get(<<"wasm/instance">>, State, Opts), + [FD, Ptr, Vecs, RetPtr|_] = hb_ao:get(<<"args">>, Msg2, Opts), + ?event({fd_write, {fd, FD}, {ptr, Ptr}, {vecs, Vecs}, {retptr, RetPtr}}), + Signature = hb_ao:get(<<"func-sig">>, Msg2, Opts), + ?event({signature, Signature}), + fd_write(State, Instance, [FD, Ptr, Vecs, RetPtr], 0, Opts). 
+``` + +### fd_write + +```erlang +fd_write(S, Instance, [_, _Ptr, 0, RetPtr], BytesWritten, _Opts) -> + hb_beamr_io:write( + Instance, + RetPtr, + <> + ), + {ok, #{ <<"state">> => S, <<"results">> => [0] }}; +``` + +### fd_write + +```erlang +fd_write(S, Instance, [FDnum, Ptr, Vecs, RetPtr], BytesWritten, Opts) -> + FDNumStr = integer_to_binary(FDnum), + FD = hb_ao:get(<<"file-descriptors/", FDNumStr/binary>>, S, Opts), + Filename = hb_ao:get(<<"filename">>, FD, Opts), + StartOffset = hb_ao:get(<<"offset">>, FD, Opts), + {VecPtr, Len} = parse_iovec(Instance, Ptr), + {ok, Data} = hb_beamr_io:read(Instance, VecPtr, Len), + Before = + binary:part( + OrigData = hb_ao:get(<<"data">>, FD, Opts), + 0, + StartOffset + ), + After = + binary:part(OrigData, StartOffset, byte_size(OrigData) - StartOffset), + S1 = + hb_ao:set( + S, + <<"file-descriptors/", FDNumStr/binary, "/offset">>, + StartOffset + byte_size(Data), + Opts + ), + S2 = + hb_ao:set( + S1, + <<"vfs/", Filename/binary>>, + <>, + Opts + ), + fd_write( + S2, + Instance, + [FD, Ptr + 16, Vecs - 1, RetPtr], + BytesWritten + byte_size(Data), + Opts + ). +``` + +### fd_read + +Read from a file using the WASI-p1 standard interface. + +```erlang +fd_read(Msg1, Msg2, Opts) -> + State = hb_ao:get(<<"state">>, Msg1, Opts), + Instance = hb_private:get(<<"wasm/instance">>, State, Opts), + [FD, VecsPtr, NumVecs, RetPtr|_] = hb_ao:get(<<"args">>, Msg2, Opts), + Signature = hb_ao:get(<<"func-sig">>, Msg2, Opts), + ?event({signature, Signature}), + fd_read(State, Instance, [FD, VecsPtr, NumVecs, RetPtr], 0, Opts). 
+``` + +### fd_read + +```erlang +fd_read(S, Instance, [FD, _VecsPtr, 0, RetPtr], BytesRead, _Opts) -> + ?event({{completed_read, FD, BytesRead}}), + hb_beamr_io:write(Instance, RetPtr, + <>), + {ok, #{ <<"state">> => S, <<"results">> => [0] }}; +``` + +### fd_read + +```erlang +fd_read(S, Instance, [FDNum, VecsPtr, NumVecs, RetPtr], BytesRead, Opts) -> + ?event({fd_read, FDNum, VecsPtr, NumVecs, RetPtr}), + % Parse the request + FDNumStr = integer_to_binary(FDNum), + Filename = + hb_ao:get( + <<"file-descriptors/", FDNumStr/binary, "/filename">>, S, Opts), + {VecPtr, Len} = parse_iovec(Instance, VecsPtr), + % Read the bytes from the file + Data = hb_ao:get(<<"vfs/", Filename/binary>>, S, Opts), + Offset = + hb_ao:get( + <<"file-descriptors/", FDNumStr/binary, "/offset">>, S, Opts), + ReadSize = min(Len, byte_size(Data) - Offset), + Bin = binary:part(Data, Offset, ReadSize), + % Write the bytes to the WASM Instance + ok = hb_beamr_io:write(Instance, VecPtr, Bin), + fd_read( + hb_ao:set( + S, + <<"file-descriptors/", FDNumStr/binary, "/offset">>, + Offset + ReadSize, + Opts + ), + Instance, + [FDNum, VecsPtr + 16, NumVecs - 1, RetPtr], + BytesRead + ReadSize, + Opts + ). +``` + +### parse_iovec + +Parse an iovec in WASI-preview-1 format. + +```erlang +parse_iovec(Instance, Ptr) -> + {ok, VecStruct} = hb_beamr_io:read(Instance, Ptr, 16), + << + BinPtr:64/little-unsigned-integer, + Len:64/little-unsigned-integer + >> = VecStruct, + {BinPtr, Len}. +``` + +### clock_time_get + +```erlang +clock_time_get(Msg1, _Msg2, Opts) -> + ?event({clock_time_get, {returning, 1}}), + State = hb_ao:get(<<"state">>, Msg1, Opts), + {ok, #{ <<"state">> => State, <<"results">> => [1] }}. +%%% Tests +``` + +### init + +```erlang +init() -> + application:ensure_all_started(hb). 
+``` + +### generate_wasi_stack + +```erlang +generate_wasi_stack(File, Func, Params) -> + init(), + Msg0 = dev_wasm:cache_wasm_image(File), + Msg1 = Msg0#{ + <<"device">> => <<"stack@1.0">>, + <<"device-stack">> => [<<"wasi@1.0">>, <<"wasm-64@1.0">>], + <<"output-prefixes">> => [<<"wasm">>, <<"wasm">>], + <<"stack-keys">> => [<<"init">>, <<"compute">>], + <<"function">> => Func, + <<"params">> => Params + }, + {ok, Msg2} = hb_ao:resolve(Msg1, <<"init">>, #{}), + Msg2. +``` + +### vfs_is_serializable_test + +```erlang +vfs_is_serializable_test() -> + StackMsg = generate_wasi_stack("test/test-print.wasm", <<"hello">>, []), + VFSMsg = hb_ao:get(<<"vfs">>, StackMsg), + VFSMsg2 = + hb_message:minimize( + hb_message:convert( + hb_message:convert(VFSMsg, <<"httpsig@1.0">>, #{}), + <<"structured@1.0">>, + <<"httpsig@1.0">>, + #{}) + ), + ?assert(hb_message:match(VFSMsg, VFSMsg2)). +``` + +### wasi_stack_is_serializable_test + +```erlang +wasi_stack_is_serializable_test() -> + Msg = generate_wasi_stack("test/test-print.wasm", <<"hello">>, []), + HTTPSigMsg = hb_message:convert(Msg, <<"httpsig@1.0">>, #{}), + Msg2 = hb_message:convert(HTTPSigMsg, <<"structured@1.0">>, <<"httpsig@1.0">>, #{}), + ?assert(hb_message:match(Msg, Msg2)). 
+``` + +### basic_aos_exec_test + +```erlang +basic_aos_exec_test() -> + Init = generate_wasi_stack("test/aos-2-pure-xs.wasm", <<"handle">>, []), + Msg = gen_test_aos_msg("return 1 + 1"), + Env = gen_test_env(), + Instance = hb_private:get(<<"wasm/instance">>, Init, #{}), + {ok, Ptr1} = hb_beamr_io:malloc(Instance, byte_size(Msg)), + ?assertNotEqual(0, Ptr1), + hb_beamr_io:write(Instance, Ptr1, Msg), + {ok, Ptr2} = hb_beamr_io:malloc(Instance, byte_size(Env)), + ?assertNotEqual(0, Ptr2), + hb_beamr_io:write(Instance, Ptr2, Env), + % Read the strings to validate they are correctly passed + {ok, MsgBin} = hb_beamr_io:read(Instance, Ptr1, byte_size(Msg)), + {ok, EnvBin} = hb_beamr_io:read(Instance, Ptr2, byte_size(Env)), + ?assertEqual(Env, EnvBin), + ?assertEqual(Msg, MsgBin), + Ready = Init#{ <<"parameters">> => [Ptr1, Ptr2] }, + {ok, StateRes} = hb_ao:resolve(Ready, <<"compute">>, #{}), + [Ptr] = hb_ao:get(<<"results/wasm/output">>, StateRes), + {ok, Output} = hb_beamr_io:read_string(Instance, Ptr), + ?event({got_output, Output}), + #{ <<"response">> := #{ <<"Output">> := #{ <<"data">> := Data }} } + = hb_json:decode(Output), + ?assertEqual(<<"2">>, Data). +%%% Test Helpers +``` + +### gen_test_env + +```erlang +gen_test_env() -> + <<"{\"Process\":{\"Id\":\"AOS\",\"Owner\":\"FOOBAR\",\"Tags\":[{\"name\":\"Name\",\"value\":\"Thomas\"}, {\"name\":\"Authority\",\"value\":\"FOOBAR\"}]}}\0">>. +``` + +### gen_test_aos_msg + +```erlang +gen_test_aos_msg(Command) -> +``` + +--- + +*Generated from [dev_wasi.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_wasi.erl)* diff --git a/docs/book/src/dev_wasm.erl.md b/docs/book/src/dev_wasm.erl.md new file mode 100644 index 000000000..130fc5ef8 --- /dev/null +++ b/docs/book/src/dev_wasm.erl.md @@ -0,0 +1,458 @@ +# dev_wasm + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_wasm.erl) + +A device that executes a WASM image on messages using the Memory-64 +preview standard. 
In the backend, this device uses `beamr`: An Erlang wrapper +for WAMR, the WebAssembly Micro Runtime. +The device has the following requirements and interface: +
+    M1/Init ->
+        Assumes:
+            M1/process
+            M1/[Prefix]/image
+        Generates:
+            /priv/[Prefix]/instance
+            /priv/[Prefix]/import-resolver
+        Side-effects:
+            Creates a WASM executor loaded in memory of the HyperBEAM node.
+    M1/Compute ->
+        Assumes:
+            M1/priv/[Prefix]/instance
+            M1/priv/[Prefix]/import-resolver
+            M1/process
+            M2/message
+            M2/message/function OR M1/function
+            M2/message/parameters OR M1/parameters
+        Generates:
+            /results/[Prefix]/type
+            /results/[Prefix]/output
+        Side-effects:
+            Calls the WASM executor with the message and process.
+    M1/[Prefix]/state ->
+        Assumes:
+            M1/priv/[Prefix]/instance
+        Generates:
+            Raw binary WASM state
+
+ +--- + +## Exported Functions + +- `cache_wasm_image/1` +- `cache_wasm_image/2` +- `compute/3` +- `import/3` +- `info/2` +- `init/3` +- `instance/3` +- `normalize/3` +- `snapshot/3` +- `terminate/3` + +--- + +### info + +A device that executes a WASM image on messages using the Memory-64 +Export all functions aside the `instance/3` function. + +```erlang +info(_Msg1, _Opts) -> + #{ + excludes => [instance] + }. +``` + +### init + +Boot a WASM image on the image stated in the `process/image` field of + +```erlang +init(M1, M2, Opts) -> + ?event(running_init), + % Where we should read initial parameters from. +``` + +### default_import_resolver + +Take a BEAMR import call and resolve it using `hb_ao`. + +```erlang +default_import_resolver(Msg1, Msg2, Opts) -> + #{ + instance := WASM, + module := Module, + func := Func, + args := Args, + func_sig := Signature + } = Msg2, + Prefix = dev_stack:prefix(Msg1, Msg2, Opts), + {ok, Msg3} = + hb_ao:resolve( + hb_private:set( + Msg1, + #{ <> => WASM }, + Opts + ), + #{ + <<"path">> => <<"import">>, + <<"module">> => list_to_binary(Module), + <<"func">> => list_to_binary(Func), + <<"args">> => Args, + <<"func-sig">> => list_to_binary(Signature) + }, + Opts + ), + NextState = hb_ao:get(state, Msg3, Opts), + Response = hb_ao:get(results, Msg3, Opts), + {ok, Response, NextState}. +``` + +### compute + +Call the WASM executor with a message that has been prepared by a prior + +```erlang +compute(RawM1, M2, Opts) -> + % Normalize the message to have an open WASM instance, but no literal `State'. +``` + +### normalize + +Normalize the message to have an open WASM instance, but no literal +Serialize the WASM state to a binary. 
+ +```erlang +normalize(RawM1, M2, Opts) -> + ?event({normalize_raw_m1, RawM1}), + M3 = + case instance(RawM1, M2, Opts) of + not_found -> + DeviceKey = + case hb_ao:get(<<"device-key">>, RawM1, Opts) of + not_found -> []; + Key -> [Key] + end, + ?event( + {no_instance_attempting_to_get_snapshot, + {msg1, RawM1}, {device_key, DeviceKey} + } + ), + Memory = + hb_ao:get( + [<<"snapshot">>] ++ DeviceKey ++ [<<"body">>], + {as, dev_message, RawM1}, + Opts + ), + case Memory of + not_found -> throw({error, no_wasm_instance_or_snapshot}); + State -> + {ok, M1} = init(RawM1, State, Opts), + Res = hb_beamr:deserialize(instance(M1, M2, Opts), State), + ?event(snapshot, {wasm_deserialized, {result, Res}}), + M1 + end; + _ -> + ?event(wasm_instance_found_not_deserializing), + RawM1 + end, + dev_message:set(M3, #{ <<"snapshot">> => unset }, Opts). +``` + +### snapshot + +Normalize the message to have an open WASM instance, but no literal +Serialize the WASM state to a binary. + +```erlang +snapshot(M1, M2, Opts) -> + ?event(snapshot, generating_snapshot), + Instance = instance(M1, M2, Opts), + {ok, Serialized} = hb_beamr:serialize(Instance), + {ok, + #{ + <<"body">> => Serialized + } + }. +``` + +### terminate + +Tear down the WASM executor. + +```erlang +terminate(M1, M2, Opts) -> + ?event(terminate_called_on_dev_wasm), + Prefix = dev_stack:prefix(M1, M2, Opts), + Instance = instance(M1, M2, Opts), + hb_beamr:stop(Instance), + {ok, hb_private:set(M1, + #{ + <> => unset + }, + Opts + )}. +``` + +### instance + +Get the WASM instance from the message. Note that this function is exported + +```erlang +instance(M1, M2, Opts) -> + Prefix = dev_stack:prefix(M1, M2, Opts), + Path = <>, + ?event({searching_for_instance, Path, M1}), + hb_private:get(Path, M1, Opts#{ hashpath => ignore }). +``` + +### import + +Handle standard library calls by: + +```erlang +import(Msg1, Msg2, Opts) -> + % 1. Adjust the path to the stdlib. 
+``` + +### undefined_import_stub + +Log the call to the standard library as an event, and write the + +```erlang +undefined_import_stub(Msg1, Msg2, Opts) -> + ?event({unimplemented_dev_wasm_call, {msg1, Msg1}, {msg2, Msg2}}), + Prefix = dev_stack:prefix(Msg1, Msg2, Opts), + UndefinedCallsPath = + <<"state/results/", Prefix/binary, "/undefined-calls">>, + Msg3 = hb_ao:set( + Msg1, + #{ + UndefinedCallsPath => + [ + Msg2 + | + case hb_ao:get(UndefinedCallsPath, Msg1, Opts) of + not_found -> []; + X -> X + end + ] + }, + Opts + ), + {ok, #{ state => Msg3, results => [0] }}. +``` + +### init + +```erlang +init() -> + application:ensure_all_started(hb), + hb:init(). +``` + +### input_prefix_test + +```erlang +input_prefix_test() -> + init(), + #{ <<"image">> := ImageID } = cache_wasm_image("test/test.wasm"), + Msg1 = + #{ + <<"device">> => <<"wasm-64@1.0">>, + <<"input-prefix">> => <<"test-in">>, + <<"test-in">> => #{ <<"image">> => ImageID } + }, + {ok, Msg2} = hb_ao:resolve(Msg1, <<"init">>, #{}), + ?event({after_init, Msg2}), + Priv = hb_private:from_message(Msg2), + ?assertMatch( + {ok, Instance} when is_pid(Instance), + hb_ao:resolve(Priv, <<"instance">>, #{}) + ), + ?assertMatch( + {ok, Fun} when is_function(Fun), + hb_ao:resolve(Priv, <<"import-resolver">>, #{}) + ). +``` + +### process_prefixes_test + +Test that realistic prefixing for a `dev_process` works -- + +```erlang +process_prefixes_test() -> + init(), + Msg1 = + #{ + <<"device">> => <<"wasm-64@1.0">>, + <<"output-prefix">> => <<"wasm">>, + <<"input-prefix">> => <<"process">>, + <<"process">> => cache_wasm_image("test/test.wasm") + }, + {ok, Msg3} = hb_ao:resolve(Msg1, <<"init">>, #{}), + ?event({after_init, Msg3}), + Priv = hb_private:from_message(Msg3), + ?assertMatch( + {ok, Instance} when is_pid(Instance), + hb_ao:resolve(Priv, <<"wasm/instance">>, #{}) + ), + ?assertMatch( + {ok, Fun} when is_function(Fun), + hb_ao:resolve(Priv, <<"wasm/import-resolver">>, #{}) + ). 
+``` + +### init_test + +```erlang +init_test() -> + init(), + Msg = cache_wasm_image("test/test.wasm"), + {ok, Msg1} = hb_ao:resolve(Msg, <<"init">>, #{}), + ?event({after_init, Msg1}), + Priv = hb_private:from_message(Msg1), + ?assertMatch( + {ok, Instance} when is_pid(Instance), + hb_ao:resolve(Priv, <<"instance">>, #{}) + ), + ?assertMatch( + {ok, Fun} when is_function(Fun), + hb_ao:resolve(Priv, <<"import-resolver">>, #{}) + ). +``` + +### basic_execution_test + +```erlang +basic_execution_test() -> + ?assertEqual( + {ok, [120.0]}, + test_run_wasm("test/test.wasm", <<"fac">>, [5.0], #{}) + ). +``` + +### basic_execution_64_test + +```erlang +basic_execution_64_test() -> + ?assertEqual( + {ok, [120.0]}, + test_run_wasm("test/test-64.wasm", <<"fac">>, [5.0], #{}) + ). +``` + +### imported_function_test + +```erlang +imported_function_test() -> + ?assertEqual( + {ok, [32]}, + test_run_wasm( + "test/pow_calculator.wasm", + <<"pow">>, + [2, 5], + #{ + <<"stdlib/my_lib">> => + #{ <<"device">> => <<"test-device@1.0">> } + } + ) + ). +``` + +### benchmark_test + +```erlang +benchmark_test() -> + BenchTime = 0.5, + init(), + Msg0 = cache_wasm_image("test/test-64.wasm"), + {ok, Msg1} = hb_ao:resolve(Msg0, <<"init">>, #{}), + Msg2 = + hb_maps:merge( + Msg1, + #{ + <<"function">> => <<"fac">>, + <<"parameters">> => [5.0] + }, + #{} + ), + Iterations = + hb_test_utils:benchmark( + fun() -> + hb_ao:resolve(Msg2, <<"compute">>, #{}) + end, + BenchTime + ), + ?event(benchmark, {scheduled, Iterations}), + hb_test_utils:benchmark_print( + <<"Through AO-Core:">>, + <<"resolutions">>, + Iterations, + BenchTime + ), + ?assert(Iterations > 5), + ok. +``` + +### state_export_and_restore_test + +```erlang +state_export_and_restore_test() -> + init(), + % Generate a WASM message. We use the pow_calculator because it has a + % reasonable amount of memory to work with. +``` + +### cache_wasm_image + +```erlang +cache_wasm_image(Image) -> + cache_wasm_image(Image, #{}). 
+``` + +### cache_wasm_image + +```erlang +cache_wasm_image(Image, Opts) -> + {ok, Bin} = file:read_file(Image), + Msg = #{ <<"body">> => Bin }, + {ok, ID} = hb_cache:write(Msg, Opts), + #{ + <<"device">> => <<"wasm-64@1.0">>, + <<"image">> => ID + }. +``` + +### test_run_wasm + +```erlang +test_run_wasm(File, Func, Params, AdditionalMsg) -> + init(), + Msg0 = cache_wasm_image(File), + {ok, Msg1} = hb_ao:resolve(Msg0, <<"init">>, #{}), + ?event({after_init, Msg1}), + Msg2 = + hb_maps:merge( + Msg1, + hb_ao:set( + #{ + <<"function">> => Func, + <<"parameters">> => Params + }, + AdditionalMsg, + #{ hashpath => ignore } + ), + #{} + ), + ?event({after_setup, Msg2}), + {ok, StateRes} = hb_ao:resolve(Msg2, <<"compute">>, #{}), + ?event({after_resolve, StateRes}), + hb_ao:resolve(StateRes, <<"results/output">>, #{}). +``` + +--- + +*Generated from [dev_wasm.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_wasm.erl)* diff --git a/docs/book/src/dev_whois.erl.md b/docs/book/src/dev_whois.erl.md new file mode 100644 index 000000000..86026ae48 --- /dev/null +++ b/docs/book/src/dev_whois.erl.md @@ -0,0 +1,94 @@ +# dev_whois + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_whois.erl) + +A device for returning the IP/host information of a requester or +itself. + +--- + +## Exported Functions + +- `echo/3` +- `ensure_host/1` +- `node/3` + +--- + +### echo + +A device for returning the IP/host information of a requester or +Return the calculated host information for the requester. +Return the host information for the node. Sets the `host` key in the + +```erlang +echo(_, Req, Opts) -> + {ok, hb_maps:get(<<"ao-peer">>, Req, <<"unknown">>, Opts)}. +``` + +### node + +A device for returning the IP/host information of a requester or +Return the calculated host information for the requester. +Return the host information for the node. 
Sets the `host` key in the + +```erlang +node(_, _, Opts) -> + case ensure_host(Opts) of + {ok, NewOpts} -> + {ok, hb_opts:get(host, <<"unknown">>, NewOpts)}; + Error -> + Error + end. +``` + +### ensure_host + +Return the node message ensuring that the host is set. If it is not, we + +```erlang +ensure_host(Opts) -> + case hb_opts:get(host, <<"unknown">>, Opts) of + <<"unknown">> -> + case bootstrap_node_echo(Opts) of + {ok, Host} -> + % Set the host information in the persisted node message. +``` + +### bootstrap_node_echo + +Find the local host information from the specified bootstrap node. + +```erlang +bootstrap_node_echo(Opts) -> + case hb_opts:get(host_bootstrap_node, false, Opts) of + false -> + {error, <<"No bootstrap node configured.">>}; + BootstrapNode -> + hb_http:get(BootstrapNode, <<"/~whois@1.0/echo">>, Opts) + end. +``` + +### find_self_test + +```erlang +find_self_test() -> + BoostrapNode = + hb_http_server:start_node(#{ + priv_wallet => ar_wallet:new() + }), + PeerNode = + hb_http_server:start_node(#{ + port => Port = rand:uniform(40000) + 10000, + priv_wallet => ar_wallet:new(), + host_bootstrap_node => BoostrapNode, + http_client => httpc + }), + ?event({nodes, {peer, PeerNode}, {bootstrap, BoostrapNode}}), + {ok, ReceivedPeerHost} = hb_http:get(PeerNode, <<"/~whois@1.0/node">>, #{}), + ?event({find_self_test, ReceivedPeerHost}), +``` + +--- + +*Generated from [dev_whois.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_whois.erl)* diff --git a/docs/book/src/hb.erl.md b/docs/book/src/hb.erl.md new file mode 100644 index 000000000..6c5b84d88 --- /dev/null +++ b/docs/book/src/hb.erl.md @@ -0,0 +1,423 @@ +# hb + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb.erl) + +Hyperbeam is a decentralized node implementing the AO-Core protocol +on top of Arweave. +This protocol offers a computation layer for executing arbitrary logic on +top of the network's data. 
+Arweave is built to offer a robust, permanent storage layer for static data +over time. It can be seen as a globally distributed key-value store that +allows users to lookup IDs to retrieve data at any point in time: + `Arweave(ID) => Message` +Hyperbeam adds another layer of functionality on top of Arweave's protocol: +Allowing users to store and retrieve not only arbitrary bytes, but also to +perform execution of computation upon that data: + `Hyperbeam(Message1, Message2) => Message3` +When Hyperbeam executes a message, it will return a new message containing +the result of that execution, as well as signed commitments of its +correctness. If the computation that is executed is deterministic, recipients +of the new message are able to verify that the computation was performed +correctly. The new message may be stored back to Arweave if desired, +forming a permanent, verifiable, and decentralized log of computation. +The mechanisms described above form the basis of a decentralized and +verifiable compute engine without any relevant protocol-enforced +scalability limits. It is an implementation of a global, shared +supercomputer. +Hyperbeam can be used for an extremely large variety of applications, from +serving static Arweave data with signed commitments of correctness, to +executing smart contracts that have _built-in_ HTTP APIs. The Hyperbeam +node implementation implements AO, an Actor-Oriented process-based +environment for orchestrating computation over Arweave messages in order to +facilitate the execution of more traditional, consensus-based smart +contracts. +The core abstractions of the Hyperbeam node are broadly as follows: +1. The `hb` and `hb_opts` modules manage the node's configuration, + environment variables, and debugging tools. +2. The `hb_http` and `hb_http_server` modules manage all HTTP-related + functionality. `hb_http_server` handles turning received HTTP requests + into messages and applying those messages with the appropriate devices. 
+ `hb_http` handles making requests and responding with messages. `cowboy` + is used to implement the underlying HTTP server. +3. `hb_ao` implements the computation logic of the node: A mechanism + for resolving messages to other messages, via the application of logic + implemented in `devices`. `hb_ao` also manages the loading of Erlang + modules for each device into the node's environment. There are many + different default devices implemented in the hyperbeam node, using the + namespace `dev_*`. Some of the critical components are: + - `dev_message`: The default handler for all messages that do not + specify their own device. The message device is also used to resolve + keys that are not implemented by the device specified in a message, + unless otherwise signalled. + - `dev_stack`: The device responsible for creating and executing stacks + of other devices on messages that request it. There are many uses for + this device, one of which is the resolution of AO processes. + - `dev_p4`: The device responsible for managing payments for the services + provided by the node. +4. `hb_store`, `hb_cache` and the store implementations forms a layered + system for managing the node's access to persistent storage. `hb_cache` + is used as a resolution mechanism for reading and writing messages, while + `hb_store` provides an abstraction over the underlying persistent key-value + byte storage mechanisms. Example `hb_store` mechanisms can be found in + `hb_store_fs` and `hb_store_remote_node`. +5. `ar_*` modules implement functionality related to the base-layer Arweave + protocol and are largely unchanged from their counterparts in the Arweave + node codebase presently maintained by the Digital History Association + (@dha-team/Arweave). +You can find documentation of a similar form to this note in each of the core +modules of the hyperbeam node. 
+ +--- + +## Exported Functions + +- `address/0` +- `build/0` +- `debug_wait/4` +- `deploy_scripts/0` +- `init/0` +- `no_prod/3` +- `now/0` +- `read/1` +- `read/2` +- `start_mainnet/0` +- `start_mainnet/1` +- `start_simple_pay/0` +- `start_simple_pay/1` +- `start_simple_pay/2` +- `topup/3` +- `topup/4` +- `wallet/0` +- `wallet/1` + +--- + +### init + +Hyperbeam is a decentralized node implementing the AO-Core protocol +Initialize system-wide settings for the hyperbeam node. + +```erlang +init() -> + hb_name:start(), + ?event({setting_debug_stack_depth, hb_opts:get(debug_stack_depth)}), + Old = erlang:system_flag(backtrace_depth, hb_opts:get(debug_stack_depth)), + ?event({old_system_stack_depth, Old}), + ok. +``` + +### start_mainnet + +Start a mainnet server without payments. + +```erlang +start_mainnet() -> + start_mainnet(hb_opts:get(port)). +``` + +### start_mainnet + +```erlang +start_mainnet(Port) when is_integer(Port) -> + start_mainnet(#{ port => Port }); +``` + +### start_mainnet + +Start a server with a `simple-pay@1.0` pre-processor. + +```erlang +start_mainnet(Opts) -> + application:ensure_all_started([ + kernel, + stdlib, + inets, + ssl, + ranch, + cowboy, + gun, + os_mon + ]), + Wallet = hb:wallet(hb_opts:get(priv_key_location, no_viable_wallet_path, Opts)), + BaseOpts = hb_http_server:set_default_opts(Opts), + hb_http_server:start_node( + FinalOpts = + BaseOpts#{ + store => #{ <<"store-module">> => hb_store_fs, <<"name">> => <<"cache-mainnet">> }, + priv_wallet => Wallet + } + ), + Address = + case hb_opts:get(address, no_address, FinalOpts) of + no_address -> <<"[ !!! no-address !!! ]">>; + Addr -> Addr + end, + io:format( + "Started mainnet node at http://localhost:~p~n" + "Operator: ~s~n", + [hb_maps:get(port, Opts, undefined, Opts), Address] + ), + <<"http://localhost:", (integer_to_binary(hb_maps:get(port, Opts, undefined, Opts)))/binary>>. +``` + +### start_simple_pay + +Start a server with a `simple-pay@1.0` pre-processor. 
+ +```erlang +start_simple_pay() -> + start_simple_pay(address()). +``` + +### start_simple_pay + +```erlang +start_simple_pay(Addr) -> + rand:seed(default), + start_simple_pay(Addr, 10000 + rand:uniform(50000)). +``` + +### start_simple_pay + +```erlang +start_simple_pay(Addr, Port) -> + do_start_simple_pay(#{ port => Port, operator => Addr }). +``` + +### do_start_simple_pay + +Upload all scripts from the `scripts` directory to the node to Arweave, + +```erlang +do_start_simple_pay(Opts) -> + application:ensure_all_started([ + kernel, + stdlib, + inets, + ssl, + ranch, + cowboy, + gun, + os_mon + ]), + Port = hb_maps:get(port, Opts, undefined, Opts), + Processor = + #{ + <<"device">> => <<"p4@1.0">>, + <<"ledger-device">> => <<"simple-pay@1.0">>, + <<"pricing-device">> => <<"simple-pay@1.0">> + }, + hb_http_server:start_node( + Opts#{ + on => #{ + <<"request">> => Processor, + <<"response">> => Processor + } + } + ), + io:format( + "Started simple-pay node at http://localhost:~p~n" + "Operator: ~s~n", + [Port, address()] + ), + <<"http://localhost:", (integer_to_binary(Port))/binary>>. +``` + +### deploy_scripts + +Upload all scripts from the `scripts` directory to the node to Arweave, + +```erlang +deploy_scripts() -> + deploy_scripts("scripts/"). +``` + +### deploy_scripts + +Upload all scripts from the `scripts` directory to the node to Arweave, + +```erlang +deploy_scripts(Dir) -> + Files = filelib:wildcard(Dir ++ "*.lua"), + lists:foreach(fun(File) -> + {ok, Script} = file:read_file(File), + Msg = + hb_message:commit( + #{ + <<"data-protocol">> => <<"ao">>, + <<"variant">> => <<"ao.N.1">>, + <<"type">> => <<"module">>, + <<"content-type">> => <<"application/lua">>, + <<"name">> => hb_util:bin(File), + <<"body">> => Script + }, + wallet(), + <<"ans104@1.0">> + ), + {Status, _} = hb_client:upload(Msg, #{}, <<"ans104@1.0">>), + io:format( + "~s: ~s (upload status: ~p)~n", + [File, hb_util:id(Msg), Status] + ) + end, Files), + ok. 
+``` + +### topup + +Helper for topping up a user's balance on a simple-pay node. + +```erlang +topup(Node, Amount, Recipient) -> + topup(Node, Amount, Recipient, wallet()). +``` + +### topup + +```erlang +topup(Node, Amount, Recipient, Wallet) -> + Message = hb_message:commit( + #{ + <<"path">> => <<"/~simple-pay@1.0/topup">>, + <<"amount">> => Amount, + <<"recipient">> => Recipient + }, + Wallet + ), + hb_http:get(Node, Message, #{}). +``` + +### wallet + +```erlang +wallet() -> + wallet(hb_opts:get(priv_key_location)). +``` + +### wallet + +```erlang +wallet(Location) -> + wallet(Location, #{}). +``` + +### wallet + +```erlang +wallet(Location, Opts) -> + case file:read_file_info(Location) of + {ok, _} -> + ar_wallet:load_keyfile(Location, Opts); + {error, _} -> + Res = ar_wallet:new_keyfile(?DEFAULT_KEY_TYPE, Location), + ?event({created_new_keyfile, Location, address(Res)}), + Res + end. +``` + +### address + +Get the address of a wallet. Defaults to the address of the wallet + +```erlang +address() -> address(wallet()). +``` + +### address + +Get the address of a wallet. Defaults to the address of the wallet + +```erlang +address(Wallet) when is_tuple(Wallet) -> + hb_util:encode(ar_wallet:to_address(Wallet)); +``` + +### address + +Get the address of a wallet. Defaults to the address of the wallet +Debugging function to read a message from the cache. + +```erlang +address(Location) -> address(wallet(Location)). +``` + +### read + +Get the address of a wallet. Defaults to the address of the wallet +Debugging function to read a message from the cache. + +```erlang +read(ID) -> read(ID, local). +``` + +### read + +Get the address of a wallet. Defaults to the address of the wallet +Debugging function to read a message from the cache. + +```erlang +read(ID, ScopeAtom) when is_atom(ScopeAtom) -> + read(ID, hb_store:scope(hb_opts:get(store), ScopeAtom)); +``` + +### read + +Get the address of a wallet. 
Defaults to the address of the wallet +Debugging function to read a message from the cache. + +```erlang +read(ID, Store) -> + hb_cache:read(Store, hb_util:id(ID)). +``` + +### no_prod + +Utility function to throw an error if the current mode is prod and + +```erlang +no_prod(X, Mod, Line) -> + case hb_opts:get(mode) of + prod -> + io:format(standard_error, + "=== DANGER: NON-PROD READY CODE INVOKED IN PROD ===~n", []), + io:format(standard_error, "~w:~w: ~p~n", [Mod, Line, X]), + case hb_opts:get(exit_on_no_prod) of + true -> init:stop(); + false -> throw(X) + end; + _ -> X + end. +``` + +### now + +Utility function to get the current time in milliseconds. + +```erlang +now() -> + erlang:system_time(millisecond). +``` + +### build + +Utility function to hot-recompile and load the hyperbeam environment. +Utility function to wait for a given amount of time, printing a debug + +```erlang +build() -> + r3:do(compile, [{dir, "src"}]). +``` + +### debug_wait + +Utility function to hot-recompile and load the hyperbeam environment. +Utility function to wait for a given amount of time, printing a debug + +```erlang +debug_wait(T, Mod, Func, Line) -> + ?event(wait, {debug_wait, {T, Mod, Func, Line}}), +``` + +--- + +*Generated from [hb.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb.erl)* diff --git a/docs/book/src/hb_ao.erl.md b/docs/book/src/hb_ao.erl.md new file mode 100644 index 000000000..8aeb61f6e --- /dev/null +++ b/docs/book/src/hb_ao.erl.md @@ -0,0 +1,1509 @@ +# hb_ao + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_ao.erl) + +This module is the root of the device call logic of the +AO-Core protocol in HyperBEAM. +At the implementation level, every message is simply a collection of keys, +dictated by its `Device`, that can be resolved in order to yield their +values. 
Each key may contain a link to another message or a raw value: + `ao(BaseMessage, RequestMessage) -> {Status, Result}` +Under-the-hood, `AO-Core(BaseMessage, RequestMessage)` leads to a lookup of +the `device` key of the base message, followed by the evaluation of +`DeviceMod:PathPart(BaseMessage, RequestMessage)`, which defines the user +compute to be performed. If `BaseMessage` does not specify a device, +`~message@1.0` is assumed. The key to resolve is specified by the `path` +field of the message. +After each output, the `HashPath` is updated to include the `RequestMessage` +that was executed upon it. +Because each message implies a device that can resolve its keys, as well +as generating a merkle tree of the computation that led to the result, +you can see the AO-Core protocol as a system for cryptographically chaining +the execution of `combinators`. See `docs/ao-core-protocol.md` for more +information about AO-Core. +The `key(BaseMessage, RequestMessage)` pattern is repeated throughout the +HyperBEAM codebase, sometimes with `BaseMessage` replaced with `Msg1`, `M1` +or similar, and `RequestMessage` replaced with `Msg2`, `M2`, etc. +The result of any computation can be either a new message or a raw literal +value (a binary, integer, float, atom, or list of such values). +Devices can be expressed as either modules or maps. They can also be +referenced by an Arweave ID, which can be used to load a device from +the network (depending on the value of the `load_remote_devices` and +`trusted_device_signers` environment settings). +HyperBEAM device implementations are defined as follows: +
+    DevMod:ExportedFunc : Key resolution functions. All are assumed to be
+                          device keys (thus, present in every message that
+                          uses it) unless specified by `DevMod:info()`.
+                          Each function takes a set of parameters
+                          of the form `DevMod:KeyHandler(Msg1, Msg2, Opts)`.
+                          Each of these arguments can be omitted if not
+                          needed. Non-exported functions are not assumed
+                          to be device keys.
+    DevMod:info : Optional. Returns a map of options for the device. All 
+                  options are optional and assumed to be the defaults if 
+                  not specified. This function can accept a `Message1` as 
+                  an argument, allowing it to specify its functionality 
+                  based on a specific message if appropriate.
+    info/exports : Overrides the export list of the Erlang module, such that
+                  only the functions in this list are assumed to be device
+                  keys. Defaults to all of the functions that DevMod 
+                  exports in the Erlang environment.
+    info/excludes : A list of keys that should not be resolved by the device,
+                    despite being present in the Erlang module exports list.
+    info/handler : A function that should be used to handle _all_ keys for 
+                   messages using the device.
+    info/default : A function that should be used to handle all keys that
+                   are not explicitly implemented by the device. Defaults to
+                   the `dev_message` device, which contains general keys for 
+                   interacting with messages.
+    info/default_mod : A different device module that should be used to
+                   handle all keys that are not explicitly implemented
+                   by the device. Defaults to the `dev_message` device.
+    info/grouper : A function that returns the concurrency 'group' name for
+                   an execution. Executions with the same group name will
+                   be executed by sending a message to the associated process
+                   and waiting for a response. This allows you to control 
+                   concurrency of execution and to allow executions to share
+                   in-memory state as applicable. Default: A derivation of
+                   Msg1+Msg2. This means that concurrent calls for the same
+                   output will lead to only a single execution.
+    info/worker : A function that should be run as the 'server' loop of
+                  the executor for interactions using the device.
+The HyperBEAM resolver also takes a number of runtime options that change
+the way that the environment operates:
+`update_hashpath`:  Whether to add the `Msg2` to `HashPath` for the `Msg3`.
+                    Default: true.
+`add_key`:          Whether to add the key to the start of the arguments.
+                    Default: ``.
+
+ +--- + +## Exported Functions + +- `deep_set/4` +- `find_exported_function/5` +- `force_message/2` +- `get_first/2` +- `get_first/3` +- `get/2` +- `get/3` +- `get/4` +- `info/2` +- `is_exported/4` +- `keys/1` +- `keys/2` +- `keys/3` +- `load_device/2` +- `message_to_device/2` +- `message_to_fun/3` +- `normalize_key/1` +- `normalize_key/2` +- `normalize_keys/1` +- `normalize_keys/2` +- `remove/2` +- `remove/3` +- `resolve_many/2` +- `resolve/2` +- `resolve/3` +- `set/3` +- `set/4` +- `truncate_args/2` + +--- + +### resolve + +This module is the root of the device call logic of the +Get the value of a message's key by running its associated device + +```erlang +resolve(Path, Opts) when is_binary(Path) -> + resolve(#{ <<"path">> => Path }, Opts); +``` + +### resolve + +This module is the root of the device call logic of the +Get the value of a message's key by running its associated device + +```erlang +resolve(SingletonMsg, _Opts) + when is_map(SingletonMsg), not is_map_key(<<"path">>, SingletonMsg) -> + {error, <<"Attempted to resolve a message without a path.">>}; +``` + +### resolve + +This module is the root of the device call logic of the +Get the value of a message's key by running its associated device + +```erlang +resolve(SingletonMsg, Opts) -> + resolve_many(hb_singleton:from(SingletonMsg, Opts), Opts). +``` + +### resolve + +```erlang +resolve(Msg1, Path, Opts) when not is_map(Path) -> + resolve(Msg1, #{ <<"path">> => Path }, Opts); +``` + +### resolve + +```erlang +resolve(Msg1, Msg2, Opts) -> + PathParts = hb_path:from_message(request, Msg2, Opts), + ?event(ao_core, {stage, 1, prepare_multimessage_resolution, {path_parts, PathParts}}), + MessagesToExec = [ Msg2#{ <<"path">> => Path } || Path <- PathParts ], + ?event(ao_core, {stage, 1, prepare_multimessage_resolution, {messages_to_exec, MessagesToExec}}), + resolve_many([Msg1 | MessagesToExec], Opts). +``` + +### resolve_many + +Resolve a list of messages in sequence. 
Take the output of the first + +```erlang +resolve_many([ID], Opts) when ?IS_ID(ID) -> + % Note: This case is necessary to place specifically here for two reasons: + % 1. It is not in `do_resolve_many' because we need to handle the case + % where a result from a prior invocation is an ID itself. We should not + % attempt to resolve such IDs further. +``` + +### resolve_many + +```erlang +resolve_many(ListMsg, Opts) when is_map(ListMsg) -> + % We have been given a message rather than a list of messages, so we should + % convert it to a list, assuming that the message is monotonically numbered. +``` + +### resolve_many + +```erlang +resolve_many({as, DevID, Msg}, Opts) -> + subresolve(#{}, DevID, Msg, Opts); +``` + +### resolve_many + +```erlang +resolve_many([{resolve, Subres}], Opts) -> + resolve_many(Subres, Opts); +``` + +### resolve_many + +```erlang +resolve_many(MsgList, Opts) -> + ?event(ao_core, {resolve_many, MsgList}, Opts), + Res = do_resolve_many(MsgList, Opts), + ?event(ao_core, {resolve_many_complete, {res, Res}, {req, MsgList}}, Opts), + Res. +``` + +### do_resolve_many + +```erlang +do_resolve_many([], _Opts) -> + {failure, <<"Attempted to resolve an empty message sequence.">>}; +``` + +### do_resolve_many + +```erlang +do_resolve_many([Msg3], Opts) -> + ?event(ao_core, {stage, 11, resolve_complete, Msg3}), + {ok, hb_cache:ensure_loaded(Msg3, Opts)}; +``` + +### do_resolve_many + +```erlang +do_resolve_many([Msg1, Msg2 | MsgList], Opts) -> + ?event(ao_core, {stage, 0, resolve_many, {msg1, Msg1}, {msg2, Msg2}}), + case resolve_stage(1, Msg1, Msg2, Opts) of + {ok, Msg3} -> + ?event(ao_core, + { + stage, + 13, + resolved_step, + {msg3, Msg3}, + {opts, Opts} + }, + Opts + ), + do_resolve_many([Msg3 | MsgList], Opts); + Res -> + % The result is not a resolvable message. Return it. 
+``` + +### resolve_stage + +```erlang +resolve_stage(1, Link, Msg2, Opts) when ?IS_LINK(Link) -> + % If the first message is a link, we should load the message and + % continue with the resolution. +``` + +### resolve_stage + +```erlang +resolve_stage(1, Msg1, Link, Opts) when ?IS_LINK(Link) -> + % If the second message is a link, we should load the message and + % continue with the resolution. +``` + +### resolve_stage + +```erlang +resolve_stage(1, {as, DevID, Ref}, Msg2, Opts) when ?IS_ID(Ref) orelse ?IS_LINK(Ref) -> + % Normalize `as' requests with a raw ID or link as the path. Links will be + % loaded in following stages. +``` + +### resolve_stage + +```erlang +resolve_stage(1, {as, DevID, Link}, Msg2, Opts) when ?IS_LINK(Link) -> + % If the first message is an `as' with a link, we should load the message and + % continue with the resolution. +``` + +### resolve_stage + +```erlang +resolve_stage(1, {as, DevID, Raw = #{ <<"path">> := ID }}, Msg2, Opts) when ?IS_ID(ID) -> + % If the first message is an `as' with an ID, we should load the message and + % apply the non-path elements of the sub-request to it. +``` + +### resolve_stage + +```erlang +resolve_stage(1, Raw = {as, DevID, SubReq}, Msg2, Opts) -> + % Set the device of the message to the specified one and resolve the sub-path. +``` + +### resolve_stage + +```erlang +resolve_stage(1, RawMsg1, Msg2Outer = #{ <<"path">> := {as, DevID, Msg2Inner} }, Opts) -> + % Set the device to the specified `DevID' and resolve the message. Merging + % the `Msg2Inner' into the `Msg2Outer' message first. We return the result + % of the sub-resolution directly. +``` + +### resolve_stage + +```erlang +resolve_stage(1, {resolve, Subres}, Msg2, Opts) -> + % If the first message is a `{resolve, Subres}' tuple, we should execute it + % directly, then apply the request to the result. 
+``` + +### resolve_stage + +```erlang +resolve_stage(1, Msg1, {resolve, Subres}, Opts) -> + % If the second message is a `{resolve, Subresolution}' tuple, we should + % execute the subresolution directly to gain the underlying `Msg2' for + % our execution. We assume that the subresolution is already in a normalized, + % executable form, so we pass it to `resolve_many' for execution. +``` + +### resolve_stage + +```erlang +resolve_stage(1, Msg1, Msg2, Opts) when is_list(Msg1) -> + % Normalize lists to numbered maps (base=1) if necessary. +``` + +### resolve_stage + +```erlang +resolve_stage(1, Msg1, NonMapMsg2, Opts) when not is_map(NonMapMsg2) -> + ?event(ao_core, {stage, 1, path_normalize}), + resolve_stage(1, Msg1, #{ <<"path">> => NonMapMsg2 }, Opts); +``` + +### resolve_stage + +```erlang +resolve_stage(1, RawMsg1, RawMsg2, Opts) -> + % Normalize the path to a private key containing the list of remaining + % keys to resolve. +``` + +### resolve_stage + +```erlang +resolve_stage(2, Msg1, Msg2, Opts) -> + ?event(ao_core, {stage, 2, cache_lookup}, Opts), + % Lookup request in the cache. If we find a result, return it. +``` + +### resolve_stage + +```erlang +resolve_stage(3, Msg1, Msg2, Opts) when not is_map(Msg1) or not is_map(Msg2) -> + % Validation check: If the messages are not maps, we cannot find a key + % in them, so return not_found. +``` + +### resolve_stage + +```erlang +resolve_stage(3, Msg1, Msg2, Opts) -> + ?event(ao_core, {stage, 3, validation_check}, Opts), + % Validation check: Check if the message is valid. +``` + +### resolve_stage + +```erlang +resolve_stage(4, Msg1, Msg2, Opts) -> + ?event(ao_core, {stage, 4, persistent_resolver_lookup}, Opts), + % Persistent-resolver lookup: Search for local (or Distributed + % Erlang cluster) processes that are already performing the execution. 
+``` + +### resolve_stage + +```erlang +resolve_stage(5, Msg1, Msg2, ExecName, Opts) -> + ?event(ao_core, {stage, 5, device_lookup}, Opts), + % Device lookup: Find the Erlang function that should be utilized to + % execute Msg2 on Msg1. +``` + +### resolve_stage + +```erlang +resolve_stage(6, Func, Msg1, Msg2, ExecName, Opts) -> + ?event(ao_core, {stage, 6, ExecName, execution}, Opts), + % Execution. +``` + +### resolve_stage + +```erlang +resolve_stage(7, Msg1, Msg2, {St, Res}, ExecName, Opts = #{ on := On = #{ <<"step">> := _ }}) -> + ?event(ao_core, {stage, 7, ExecName, executing_step_hook, {on, On}}, Opts), + % If the `step' hook is defined, we execute it. Note: This function clause + % matches directly on the `on' key of the `Opts' map. This is in order to + % remove the expensive lookup check that would otherwise be performed on every + % execution. +``` + +### resolve_stage + +```erlang +resolve_stage(7, Msg1, Msg2, Res, ExecName, Opts) -> + ?event(ao_core, {stage, 7, ExecName, no_step_hook}, Opts), + resolve_stage(8, Msg1, Msg2, Res, ExecName, Opts); +``` + +### resolve_stage + +```erlang +resolve_stage(8, Msg1, Msg2, {ok, {resolve, Sublist}}, ExecName, Opts) -> + ?event(ao_core, {stage, 8, ExecName, subresolve_result}, Opts), + % If the result is a `{resolve, Sublist}' tuple, we need to execute it + % as a sub-resolution. +``` + +### resolve_stage + +```erlang +resolve_stage(8, Msg1, Msg2, Res, ExecName, Opts) -> + ?event(ao_core, {stage, 8, ExecName, no_subresolution_necessary}, Opts), + resolve_stage(9, Msg1, Msg2, Res, ExecName, Opts); +``` + +### resolve_stage + +```erlang +resolve_stage(9, Msg1, Msg2, {ok, Msg3}, ExecName, Opts) when is_map(Msg3) -> + ?event(ao_core, {stage, 9, ExecName, generate_hashpath}, Opts), + % Cryptographic linking. Now that we have generated the result, we + % need to cryptographically link the output to its input via a hashpath. 
+``` + +### resolve_stage + +```erlang +resolve_stage(9, Msg1, Msg2, {Status, Msg3}, ExecName, Opts) when is_map(Msg3) -> + ?event(ao_core, {stage, 9, ExecName, abnormal_status_reset_hashpath}, Opts), + ?event(hashpath, {resetting_hashpath_msg3, {msg1, Msg1}, {msg2, Msg2}, {opts, Opts}}), + % Skip cryptographic linking and reset the hashpath if the result is abnormal. +``` + +### resolve_stage + +```erlang +resolve_stage(9, Msg1, Msg2, Res, ExecName, Opts) -> + ?event(ao_core, {stage, 9, ExecName, non_map_result_skipping_hash_path}, Opts), + % Skip cryptographic linking and continue if we don't have a map that can have + % a hashpath at all. +``` + +### resolve_stage + +```erlang +resolve_stage(10, Msg1, Msg2, {ok, Msg3}, ExecName, Opts) -> + ?event(ao_core, {stage, 10, ExecName, result_caching}, Opts), + % Result caching: Optionally, cache the result of the computation locally. +``` + +### resolve_stage + +```erlang +resolve_stage(10, Msg1, Msg2, Res, ExecName, Opts) -> + ?event(ao_core, {stage, 10, ExecName, abnormal_status_skip_caching}, Opts), + % Skip result caching if the result is abnormal. +``` + +### resolve_stage + +```erlang +resolve_stage(11, Msg1, Msg2, Res, ExecName, Opts) -> + ?event(ao_core, {stage, 11, ExecName}, Opts), + % Notify processes that requested the resolution while we were executing and + % unregister ourselves from the group. 
+``` + +### resolve_stage + +```erlang +resolve_stage(12, _Msg1, _Msg2, {ok, Msg3} = Res, ExecName, Opts) -> + ?event(ao_core, {stage, 12, ExecName, maybe_spawn_worker}, Opts), + % Check if we should spawn a worker for the current execution + case {is_map(Msg3), hb_opts:get(spawn_worker, false, Opts#{ prefer => local })} of + {A, B} when (A == false) or (B == false) -> + Res; + {_, _} -> + % Spawn a worker for the current execution + WorkerPID = hb_persistent:start_worker(ExecName, Msg3, Opts), + hb_persistent:forward_work(WorkerPID, Opts), + Res + end; +``` + +### resolve_stage + +```erlang +resolve_stage(12, _Msg1, _Msg2, OtherRes, ExecName, Opts) -> + ?event(ao_core, {stage, 12, ExecName, abnormal_status_skip_spawning}, Opts), + OtherRes. +``` + +### subresolve + +Execute a sub-resolution. + +```erlang +subresolve(RawMsg1, DevID, ReqPath, Opts) when is_binary(ReqPath) -> + % If the request is a binary, we assume that it is a path. +``` + +### subresolve + +```erlang +subresolve(RawMsg1, DevID, Req, Opts) -> + % First, ensure that the message is loaded from the cache. +``` + +### maybe_profiled_apply + +If the `AO_PROFILING` macro is defined (set by building/launching with + +```erlang +maybe_profiled_apply(Func, Args, _Msg1, _Msg2, _Opts) -> + apply(Func, Args). 
+``` + +### maybe_profiled_apply + +```erlang +maybe_profiled_apply(Func, Args, Msg1, Msg2, Opts) -> + CallStack = erlang:get(ao_stack), + ?event(ao_trace, + {profiling_apply, + {func, Func}, + {args, Args}, + {call_stack, CallStack} + } + ), + Key = + case hb_maps:get(<<"device">>, Msg1, undefined, Opts) of + undefined -> + hb_util:bin(erlang:fun_to_list(Func)); + Device -> + case hb_maps:get(<<"path">>, Msg2, undefined, Opts) of + undefined -> + hb_util:bin(erlang:fun_to_list(Func)); + Path -> + MethodStr = + case hb_maps:get(<<"method">>, Msg2, undefined, Opts) of + undefined -> <<"">>; + <<"GET">> -> <<"">>; + Method -> <<"<", Method/binary, ">">> + end, + << + (hb_util:bin(Device))/binary, + "/", + MethodStr/binary, + (hb_util:bin(Path))/binary + >> + end + end, + put( + ao_stack, + case CallStack of + undefined -> [Key]; + Stack -> [Key | Stack] + end + ), + {ExecMicroSecs, Res} = timer:tc(fun() -> apply(Func, Args) end), + put(ao_stack, CallStack), + hb_event:increment(<<"ao-call-counts">>, Key, Opts), + hb_event:increment(<<"ao-total-durations">>, Key, Opts, ExecMicroSecs), + case CallStack of + undefined -> ok; + [Caller|_] -> + hb_event:increment( + <<"ao-callers:", Key/binary>>, + hb_util:bin( + [ + <<"duration:">>, + Caller + ] + ), + Opts, + ExecMicroSecs + ), + hb_event:increment( + <<"ao-callers:", Key/binary>>, + hb_util:bin( + [ + <<"calls:">>, + Caller + ]), + Opts + ) + end, + Res. 
+``` + +### ensure_message_loaded + +Ensure that a message is loaded from the cache if it is an ID, or + +```erlang +ensure_message_loaded(MsgID, Opts) when ?IS_ID(MsgID) -> + case hb_cache:read(MsgID, Opts) of + {ok, LoadedMsg} -> + LoadedMsg; + not_found -> + throw({necessary_message_not_found, <<"/">>, MsgID}) + end; +``` + +### ensure_message_loaded + +Ensure that a message is loaded from the cache if it is an ID, or + +```erlang +ensure_message_loaded(MsgLink, Opts) when ?IS_LINK(MsgLink) -> + hb_cache:ensure_loaded(MsgLink, Opts); +``` + +### ensure_message_loaded + +Ensure that a message is loaded from the cache if it is an ID, or + +```erlang +ensure_message_loaded(Msg, _Opts) -> + Msg. +``` + +### error_invalid_message + +Catch all return if the message is invalid. + +```erlang +error_invalid_message(Msg1, Msg2, Opts) -> + ?event( + ao_core, + {error, {type, invalid_message}, + {msg1, Msg1}, + {msg2, Msg2}, + {opts, Opts} + }, + Opts + ), + { + error, + #{ + <<"status">> => 400, + <<"body">> => <<"Request contains non-verifiable message.">> + } + }. +``` + +### error_infinite + +Catch all return if we are in an infinite loop. + +```erlang +error_infinite(Msg1, Msg2, Opts) -> + ?event( + ao_core, + {error, {type, infinite_recursion}, + {msg1, Msg1}, + {msg2, Msg2}, + {opts, Opts} + }, + Opts + ), + ?trace(), + { + error, + #{ + <<"status">> => 508, + <<"body">> => <<"Request creates infinite recursion.">> + } + }. 
+``` + +### error_invalid_intermediate_status + +```erlang +error_invalid_intermediate_status(Msg1, Msg2, Msg3, RemainingPath, Opts) -> + ?event( + ao_core, + {error, {type, invalid_intermediate_status}, + {msg2, Msg2}, + {msg3, Msg3}, + {remaining_path, RemainingPath}, + {opts, Opts} + }, + Opts + ), + ?event(ao_result, + {intermediate_failure, {msg1, Msg1}, + {msg2, Msg2}, {msg3, Msg3}, + {remaining_path, RemainingPath}, {opts, Opts}}), + { + error, + #{ + <<"status">> => 422, + <<"body">> => Msg3, + <<"key">> => hb_maps:get(<<"path">>, Msg2, <<"Key unknown.">>, Opts), + <<"remaining-path">> => RemainingPath + } + }. +``` + +### error_execution + +Handle an error in a device call. + +```erlang +error_execution(ExecGroup, Msg2, Whence, {Class, Exception, Stacktrace}, Opts) -> + Error = {error, Whence, {Class, Exception, Stacktrace}}, + hb_persistent:unregister_notify(ExecGroup, Msg2, Error, Opts), + ?event(ao_core, {handle_error, Error, {opts, Opts}}, Opts), + case hb_opts:get(error_strategy, throw, Opts) of + throw -> erlang:raise(Class, Exception, Stacktrace); + _ -> Error + end. +``` + +### maybe_force_message + +Force the result of a device call into a message if the result is not + +```erlang +maybe_force_message({Status, Res}, Opts) -> + case hb_opts:get(force_message, false, Opts) of + true -> force_message({Status, Res}, Opts); + false -> {Status, Res} + end; +``` + +### maybe_force_message + +Force the result of a device call into a message if the result is not + +```erlang +maybe_force_message(Res, Opts) -> + maybe_force_message({ok, Res}, Opts). 
+``` + +### force_message + +```erlang +force_message({Status, Res}, Opts) when is_list(Res) -> + force_message({Status, normalize_keys(Res, Opts)}, Opts); +``` + +### force_message + +```erlang +force_message({Status, Subres = {resolve, _}}, _Opts) -> + {Status, Subres}; +``` + +### force_message + +```erlang +force_message({Status, Literal}, _Opts) when not is_map(Literal) -> + ?event({force_message_from_literal, Literal}), + {Status, #{ <<"ao-result">> => <<"body">>, <<"body">> => Literal }}; +``` + +### force_message + +```erlang +force_message({Status, M = #{ <<"status">> := Status, <<"body">> := Body }}, _Opts) + when map_size(M) == 2 -> + ?event({force_message_from_literal_with_status, M}), + {Status, #{ + <<"status">> => Status, + <<"ao-result">> => <<"body">>, + <<"body">> => Body + }}; +``` + +### force_message + +```erlang +force_message({Status, Map}, _Opts) -> + ?event({force_message_from_map, Map}), + {Status, Map}. +``` + +### get + +Shortcut for resolving a key in a message without its status if it is + +```erlang +get(Path, Msg) -> + get(Path, Msg, #{}). +``` + +### get + +```erlang +get(Path, Msg, Opts) -> + get(Path, Msg, not_found, Opts). +``` + +### get + +```erlang +get(Path, {as, Device, Msg}, Default, Opts) -> + get( + Path, + set( + Msg, + #{ <<"device">> => Device }, + internal_opts(Opts) + ), + Default, + Opts + ); +``` + +### get + +```erlang +get(Path, Msg, Default, Opts) -> + case resolve(Msg, #{ <<"path">> => Path }, Opts#{ spawn_worker => false }) of + {ok, Value} -> Value; + {error, _} -> Default + end. +``` + +### get_first + +take a sequence of base messages and paths, then return the value of the + +```erlang +get_first(Paths, Opts) -> get_first(Paths, not_found, Opts). 
+``` + +### get_first + +take a sequence of base messages and paths, then return the value of the + +```erlang +get_first([], Default, _Opts) -> Default; +``` + +### get_first + +take a sequence of base messages and paths, then return the value of the + +```erlang +get_first([{Base, Path}|Msgs], Default, Opts) -> + case get(Path, Base, Opts) of + not_found -> get_first(Msgs, Default, Opts); + Value -> Value + end. +``` + +### keys + +Shortcut to get the list of keys from a message. + +```erlang +keys(Msg) -> keys(Msg, #{}). +``` + +### keys + +Shortcut to get the list of keys from a message. + +```erlang +keys(Msg, Opts) -> keys(Msg, Opts, keep). +``` + +### keys + +Shortcut to get the list of keys from a message. + +```erlang +keys(Msg, Opts, keep) -> + % There is quite a lot of AO-Core-specific machinery here. We: + % 1. `get' the keys from the message, via AO-Core in order to trigger the + % `keys' function on its device. +``` + +### keys + +```erlang +keys(Msg, Opts, remove) -> + lists:filter( + fun(Key) -> not lists:member(Key, ?AO_CORE_KEYS) end, + keys(Msg, Opts, keep) + ). +``` + +### set + +Shortcut for setting a key in the message using its underlying device. + +```erlang +set(RawMsg1, RawMsg2, Opts) when is_map(RawMsg2) -> + Msg1 = normalize_keys(RawMsg1, Opts), + Msg2 = hb_maps:without([<<"hashpath">>, <<"priv">>], normalize_keys(RawMsg2, Opts), Opts), + ?event(ao_internal, {set_called, {msg1, Msg1}, {msg2, Msg2}}, Opts), + % Get the next key to set. +``` + +### set + +```erlang +set(Msg1, Key, Value, Opts) -> + % For an individual key, we run deep_set with the key as the path. 
+``` + +### deep_set + +Recursively search a map, resolving keys, and set the value of the key + +```erlang +deep_set(Msg, [], Value, Opts) when is_map(Msg) or is_list(Msg) -> + device_set(Msg, <<"/">>, Value, Opts); +``` + +### deep_set + +Recursively search a map, resolving keys, and set the value of the key + +```erlang +deep_set(_Msg, [], Value, _Opts) -> + Value; +``` + +### deep_set + +Recursively search a map, resolving keys, and set the value of the key + +```erlang +deep_set(Msg, [Key], Value, Opts) -> + device_set(Msg, Key, Value, Opts); +``` + +### deep_set + +Recursively search a map, resolving keys, and set the value of the key + +```erlang +deep_set(Msg, [Key|Rest], Value, Opts) -> + case resolve(Msg, Key, Opts) of + {ok, SubMsg} -> + ?event( + {traversing_deeper_to_set, + {current_key, Key}, + {current_value, SubMsg}, + {rest, Rest} + } + ), + Res = device_set(Msg, Key, deep_set(SubMsg, Rest, Value, Opts), <<"explicit">>, Opts), + ?event({deep_set_result, {msg, Msg}, {key, Key}, {res, Res}}), + Res; + _ -> + ?event( + {creating_new_map, + {current_key, Key}, + {rest, Rest} + } + ), + Msg#{ Key => deep_set(#{}, Rest, Value, Opts) } + end. +``` + +### device_set + +Call the device's `set` function. + +```erlang +device_set(Msg, Key, Value, Opts) -> + device_set(Msg, Key, Value, <<"deep">>, Opts). +``` + +### device_set + +Call the device's `set` function. + +```erlang +device_set(Msg, Key, Value, Mode, Opts) -> + ReqWithoutMode = + case Key of + <<"path">> -> + #{ <<"path">> => <<"set_path">>, <<"value">> => Value }; + <<"/">> when is_map(Value) -> + % The value is a map and it is to be `set' at the root of the + % message. Subsequently, we call the device's `set' function + % with all of the keys found in the message, leading it to be + % merged into the message. +``` + +### remove + +Remove a key from a message, using its underlying device. + +```erlang +remove(Msg, Key) -> remove(Msg, Key, #{}). 
+``` + +### remove + +Remove a key from a message, using its underlying device. + +```erlang +remove(Msg, Key, Opts) -> + hb_util:ok( + resolve( + Msg, + #{ <<"path">> => <<"remove">>, <<"item">> => Key }, + internal_opts(Opts) + ), + Opts + ). +``` + +### truncate_args + +Truncate the arguments of a function to the number of arguments it + +```erlang +truncate_args(Fun, Args) -> + {arity, Arity} = erlang:fun_info(Fun, arity), + lists:sublist(Args, Arity). +``` + +### message_to_fun + +Calculate the Erlang function that should be called to get a value for + +```erlang +message_to_fun(Msg, Key, Opts) -> + % Get the device module from the message. +``` + +### message_to_device + +Extract the device module from a message. + +```erlang +message_to_device(Msg, Opts) -> + case dev_message:get(<<"device">>, Msg, Opts) of + {error, not_found} -> + % The message does not specify a device, so we use the default device. +``` + +### info_handler_to_fun + +Parse a handler key given by a device's `info`. + +```erlang +info_handler_to_fun(Handler, _Msg, _Key, _Opts) when is_function(Handler) -> + {add_key, Handler}; +``` + +### info_handler_to_fun + +Parse a handler key given by a device's `info`. + +```erlang +info_handler_to_fun(HandlerMap, Msg, Key, Opts) -> + case hb_maps:find(excludes, HandlerMap, Opts) of + {ok, Exclude} -> + case lists:member(Key, Exclude) of + true -> + {ok, MsgWithoutDevice} = + dev_message:remove(Msg, #{ item => device }, Opts), + message_to_fun( + MsgWithoutDevice#{ <<"device">> => default_module() }, + Key, + Opts + ); + false -> {add_key, hb_maps:get(func, HandlerMap, undefined, Opts)} + end; + error -> {add_key, hb_maps:get(func, HandlerMap, undefined, Opts)} + end. 
+``` + +### find_exported_function + +Find the function with the highest arity that has the given name, if it + +```erlang +find_exported_function(Msg, Dev, Key, MaxArity, Opts) when is_map(Dev) -> + case hb_maps:get(normalize_key(Key), normalize_keys(Dev, Opts), not_found, Opts) of + not_found -> not_found; + Fun when is_function(Fun) -> + case erlang:fun_info(Fun, arity) of + {arity, Arity} when Arity =< MaxArity -> + case is_exported(Msg, Dev, Key, Opts) of + true -> {ok, Fun}; + false -> not_found + end; + _ -> not_found + end + end; +``` + +### find_exported_function + +Find the function with the highest arity that has the given name, if it + +```erlang +find_exported_function(_Msg, _Mod, _Key, Arity, _Opts) when Arity < 0 -> + not_found; +``` + +### find_exported_function + +Find the function with the highest arity that has the given name, if it + +```erlang +find_exported_function(Msg, Mod, Key, Arity, Opts) when not is_atom(Key) -> + try hb_util:key_to_atom(Key, false) of + KeyAtom -> find_exported_function(Msg, Mod, KeyAtom, Arity, Opts) + catch _:_ -> not_found + end; +``` + +### find_exported_function + +Find the function with the highest arity that has the given name, if it + +```erlang +find_exported_function(Msg, Mod, Key, Arity, Opts) -> + case erlang:function_exported(Mod, Key, Arity) of + true -> + case is_exported(Msg, Mod, Key, Opts) of + true -> {ok, fun Mod:Key/Arity}; + false -> not_found + end; + false -> + find_exported_function(Msg, Mod, Key, Arity - 1, Opts) + end. +``` + +### is_exported + +Check if a device is guarding a key via its `exports` list. Defaults to + +```erlang +is_exported(_Msg, _Dev, info, _Opts) -> true; +``` + +### is_exported + +Check if a device is guarding a key via its `exports` list. Defaults to + +```erlang +is_exported(Msg, Dev, Key, Opts) -> + is_exported(info(Dev, Msg, Opts), Key, Opts). 
+``` + +### is_exported + +```erlang +is_exported(_, info, _Opts) -> true; +``` + +### is_exported + +```erlang +is_exported(Info = #{ excludes := Excludes }, Key, Opts) -> + case lists:member(normalize_key(Key), lists:map(fun normalize_key/1, Excludes)) of + true -> false; + false -> is_exported(hb_maps:remove(excludes, Info, Opts), Key, Opts) + end; +``` + +### is_exported + +```erlang +is_exported(#{ exports := Exports }, Key, _Opts) -> + lists:member(normalize_key(Key), lists:map(fun normalize_key/1, Exports)); +``` + +### is_exported + +Convert a key to a binary in normalized form. + +```erlang +is_exported(_Info, _Key, _Opts) -> true. +``` + +### normalize_key + +Convert a key to a binary in normalized form. + +```erlang +normalize_key(Key) -> normalize_key(Key, #{}). +``` + +### normalize_key + +Convert a key to a binary in normalized form. + +```erlang +normalize_key(Key, _Opts) when is_binary(Key) -> Key; +``` + +### normalize_key + +Convert a key to a binary in normalized form. + +```erlang +normalize_key(Key, _Opts) when is_atom(Key) -> atom_to_binary(Key); +``` + +### normalize_key + +Convert a key to a binary in normalized form. + +```erlang +normalize_key(Key, _Opts) when is_integer(Key) -> integer_to_binary(Key); +``` + +### normalize_key + +Convert a key to a binary in normalized form. + +```erlang +normalize_key(Key, _Opts) when is_list(Key) -> + case hb_util:is_string_list(Key) of + true -> normalize_key(list_to_binary(Key)); + false -> + iolist_to_binary( + lists:join( + <<"/">>, + lists:map(fun normalize_key/1, Key) + ) + ) + end. +``` + +### normalize_keys + +Ensure that a message is processable by the AO-Core resolver: No lists. + +```erlang +normalize_keys(Msg) -> normalize_keys(Msg, #{}). +``` + +### normalize_keys + +Ensure that a message is processable by the AO-Core resolver: No lists. 
+ +```erlang +normalize_keys(Msg1, Opts) when is_list(Msg1) -> + normalize_keys( + hb_maps:from_list( + lists:zip( + lists:seq(1, length(Msg1)), + Msg1 + ) + ), + Opts + ); +``` + +### normalize_keys + +Ensure that a message is processable by the AO-Core resolver: No lists. + +```erlang +normalize_keys(Map, Opts) when is_map(Map) -> + hb_maps:from_list( + lists:map( + fun({Key, Value}) when is_map(Value) -> + {hb_ao:normalize_key(Key), Value}; + ({Key, Value}) -> + {hb_ao:normalize_key(Key), Value} + end, + hb_maps:to_list(Map, Opts) + ) + ); +``` + +### normalize_keys + +Ensure that a message is processable by the AO-Core resolver: No lists. +Load a device module from its name or a message ID. + +```erlang +normalize_keys(Other, _Opts) -> Other. +``` + +### load_device + +Ensure that a message is processable by the AO-Core resolver: No lists. +Load a device module from its name or a message ID. + +```erlang +load_device(Map, _Opts) when is_map(Map) -> {ok, Map}; +``` + +### load_device + +Ensure that a message is processable by the AO-Core resolver: No lists. +Load a device module from its name or a message ID. + +```erlang +load_device(ID, _Opts) when is_atom(ID) -> + try ID:module_info(), {ok, ID} + catch _:_ -> {error, not_loadable} + end; +``` + +### load_device + +Ensure that a message is processable by the AO-Core resolver: No lists. +Load a device module from its name or a message ID. 
+ +```erlang +load_device(ID, Opts) when ?IS_ID(ID) -> + ?event(device_load, {requested_load, {id, ID}}, Opts), + case hb_opts:get(load_remote_devices, false, Opts) of + false -> + {error, remote_devices_disabled}; + true -> + ?event(device_load, {loading_from_cache, {id, ID}}, Opts), + {ok, Msg} = hb_cache:read(ID, Opts), + ?event(device_load, {received_device, {id, ID}, {msg, Msg}}, Opts), + TrustedSigners = hb_opts:get(trusted_device_signers, [], Opts), + Trusted = + lists:any( + fun(Signer) -> + lists:member(Signer, TrustedSigners) + end, + hb_message:signers(Msg, Opts) + ), + ?event(device_load, + {verifying_device_trust, + {id, ID}, + {trusted, Trusted}, + {signers, hb_message:signers(Msg, Opts)} + }, + Opts + ), + case Trusted of + false -> {error, device_signer_not_trusted}; + true -> + ?event(device_load, {loading_device, {id, ID}}, Opts), + case hb_maps:get(<<"content-type">>, Msg, undefined, Opts) of + <<"application/beam">> -> + case verify_device_compatibility(Msg, Opts) of + ok -> + ModName = + hb_util:key_to_atom( + hb_maps:get( + <<"module-name">>, + Msg, + undefined, + Opts + ), + new_atoms + ), + LoadRes = + erlang:load_module( + ModName, + hb_maps:get( + <<"body">>, + Msg, + undefined, + Opts + ) + ), + case LoadRes of + {module, _} -> + {ok, ModName}; + {error, Reason} -> + {error, {device_load_failed, Reason}} + end; + {error, Reason} -> + {error, {device_load_failed, Reason}} + end; + Other -> + {error, + {device_load_failed, + {incompatible_content_type, Other}, + {expected, <<"application/beam">>}, + {found, Other} + } + } + end + end + end; +``` + +### load_device + +Ensure that a message is processable by the AO-Core resolver: No lists. +Load a device module from its name or a message ID. 
+ +```erlang +load_device(ID, Opts) -> + NormKey = + case is_atom(ID) of + true -> ID; + false -> normalize_key(ID) + end, + case lists:search( + fun (#{ <<"name">> := Name }) -> Name =:= NormKey end, + Preloaded = hb_opts:get(preloaded_devices, [], Opts) + ) of + false -> {error, {module_not_admissable, NormKey, Preloaded}}; + {value, #{ <<"module">> := Mod }} -> load_device(Mod, Opts) + end. +``` + +### verify_device_compatibility + +Verify that a device is compatible with the current machine. + +```erlang +verify_device_compatibility(Msg, Opts) -> + ?event(device_load, {verifying_device_compatibility, {msg, Msg}}, Opts), + Required = + lists:filtermap( + fun({<<"requires-", Key/binary>>, Value}) -> + {true, + { + hb_util:key_to_atom( + hb_ao:normalize_key(Key), + new_atoms + ), + hb_cache:ensure_loaded(Value, Opts) + } + }; + (_) -> false + end, + hb_maps:to_list(Msg, Opts) + ), + ?event(device_load, + {discerned_requirements, + {required, Required}, + {msg, Msg} + }, + Opts + ), + FailedToMatch = + lists:filtermap( + fun({Property, Value}) -> + % The values of these properties are _not_ 'keys', but we normalize + % them as such in order to make them comparable. +``` + +### info + +Get the info map for a device, optionally giving it a message if the + +```erlang +info(Msg, Opts) -> + info(message_to_device(Msg, Opts), Msg, Opts). +``` + +### info + +```erlang +info(DevMod, Msg, Opts) -> + %?event({calculating_info, {dev, DevMod}, {msg, Msg}}), + case find_exported_function(Msg, DevMod, info, 2, Opts) of + {ok, Fun} -> + Res = apply(Fun, truncate_args(Fun, [Msg, Opts])), + % ?event({ + % info_result, + % {dev, DevMod}, + % {args, truncate_args(Fun, [Msg])}, + % {result, Res} + % }), + Res; + not_found -> #{} + end. +``` + +### default_module + +The default device is the identity device, which simply returns the +The execution options that are used internally by this module + +```erlang +default_module() -> dev_message. 
+``` + +### internal_opts + +The default device is the identity device, which simply returns the +The execution options that are used internally by this module + +```erlang +internal_opts(Opts) -> + hb_maps:merge(Opts, #{ + topic => hb_opts:get(topic, ao_internal, Opts), + hashpath => ignore, + cache_control => [<<"no-cache">>, <<"no-store">>], + spawn_worker => false, + await_inprogress => false +``` + +--- + +*Generated from [hb_ao.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_ao.erl)* diff --git a/docs/book/src/hb_ao_test_vectors.erl.md b/docs/book/src/hb_ao_test_vectors.erl.md new file mode 100644 index 000000000..9a27b1619 --- /dev/null +++ b/docs/book/src/hb_ao_test_vectors.erl.md @@ -0,0 +1,892 @@ +# hb_ao_test_vectors + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_ao_test_vectors.erl) + +Uses a series of different `Opts` values to test the resolution engine's +execution under different circumstances. + +--- + +### run_test + +Uses a series of different `Opts` values to test the resolution engine's +Easy hook to make a test executable via the command line: + +```erlang +run_test() -> + multiple_as_subresolutions_test(#{}). +``` + +### suite_test_ + +Run each test in the file with each set of options. Start and reset + +```erlang +suite_test_() -> + hb_test_utils:suite_with_opts(test_suite(), test_opts()). +``` + +### benchmark_test_ + +```erlang +benchmark_test_() -> + hb_test_utils:suite_with_opts(benchmark_suite(), test_opts()). 
+``` + +### test_suite + +```erlang +test_suite() -> + [ + {resolve_simple, "resolve simple", + fun resolve_simple_test/1}, + {resolve_id, "resolve id", + fun resolve_id_test/1}, + {start_as, "start as", + fun start_as_test/1}, + {start_as_with_parameters, "start as with parameters", + fun start_as_with_parameters_test/1}, + {load_as, "load as", + fun load_as_test/1}, + {as_path, "as path", + fun as_path_test/1}, + {continue_as, "continue as", + fun continue_as_test/1}, + {multiple_as_subresolutions, "multiple as subresolutions", + fun multiple_as_subresolutions_test/1}, + {resolve_key_twice, "resolve key twice", + fun resolve_key_twice_test/1}, + {resolve_from_multiple_keys, "resolve from multiple keys", + fun resolve_from_multiple_keys_test/1}, + {resolve_path_element, "resolve path element", + fun resolve_path_element_test/1}, + {resolve_binary_key, "resolve binary key", + fun resolve_binary_key_test/1}, + {key_to_binary, "key to binary", + fun key_to_binary_test/1}, + {key_from_id_device_with_args, "key from id device with args", + fun key_from_id_device_with_args_test/1}, + {device_with_handler_function, "device with handler function", + fun device_with_handler_function_test/1}, + {device_with_default_handler_function, + "device with default handler function", + fun device_with_default_handler_function_test/1}, + {basic_get, "basic get", + fun basic_get_test/1}, + {recursive_get, "recursive get", + fun recursive_get_test/1}, + {deep_recursive_get, "deep recursive get", + fun deep_recursive_get_test/1}, + {basic_set, "basic set", + fun basic_set_test/1}, + {get_with_device, "get with device", + fun get_with_device_test/1}, + {get_as_with_device, "get as with device", + fun get_as_with_device_test/1}, + {set_with_device, "set with device", + fun set_with_device_test/1}, + {deep_set, "deep set", + fun deep_set_test/1}, + {deep_set_with_device, "deep set with device", + fun deep_set_with_device_test/1}, + {device_exports, "device exports", + fun 
device_exports_test/1}, + {device_excludes, "device excludes", + fun device_excludes_test/1}, + {denormalized_device_key, "denormalized device key", + fun denormalized_device_key_test/1}, + {list_transform, "list transform", + fun list_transform_test/1}, + {step_hook, "step hook", + fun step_hook_test/1} + ]. +``` + +### benchmark_suite + +```erlang +benchmark_suite() -> + [ + {benchmark_simple, "simple resolution benchmark", + fun benchmark_simple_test/1}, + {benchmark_multistep, "multistep resolution benchmark", + fun benchmark_multistep_test/1}, + {benchmark_get, "get benchmark", + fun benchmark_get_test/1}, + {benchmark_set, "single value set benchmark", + fun benchmark_set_test/1}, + {benchmark_set_multiple, "set two keys benchmark", + fun benchmark_set_multiple_test/1}, + {benchmark_set_multiple_deep, "set two keys deep benchmark", + fun benchmark_set_multiple_deep_test/1} + ]. +``` + +### test_opts + +```erlang +test_opts() -> + [ + #{ + name => normal, + desc => "Default opts", + opts => #{}, + skip => [] + }, + #{ + name => without_hashpath, + desc => "Default without hashpath", + opts => #{ + hashpath => ignore + }, + skip => [] + }, + #{ + name => no_cache, + desc => "No cache read or write", + opts => #{ + hashpath => ignore, + cache_control => [<<"no-cache">>, <<"no-store">>], + spawn_worker => false, + store => #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST/fs">> + } + }, + skip => [load_as] + }, + #{ + name => only_store, + desc => "Store, don't read", + opts => #{ + hashpath => update, + cache_control => [<<"no-cache">>], + spawn_worker => false, + store => #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST/fs">> + } + }, + skip => [ + denormalized_device_key, + deep_set_with_device, + load_as + ], + reset => false + }, + #{ + name => only_if_cached, + desc => "Only read, don't exec", + opts => #{ + hashpath => ignore, + cache_control => [<<"only-if-cached">>], + spawn_worker => false, + store => #{ + 
<<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST/fs">> + } + }, + skip => [ + % Exclude tests that return a list on its own for now, as raw + % lists cannot be cached yet. +``` + +### exec_dummy_device + +Ensure that we can read a device from the cache then execute it. By + +```erlang +exec_dummy_device(SigningWallet, Opts) -> + % Compile the test device and store it in an accessible cache to the execution + % environment. +``` + +### load_device_test + +```erlang +load_device_test() -> + % Establish an execution environment which trusts the device author. +``` + +### untrusted_load_device_test + +```erlang +untrusted_load_device_test() -> + % Establish an execution environment which does not trust the device author. +``` + +### resolve_simple_test + +```erlang +resolve_simple_test(Opts) -> + Res = hb_ao:resolve(#{ <<"a">> => <<"RESULT">> }, <<"a">>, Opts), + ?assertEqual({ok, <<"RESULT">>}, Res). +``` + +### resolve_id_test + +```erlang +resolve_id_test(Opts) -> + ?assertMatch( + ID when byte_size(ID) == 43, + hb_ao:get(id, #{ test_key => <<"1">> }, Opts) + ). +``` + +### resolve_key_twice_test + +```erlang +resolve_key_twice_test(Opts) -> + % Ensure that the same message can be resolved again. +``` + +### resolve_from_multiple_keys_test + +```erlang +resolve_from_multiple_keys_test(Opts) -> + ?assertEqual( + {ok, [<<"a">>]}, + hb_ao:resolve(#{ <<"a">> => <<"1">>, <<"priv_a">> => <<"2">> }, <<"keys">>, Opts) + ). +``` + +### resolve_path_element_test + +```erlang +resolve_path_element_test(Opts) -> + ?assertEqual( + {ok, [<<"test_path">>]}, + hb_ao:resolve(#{ <<"path">> => [<<"test_path">>] }, <<"path">>, Opts) + ), + ?assertEqual( + {ok, [<<"a">>]}, + hb_ao:resolve(#{ <<"Path">> => [<<"a">>] }, <<"Path">>, Opts) + ). 
+``` + +### key_to_binary_test + +```erlang +key_to_binary_test(Opts) -> + ?assertEqual(<<"a">>, hb_ao:normalize_key(a, Opts)), + ?assertEqual(<<"a">>, hb_ao:normalize_key(<<"a">>, Opts)), + ?assertEqual(<<"a">>, hb_ao:normalize_key("a", Opts)). +``` + +### resolve_binary_key_test + +```erlang +resolve_binary_key_test(Opts) -> + ?assertEqual( + {ok, <<"RESULT">>}, + hb_ao:resolve(#{ a => <<"RESULT">> }, <<"a">>, Opts) + ), + ?assertEqual( + {ok, <<"1">>}, + hb_ao:resolve( + #{ + <<"Test-Header">> => <<"1">> + }, + <<"Test-Header">>, + Opts + ) + ). +``` + +### generate_device_with_keys_using_args + +Generates a test device with three keys, each of which uses + +```erlang +generate_device_with_keys_using_args() -> + #{ + key_using_only_state => + fun(State) -> + {ok, + <<(hb_maps:get(<<"state_key">>, State))/binary>> + } + end, + key_using_state_and_msg => + fun(State, Msg) -> + {ok, + << + (hb_maps:get(<<"state_key">>, State))/binary, + (hb_maps:get(<<"msg_key">>, Msg))/binary + >> + } + end, + key_using_all => + fun(State, Msg, Opts) -> + {ok, + << + (hb_maps:get(<<"state_key">>, State, undefined, Opts))/binary, + (hb_maps:get(<<"msg_key">>, Msg, undefined, Opts))/binary, + (hb_maps:get(<<"opts_key">>, Opts, undefined, Opts))/binary + >> + } + end + }. +``` + +### gen_default_device + +Create a simple test device that implements the default handler. + +```erlang +gen_default_device() -> + #{ + info => + fun() -> + #{ + default => + fun(_, _State) -> + {ok, <<"DEFAULT">>} + end + } + end, + <<"state_key">> => + fun(_) -> + {ok, <<"STATE">>} + end + }. +``` + +### gen_handler_device + +Create a simple test device that implements the handler key. + +```erlang +gen_handler_device() -> + #{ + info => + fun() -> + #{ + handler => + fun(<<"set">>, M1, M2, Opts) -> + dev_message:set(M1, M2, Opts); + (_, _, _, _) -> + {ok, <<"HANDLER VALUE">>} + end + } + end + }. +``` + +### key_from_id_device_with_args_test + +Test that arguments are passed to a device key as expected. 
+ +```erlang +key_from_id_device_with_args_test(Opts) -> + Msg = + #{ + device => generate_device_with_keys_using_args(), + state_key => <<"1">> + }, + ?assertEqual( + {ok, <<"1">>}, + hb_ao:resolve( + Msg, + #{ + <<"path">> => <<"key_using_only_state">>, + <<"msg_key">> => <<"2">> % Param message, which is ignored + }, + Opts + ) + ), + ?assertEqual( + {ok, <<"13">>}, + hb_ao:resolve( + Msg, + #{ + <<"path">> => <<"key_using_state_and_msg">>, + <<"msg_key">> => <<"3">> % Param message, with value to add + }, + Opts + ) + ), + ?assertEqual( + {ok, <<"1337">>}, + hb_ao:resolve( + Msg, + #{ + <<"path">> => <<"key_using_all">>, + <<"msg_key">> => <<"3">> % Param message + }, + Opts#{ + <<"opts_key">> => <<"37">>, + <<"cache_control">> => [<<"no-cache">>, <<"no-store">>] + } + ) + ). +``` + +### device_with_handler_function_test + +```erlang +device_with_handler_function_test(Opts) -> + Msg = + #{ + device => gen_handler_device(), + test_key => <<"BAD">> + }, + ?assertEqual( + {ok, <<"HANDLER VALUE">>}, + hb_ao:resolve(Msg, <<"test_key">>, Opts) + ). +``` + +### device_with_default_handler_function_test + +```erlang +device_with_default_handler_function_test(Opts) -> + Msg = + #{ + device => gen_default_device() + }, + ?assertEqual( + {ok, <<"STATE">>}, + hb_ao:resolve(Msg, <<"state_key">>, Opts) + ), + ?assertEqual( + {ok, <<"DEFAULT">>}, + hb_ao:resolve(Msg, <<"any_random_key">>, Opts) + ). +``` + +### basic_get_test + +```erlang +basic_get_test(Opts) -> + Msg = #{ <<"key1">> => <<"value1">>, <<"key2">> => <<"value2">> }, + ?assertEqual(<<"value1">>, hb_ao:get(<<"key1">>, Msg, Opts)), + ?assertEqual(<<"value2">>, hb_ao:get(<<"key2">>, Msg, Opts)), + ?assertEqual(<<"value2">>, hb_ao:get(<<"key2">>, Msg, Opts)), + ?assertEqual(<<"value2">>, hb_ao:get([<<"key2">>], Msg, Opts)). 
+``` + +### recursive_get_test + +```erlang +recursive_get_test(Opts) -> + Msg = #{ + <<"key1">> => <<"value1">>, + <<"key2">> => #{ + <<"key3">> => <<"value3">>, + <<"key4">> => #{ + <<"key5">> => <<"value5">>, + <<"key6">> => #{ + <<"key7">> => <<"value7">> + } + } + } + }, + ?assertEqual( + {ok, <<"value1">>}, + hb_ao:resolve(Msg, #{ <<"path">> => <<"key1">> }, Opts) + ), + ?assertEqual(<<"value1">>, hb_ao:get(<<"key1">>, Msg, Opts)), + ?assertEqual( + {ok, <<"value3">>}, + hb_ao:resolve(Msg, #{ <<"path">> => [<<"key2">>, <<"key3">>] }, Opts) + ), + ?assertEqual(<<"value3">>, hb_ao:get([<<"key2">>, <<"key3">>], Msg, Opts)), + ?assertEqual(<<"value3">>, hb_ao:get(<<"key2/key3">>, Msg, Opts)). +``` + +### deep_recursive_get_test + +```erlang +deep_recursive_get_test(Opts) -> + Msg = #{ + <<"key1">> => <<"value1">>, + <<"key2">> => #{ + <<"key3">> => <<"value3">>, + <<"key4">> => #{ + <<"key5">> => <<"value5">>, + <<"key6">> => #{ + <<"key7">> => <<"value7">> + } + } + } + }, + ?assertEqual(<<"value7">>, hb_ao:get(<<"key2/key4/key6/key7">>, Msg, Opts)). +``` + +### basic_set_test + +```erlang +basic_set_test(Opts) -> + Msg = #{ <<"key1">> => <<"value1">>, <<"key2">> => <<"value2">> }, + UpdatedMsg = hb_ao:set(Msg, #{ <<"key1">> => <<"new_value1">> }, Opts), + ?event({set_key_complete, {key, <<"key1">>}, {value, <<"new_value1">>}}), + ?assertEqual(<<"new_value1">>, hb_ao:get(<<"key1">>, UpdatedMsg, Opts)), + ?assertEqual(<<"value2">>, hb_ao:get(<<"key2">>, UpdatedMsg, Opts)). +``` + +### get_with_device_test + +```erlang +get_with_device_test(Opts) -> + Msg = + #{ + <<"device">> => generate_device_with_keys_using_args(), + <<"state_key">> => <<"STATE">> + }, + ?assertEqual(<<"STATE">>, hb_ao:get(<<"state_key">>, Msg, Opts)), + ?assertEqual(<<"STATE">>, hb_ao:get(<<"key_using_only_state">>, Msg, Opts)). 
+``` + +### get_as_with_device_test + +```erlang +get_as_with_device_test(Opts) -> + Msg = + #{ + <<"device">> => gen_handler_device(), + <<"test_key">> => <<"ACTUAL VALUE">> + }, + ?assertEqual( + <<"HANDLER VALUE">>, + hb_ao:get(test_key, Msg, Opts) + ), + ?assertEqual( + <<"ACTUAL VALUE">>, + hb_ao:get(test_key, {as, dev_message, Msg}, Opts) + ). +``` + +### set_with_device_test + +```erlang +set_with_device_test(Opts) -> + Msg = + #{ + <<"device">> => + #{ + <<"set">> => + fun(State, _Msg) -> + Acc = hb_maps:get(<<"set_count">>, State, <<"">>, Opts), + {ok, + State#{ + <<"set_count">> => << Acc/binary, "." >> + } + } + end + }, + <<"state_key">> => <<"STATE">> + }, + ?assertEqual(<<"STATE">>, hb_ao:get(<<"state_key">>, Msg, Opts)), + SetOnce = hb_ao:set(Msg, #{ <<"state_key">> => <<"SET_ONCE">> }, Opts), + ?assertEqual(<<".">>, hb_ao:get(<<"set_count">>, SetOnce, Opts)), + SetTwice = hb_ao:set(SetOnce, #{ <<"state_key">> => <<"SET_TWICE">> }, Opts), + ?assertEqual(<<"..">>, hb_ao:get(<<"set_count">>, SetTwice, Opts)), + ?assertEqual(<<"STATE">>, hb_ao:get(<<"state_key">>, SetTwice, Opts)). +``` + +### deep_set_test + +```erlang +deep_set_test(Opts) -> + % First validate second layer changes are handled correctly. +``` + +### deep_set_new_messages_test + +```erlang +deep_set_new_messages_test() -> + Opts = hb_maps:get(opts, hd(test_opts())), + % Test that new messages are created when the path does not exist. +``` + +### deep_set_with_device_test + +```erlang +deep_set_with_device_test(Opts) -> + Device = #{ + set => + fun(Msg1, Msg2) -> + % A device where the set function modifies the key + % and adds a modified flag. 
+``` + +### device_exports_test + +```erlang +device_exports_test(Opts) -> + Msg = #{ <<"device">> => dev_message }, + ?assert(hb_ao:is_exported(Msg, dev_message, info, Opts)), + ?assert(hb_ao:is_exported(Msg, dev_message, set, Opts)), + ?assert( + hb_ao:is_exported( + Msg, + dev_message, + not_explicitly_exported, + Opts + ) + ), + Dev = #{ + info => fun() -> #{ exports => [set] } end, + set => fun(_, _) -> {ok, <<"SET">>} end + }, + Msg2 = #{ <<"device">> => Dev }, + ?assert(hb_ao:is_exported(Msg2, Dev, info, Opts)), + ?assert(hb_ao:is_exported(Msg2, Dev, set, Opts)), + ?assert(not hb_ao:is_exported(Msg2, Dev, not_exported, Opts)), + Dev2 = #{ + info => + fun() -> + #{ + exports => [test1, <<"test2">>], + handler => + fun() -> + {ok, <<"Handler-Value">>} + end + } + end + }, + Msg3 = #{ <<"device">> => Dev2, <<"test1">> => <<"BAD1">>, <<"test3">> => <<"GOOD3">> }, + ?assertEqual(<<"Handler-Value">>, hb_ao:get(<<"test1">>, Msg3, Opts)), + ?assertEqual(<<"Handler-Value">>, hb_ao:get(<<"test2">>, Msg3, Opts)), + ?assertEqual(<<"GOOD3">>, hb_ao:get(<<"test3">>, Msg3, Opts)), + ?assertEqual(<<"GOOD4">>, + hb_ao:get( + <<"test4">>, + hb_ao:set(Msg3, <<"test4">>, <<"GOOD4">>, Opts) + ) + ), + ?assertEqual(not_found, hb_ao:get(<<"test5">>, Msg3, Opts)). +``` + +### device_excludes_test + +```erlang +device_excludes_test(Opts) -> + % Create a device that returns an identifiable message for any key, but also + % sets excludes to [set], such that the message can be modified using the + % default handler. +``` + +### denormalized_device_key_test + +```erlang +denormalized_device_key_test(Opts) -> + Msg = #{ <<"device">> => dev_test }, + ?assertEqual(dev_test, hb_ao:get(device, Msg, Opts)), + ?assertEqual(dev_test, hb_ao:get(<<"device">>, Msg, Opts)), + ?assertEqual({module, dev_test}, + erlang:fun_info( + element(3, hb_ao:message_to_fun(Msg, test_func, Opts)), + module + ) + ). 
+``` + +### list_transform_test + +```erlang +list_transform_test(Opts) -> + Msg = [<<"A">>, <<"B">>, <<"C">>, <<"D">>, <<"E">>], + ?assertEqual(<<"A">>, hb_ao:get(1, Msg, Opts)), + ?assertEqual(<<"B">>, hb_ao:get(2, Msg, Opts)), + ?assertEqual(<<"C">>, hb_ao:get(3, Msg, Opts)), + ?assertEqual(<<"D">>, hb_ao:get(4, Msg, Opts)), + ?assertEqual(<<"E">>, hb_ao:get(5, Msg, Opts)). +``` + +### start_as_test + +```erlang +start_as_test(Opts) -> + ?assertEqual( + {ok, <<"GOOD_FUNCTION">>}, + hb_ao:resolve_many( + [ + {as, <<"test-device@1.0">>, #{ <<"path">> => <<>> }}, + #{ <<"path">> => <<"test_func">> } + ], + Opts + ) + ). +``` + +### start_as_with_parameters_test + +```erlang +start_as_with_parameters_test(Opts) -> + % Resolve a key on a message that has its device set with `as'. +``` + +### load_as_test + +```erlang +load_as_test(Opts) -> + % Load a message as a device with the `as' keyword. +``` + +### as_path_test + +```erlang +as_path_test(Opts) -> + % Create a message with the test device, which implements the test_func + % function. It normally returns `GOOD_FUNCTION'. +``` + +### continue_as_test + +```erlang +continue_as_test(Opts) -> + % Resolve a list of messages in sequence, swapping the device in the middle. +``` + +### multiple_as_subresolutions_test + +```erlang +multiple_as_subresolutions_test(Opts) -> + % Test that multiple as subresolutions in a sequence are handled correctly. +``` + +### step_hook_test + +```erlang +step_hook_test(InitOpts) -> + % Test that the step hook is called correctly. We do this by sending ourselves + % a message each time the hook is called. We also send a `reference', such + % that this test is uniquely identified and further/prior tests do not affect + % it. 
+``` + +### benchmark_simple_test + +```erlang +benchmark_simple_test(Opts) -> + Time = + hb_test_utils:benchmark_iterations( + fun(I) -> hb_ao:resolve(#{ <<"a">> => I }, <<"a">>, Opts) end, + ?BENCHMARK_ITERATIONS + ), + hb_test_utils:benchmark_print( + <<"Single-step resolutions:">>, + ?BENCHMARK_ITERATIONS, + Time + ). +``` + +### benchmark_multistep_test + +```erlang +benchmark_multistep_test(Opts) -> + Time = + hb_test_utils:benchmark_iterations( + fun(I) -> + hb_ao:resolve( + #{ + <<"iteration">> => I, + <<"a">> => #{ + <<"b">> => #{ <<"return">> => I } + } + }, + <<"a/b/return">>, + Opts + ) + end, + ?BENCHMARK_ITERATIONS + ), + hb_test_utils:benchmark_print( + <<"Multistep resolutions:">>, + ?BENCHMARK_ITERATIONS, + Time + ). +``` + +### benchmark_get_test + +```erlang +benchmark_get_test(Opts) -> + Time = + hb_test_utils:benchmark_iterations( + fun(I) -> + hb_ao:get( + <<"a">>, + #{ <<"a">> => <<"1">>, <<"iteration">> => I }, + Opts + ) + end, + ?BENCHMARK_ITERATIONS + ), + hb_test_utils:benchmark_print( + <<"Get operations:">>, + ?BENCHMARK_ITERATIONS, + Time + ). +``` + +### benchmark_set_test + +```erlang +benchmark_set_test(Opts) -> + Time = + hb_test_utils:benchmark_iterations( + fun(I) -> + hb_ao:set( + #{ <<"a">> => <<"1">>, <<"iteration">> => I }, + <<"a">>, + <<"2">>, + Opts + ) + end, + ?BENCHMARK_ITERATIONS + ), + hb_test_utils:benchmark_print( + <<"Single value set operations:">>, + ?BENCHMARK_ITERATIONS, + Time + ). +``` + +### benchmark_set_multiple_test + +```erlang +benchmark_set_multiple_test(Opts) -> + Time = + hb_test_utils:benchmark_iterations( + fun(I) -> + hb_ao:set( + #{ <<"a">> => <<"1">>, <<"iteration">> => I }, + #{ <<"a">> => <<"1a">>, <<"b">> => <<"2">> }, + Opts + ) + end, + ?BENCHMARK_ITERATIONS + ), + hb_test_utils:benchmark_print( + <<"Set two keys operations:">>, + ?BENCHMARK_ITERATIONS, + Time + ). 
+``` + +### benchmark_set_multiple_deep_test + +```erlang +benchmark_set_multiple_deep_test(Opts) -> + Time = + hb_test_utils:benchmark_iterations( + fun(I) -> + hb_ao:set( + #{ <<"a">> => #{ <<"b">> => <<"1">> } }, + #{ <<"a">> => #{ <<"b">> => <<"2">>, <<"c">> => I } }, + Opts + ) + end, + ?BENCHMARK_ITERATIONS + ), + hb_test_utils:benchmark_print( + <<"Set two keys operations:">>, + ?BENCHMARK_ITERATIONS, + Time +``` + +--- + +*Generated from [hb_ao_test_vectors.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_ao_test_vectors.erl)* diff --git a/docs/book/src/hb_app.erl.md b/docs/book/src/hb_app.erl.md new file mode 100644 index 000000000..5ad52cd50 --- /dev/null +++ b/docs/book/src/hb_app.erl.md @@ -0,0 +1,37 @@ +# hb_app + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_app.erl) + +The main HyperBEAM application module. + +--- + +## Exported Functions + +- `start/2` +- `stop/1` + +--- + +### start + +The main HyperBEAM application module. + +```erlang +start(_StartType, _StartArgs) -> + hb:init(), + hb_sup:start_link(), + ok = dev_scheduler_registry:start(), + _TimestampServer = ar_timestamp:start(), + {ok, _} = hb_http_server:start(). +``` + +### stop + +```erlang +stop(_State) -> +``` + +--- + +*Generated from [hb_app.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_app.erl)* diff --git a/docs/book/src/hb_beamr.erl.md b/docs/book/src/hb_beamr.erl.md new file mode 100644 index 000000000..bc2f18a17 --- /dev/null +++ b/docs/book/src/hb_beamr.erl.md @@ -0,0 +1,443 @@ +# hb_beamr + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_beamr.erl) + +BEAMR: A WAMR wrapper for BEAM. +Beamr is a library that allows you to run WASM modules in BEAM, using the +Webassembly Micro Runtime (WAMR) as its engine. Each WASM module is +executed using a Linked-In Driver (LID) that is loaded into BEAM. 
It is +designed with a focus on supporting long-running WASM executions that +interact with Erlang functions and processes easily. +Because each WASM module runs as an independent async worker, if you plan +to run many instances in parallel, you should be sure to configure the +BEAM to have enough async worker threads enabled (see `erl +A N` in the +Erlang manuals). +The core API is simple: +
+    start(WasmBinary) -> {ok, Port, Imports, Exports}
+        Where:
+            WasmBinary is the WASM binary to load.
+            Port is the port to the LID.
+            Imports is a list of tuples of the form {Module, Function,
+                Args, Signature}.
+            Exports is a list of tuples of the form {Function, Args,
+                Signature}.
+    stop(Port) -> ok
+    call(Port, FunctionName, Args) -> {ok, Result}
+        Where:
+            FunctionName is the name of the function to call.
+            Args is a list of Erlang terms (converted to WASM values by
+                BEAMR) that match the signature of the function.
+            Result is a list of Erlang terms (converted from WASM values).
+    call(Port, FunName, Args[, ImportFun, State, Opts]) -> {ok, Res, NewState}
+        Where:
+            ImportFun is a function that will be called upon each import.
+            ImportFun must have an arity of 3: Taking an arbitrary `state`
+            term, a map containing the `instance`, `module`, `func`, `args`,
+            and `func_sig` of the import, and the node's `options` map.
+            It must return a tuple of the form {ok, Response, NewState}.
+    serialize(Port) -> {ok, Mem}
+        Where:
+            Port is the port to the LID.
+            Mem is a binary representing the full WASM state.
+    deserialize(Port, Mem) -> ok
+        Where:
+            Port is the port to the LID.
+            Mem is a binary output of a previous `serialize/1` call.
+
+BEAMR was designed for use in the HyperBEAM project, but is suitable for +deployment in other Erlang applications that need to run WASM modules. PRs +are welcome. + +--- + +## Exported Functions + +- `call/3` +- `call/4` +- `call/5` +- `call/6` +- `deserialize/2` +- `serialize/1` +- `start/1` +- `start/2` +- `stop/1` +- `stub/3` +- `wasm_send/2` + +--- + +### load_driver + +BEAMR: A WAMR wrapper for BEAM. +Load the driver for the WASM executor. + +```erlang +load_driver() -> + case erl_ddll:load(code:priv_dir(hb), ?MODULE) of + ok -> ok; + {error, already_loaded} -> ok; + {error, Error} -> {error, Error} + end. +``` + +### start + +Start a WASM executor context. Yields a port to the LID, and the + +```erlang +start(WasmBinary) when is_binary(WasmBinary) -> + start(WasmBinary, wasm). +``` + +### start + +```erlang +start(WasmBinary, Mode) when is_binary(WasmBinary) -> + ?event({loading_module, {bytes, byte_size(WasmBinary)}, Mode}), + Self = self(), + WASM = spawn( + fun() -> + ok = load_driver(), + Port = open_port({spawn, "hb_beamr"}, []), + Port ! {self(), {command, term_to_binary({init, WasmBinary, Mode})}}, + ?event({waiting_for_init_from, Port}), + worker(Port, Self) + end + ), + receive + {execution_result, Imports, Exports} -> + ?event( + {wasm_init_success, + {imports, Imports}, + {exports, Exports}}), + {ok, WASM, Imports, Exports}; + {error, Error} -> + ?event({wasm_init_error, Error}), + stop(WASM), + {error, Error} + end. +``` + +### worker + +A worker process that is responsible for handling a WASM instance. + +```erlang +worker(Port, Listener) -> + receive + stop -> + ?event({stop_invoked_for_beamr, self()}), + case erlang:port_info(Port, id) of + undefined -> + ok; + _ -> + port_close(Port), + ok + end, + ok; + {wasm_send, NewListener, Message} -> + ?event({wasm_send, {listener, NewListener}, {message, Message}}), + Port ! 
{self(), Message}, + worker(Port, NewListener); + WASMResult -> + ?event({wasm_result, {listener, Listener}, {result, WASMResult}}), + Listener ! WASMResult, + worker(Port, Listener) + end. +``` + +### wasm_send + +```erlang +wasm_send(WASM, Message) when is_pid(WASM) -> + WASM ! {wasm_send, self(), Message}, + ok. +``` + +### stop + +Stop a WASM executor context. + +```erlang +stop(WASM) when is_pid(WASM) -> + WASM ! stop, + ok. +``` + +### call + +Call a function in the WASM executor (see moduledoc for more details). + +```erlang +call(PID, FuncRef, Args) -> + {ok, Res, _} = call(PID, FuncRef, Args, fun stub/3), + {ok, Res}. +``` + +### call + +```erlang +call(PID, FuncRef, Args, ImportFun) -> + call(PID, FuncRef, Args, ImportFun, #{}). +``` + +### call + +```erlang +call(PID, FuncRef, Args, ImportFun, StateMsg) -> + call(PID, FuncRef, Args, ImportFun, StateMsg, #{}). +``` + +### call + +```erlang +call(PID, FuncRef, Args, ImportFun, StateMsg, Opts) + when is_binary(FuncRef) -> + call(PID, binary_to_list(FuncRef), Args, ImportFun, StateMsg, Opts); +``` + +### call + +```erlang +call(WASM, FuncRef, Args, ImportFun, StateMsg, Opts) + when is_pid(WASM) + andalso (is_list(FuncRef) or is_integer(FuncRef)) + andalso is_list(Args) + andalso is_function(ImportFun) + andalso is_map(Opts) -> + case is_valid_arg_list(Args) of + true -> + ?event( + {call_started, + WASM, + FuncRef, + Args, + ImportFun, + StateMsg, + Opts}), + wasm_send(WASM, + {command, + term_to_binary( + case is_integer(FuncRef) of + true -> {indirect_call, FuncRef, Args}; + false -> {call, FuncRef, Args} + end + ) + } + ), + ?event({waiting_for_call_result, self(), WASM}), + monitor_call(WASM, ImportFun, StateMsg, Opts); + false -> + {error, {invalid_args, Args}} + end. +``` + +### stub + +Stub import function for the WASM executor. + +```erlang +stub(Msg1, _Msg2, _Opts) -> + ?event(stub_stdlib_called), + {ok, [0], Msg1}. 
+``` + +### monitor_call + +Synchonously monitor the WASM executor for a call result and any + +```erlang +monitor_call(WASM, ImportFun, StateMsg, Opts) -> + receive + {execution_result, Result} -> + ?event({call_result, Result}), + {ok, Result, StateMsg}; + {import, Module, Func, Args, Signature} -> + ?event({import_called, Module, Func, Args, Signature}), + try + {ok, Res, StateMsg2} = + ImportFun(StateMsg, + #{ + instance => WASM, + module => Module, + func => Func, + args => Args, + func_sig => Signature + }, + Opts + ), + ?event({import_ret, Module, Func, {args, Args}, {res, Res}}), + dispatch_response(WASM, Res), + monitor_call(WASM, ImportFun, StateMsg2, Opts) + catch + Err:Reason:Stack -> + % Signal the WASM executor to stop. +``` + +### dispatch_response + +Check the type of an import response and dispatch it to a Beamr port. + +```erlang +dispatch_response(WASM, Term) when is_pid(WASM) -> + case is_valid_arg_list(Term) of + true -> + wasm_send(WASM, + {command, term_to_binary({import_response, Term})}); + false -> + throw({error, {invalid_response, Term}}) + end; +``` + +### dispatch_response + +Check the type of an import response and dispatch it to a Beamr port. + +```erlang +dispatch_response(_WASM, Term) -> + throw({error, {invalid_response, Term}}). +``` + +### is_valid_arg_list + +Check that a list of arguments is valid for a WASM function call. + +```erlang +is_valid_arg_list(Args) when is_list(Args) -> + lists:all(fun(Arg) -> is_integer(Arg) or is_float(Arg) end, Args); +``` + +### is_valid_arg_list + +Check that a list of arguments is valid for a WASM function call. + +```erlang +is_valid_arg_list(_) -> + false. +``` + +### serialize + +Serialize the WASM state to a binary. + +```erlang +serialize(WASM) when is_pid(WASM) -> + ?event(starting_serialize), + {ok, Size} = hb_beamr_io:size(WASM), + ?event({image_size, Size}), + {ok, Mem} = hb_beamr_io:read(WASM, 0, Size), + ?event({finished_serialize, byte_size(Mem)}), + {ok, Mem}. 
+``` + +### deserialize + +Deserialize a WASM state from a binary. + +```erlang +deserialize(WASM, Bin) when is_pid(WASM) andalso is_binary(Bin) -> + ?event(starting_deserialize), + Res = hb_beamr_io:write(WASM, 0, Bin), + ?event({finished_deserialize, Res}), + ok. +``` + +### driver_loads_test + +```erlang +driver_loads_test() -> + ?assertEqual(ok, load_driver()). +``` + +### simple_wasm_test + +Test standalone `hb_beamr` correctly after loading a WASM module. + +```erlang +simple_wasm_test() -> + {ok, File} = file:read_file("test/test.wasm"), + {ok, WASM, _Imports, _Exports} = start(File), + {ok, [Result]} = call(WASM, "fac", [5.0]), + ?assertEqual(120.0, Result). +``` + +### imported_function_test + +Test that imported functions can be called from the WASM module. + +```erlang +imported_function_test() -> + {ok, File} = file:read_file("test/pow_calculator.wasm"), + {ok, WASM, _Imports, _Exports} = start(File), + {ok, [Result], _} = + call(WASM, <<"pow">>, [2, 5], + fun(Msg1, #{ args := [Arg1, Arg2] }, _Opts) -> + {ok, [Arg1 * Arg2], Msg1} + end), + ?assertEqual(32, Result). +``` + +### wasm64_test + +Test that WASM Memory64 modules load and execute correctly. + +```erlang +wasm64_test() -> + {ok, File} = file:read_file("test/test-64.wasm"), + {ok, WASM, _ImportMap, _Exports} = start(File), + {ok, [Result]} = call(WASM, "fac", [5.0]), + ?assertEqual(120.0, Result). +``` + +### multiclient_test + +Ensure that processes outside of the initial one can interact with + +```erlang +multiclient_test() -> + Self = self(), + ExecPID = spawn(fun() -> + receive {wasm, WASM} -> + {ok, [Result]} = call(WASM, "fac", [5.0]), + Self ! {result, Result} + end + end), + _StartPID = spawn(fun() -> + {ok, File} = file:read_file("test/test.wasm"), + {ok, WASM, _ImportMap, _Exports} = start(File), + ExecPID ! {wasm, WASM} + end), + receive + {result, Result} -> + ?assertEqual(120.0, Result) + end. 
+``` + +### benchmark_test + +```erlang +benchmark_test() -> + BenchTime = 1, + {ok, File} = file:read_file("test/test-64.wasm"), + {ok, WASM, _ImportMap, _Exports} = start(File), + Iterations = hb_test_utils:benchmark( + fun() -> + {ok, [Result]} = call(WASM, "fac", [5.0]), + ?assertEqual(120.0, Result) + end, + BenchTime + ), + ?event(benchmark, {scheduled, Iterations}), + ?assert(Iterations > 1000), + hb_test_utils:benchmark_print( + <<"Direct beamr: Executed">>, + <<"calls">>, + Iterations, + BenchTime + ), +``` + +--- + +*Generated from [hb_beamr.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_beamr.erl)* diff --git a/docs/book/src/hb_beamr_io.erl.md b/docs/book/src/hb_beamr_io.erl.md new file mode 100644 index 000000000..181ac9e38 --- /dev/null +++ b/docs/book/src/hb_beamr_io.erl.md @@ -0,0 +1,238 @@ +# hb_beamr_io + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_beamr_io.erl) + +Simple interface for memory management for Beamr instances. +It allows for reading and writing to memory, as well as allocating and +freeing memory by calling the WASM module's exported malloc and free +functions. +Unlike the majority of HyperBEAM modules, this module takes a defensive +approach to type checking, breaking from the conventional Erlang style, +such that failures are caught in the Erlang-side of functions rather than +in the C/WASM-side. + +--- + +## Exported Functions + +- `free/2` +- `malloc/2` +- `read_string/2` +- `read/3` +- `size/1` +- `write_string/2` +- `write/3` + +--- + +### size + +Simple interface for memory management for Beamr instances. +Get the size (in bytes) of the native memory allocated in the Beamr + +```erlang +size(WASM) when is_pid(WASM) -> + hb_beamr:wasm_send(WASM, {command, term_to_binary({size})}), + receive + {execution_result, Size} -> + {ok, Size} + end. +``` + +### write + +Write a binary to the Beamr instance's native memory at a given offset. 
+
+```erlang
+write(WASM, Offset, Data)
+    when is_pid(WASM)
+    andalso is_binary(Data)
+    andalso is_integer(Offset) ->
+    ?event(writing_to_mem),
+    hb_beamr:wasm_send(WASM, {command, term_to_binary({write, Offset, Data})}),
+    ?event(mem_written),
+    receive
+        ok -> ok;
+        {error, Error} -> {error, Error}
+    end.
+```
+
+### write_string
+
+Simple helper function to allocate space for (via malloc) and write a
+
+```erlang
+write_string(WASM, Data) when is_pid(WASM) andalso is_list(Data) ->
+    write_string(WASM, iolist_to_binary(Data));
+```
+
+### write_string
+
+Simple helper function to allocate space for (via malloc) and write a
+
+```erlang
+write_string(WASM, Data) when is_pid(WASM) andalso is_binary(Data) ->
+    DataSize = byte_size(Data) + 1,
+    String = <<Data/binary, 0:8>>,
+    case malloc(WASM, DataSize) of
+        {ok, Ptr} ->
+            case write(WASM, Ptr, String) of
+                ok -> {ok, Ptr};
+                {error, Error} -> {error, Error}
+            end;
+        Error -> Error
+    end.
+```
+
+### read
+
+Read a binary from the Beamr instance's native memory at a given offset
+
+```erlang
+read(WASM, Offset, Size)
+    when is_pid(WASM)
+    andalso is_integer(Offset)
+    andalso is_integer(Size) ->
+    ?event({read_request, {port, WASM}, {location, Offset}, {size, Size}}),
+    hb_beamr:wasm_send(WASM, {command, term_to_binary({read, Offset, Size})}),
+    ?event(read_req_sent),
+    receive
+        {execution_result, Result} ->
+            ?event(
+                {read_result,
+                    {wasm, WASM},
+                    {location, Offset},
+                    {size, Size},
+                    {result, Result}}),
+            {ok, Result};
+        {error, Error} ->
+            {error, Error}
+    end.
+```
+
+### read_string
+
+Simple helper function to read a string from the Beamr instance's native
+
+```erlang
+read_string(Port, Offset) -> read_string(Port, Offset, 8).
+```
+
+### read_string
+
+Simple helper function to read a string from the Beamr instance's native
+
+```erlang
+read_string(WASM, Offset, ChunkSize)
+    when is_pid(WASM)
+    andalso is_integer(Offset)
+    andalso is_integer(ChunkSize) ->
+    {ok, iolist_to_binary(do_read_string(WASM, Offset, ChunkSize))}.
+``` + +### do_read_string + +```erlang +do_read_string(WASM, Offset, ChunkSize) -> + {ok, Data} = read(WASM, Offset, ChunkSize), + case binary:split(Data, [<<0>>]) of + [Data|[]] -> [Data|do_read_string(WASM, Offset + ChunkSize, ChunkSize)]; + [FinalData|_Remainder] -> [FinalData] + end. +``` + +### malloc + +Allocate space for (via an exported malloc function from the WASM) in + +```erlang +malloc(WASM, Size) when is_pid(WASM) andalso is_integer(Size) -> + case hb_beamr:call(WASM, "malloc", [Size]) of + {ok, [0]} -> + ?event({malloc_failed, Size}), + {error, malloc_failed}; + {ok, [Ptr]} -> + ?event({malloc_success, Ptr, Size}), + {ok, Ptr}; + {error, Error} -> + {error, Error} + end. +``` + +### free + +Free space allocated in the Beamr instance's native memory via a + +```erlang +free(WASM, Ptr) when is_pid(WASM) andalso is_integer(Ptr) -> + case hb_beamr:call(WASM, "free", [Ptr]) of + {ok, Res} -> + ?event({free_result, Res}), + ok; + {error, Error} -> + {error, Error} + end. +``` + +### size_test + +```erlang +size_test() -> + WASMPageSize = 65536, + File1Pages = 1, + File2Pages = 193, + {ok, File} = file:read_file("test/test-print.wasm"), + {ok, WASM, _Imports, _Exports} = hb_beamr:start(File), + ?assertEqual({ok, WASMPageSize * File1Pages}, hb_beamr_io:size(WASM)), + hb_beamr:stop(WASM), + {ok, File2} = file:read_file("test/aos-2-pure-xs.wasm"), + {ok, WASM2, _Imports2, _Exports2} = hb_beamr:start(File2), + ?assertEqual({ok, WASMPageSize * File2Pages}, hb_beamr_io:size(WASM2)), + hb_beamr:stop(WASM2). +``` + +### write_test + +Test writing memory in and out of bounds. + +```erlang +write_test() -> + % Load the `test-print' WASM module, which has a simple print function. +``` + +### read_test + +Test reading memory in and out of bounds. + +```erlang +read_test() -> + % Our `test-print' module is hand-written in WASM, so we know that it + % has a `Hello, World!` string at precisely offset 66. +``` + +### malloc_test + +Test allocating and freeing memory. 
+ +```erlang +malloc_test() -> + {ok, File} = file:read_file("test/test-calling.wasm"), + {ok, WASM, _Imports, _Exports} = hb_beamr:start(File), + % Check that we can allocate memory inside the bounds of the WASM module. +``` + +### string_write_and_read_test + +Write and read strings to memory. + +```erlang +string_write_and_read_test() -> + {ok, File} = file:read_file("test/test-calling.wasm"), + {ok, WASM, _Imports, _Exports} = hb_beamr:start(File), + {ok, Ptr} = write_string(WASM, <<"Hello, World!">>), + ?assertEqual({ok, <<"Hello, World!">>}, read_string(WASM, Ptr)). +``` + +--- + +*Generated from [hb_beamr_io.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_beamr_io.erl)* diff --git a/docs/book/src/hb_cache.erl.md b/docs/book/src/hb_cache.erl.md new file mode 100644 index 000000000..356becb5d --- /dev/null +++ b/docs/book/src/hb_cache.erl.md @@ -0,0 +1,967 @@ +# hb_cache + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_cache.erl) + +A cache of AO-Core protocol messages and compute results. +HyperBEAM stores all paths in key value stores, abstracted by the `hb_store` +module. Each store has its own storage backend, but each works with simple +key-value pairs. Each store can write binary keys at paths, and link between +paths. +There are three layers to HyperBEAMs internal data representation on-disk: +1. The raw binary data, written to the store at the hash of the content. + Storing binary paths in this way effectively deduplicates the data. +2. The hashpath-graph of all content, stored as a set of links between + hashpaths, their keys, and the data that underlies them. This allows + all messages to share the same hashpath space, such that all requests + from users additively fill-in the hashpath space, minimizing duplicated + compute. +3. Messages, referrable by their IDs (committed or uncommitted). These are + stored as a set of links commitment IDs and the uncommitted message. 
+Before writing a message to the store, we convert it to Type-Annotated +Binary Messages (TABMs), such that each of the keys in the message is +either a map or a direct binary. +Nested keys are lazily loaded from the stores, such that large deeply +nested messages where only a small part of the data is actually used are +not loaded into memory unnecessarily. In order to ensure that a message is +loaded from the cache after a `read`, we can use the `ensure_loaded/1` and +`ensure_all_loaded/1` functions. Ensure loaded will load the exact value +that has been requested, while ensure all loaded will load the entire +structure of the message into memory. +Lazily loadable `links` are expressed as a tuple of the following form: +`{link, ID, LinkOpts}`, where `ID` is the path to the data in the store, +and `LinkOpts` is a map of suggested options to use when loading the data. +In particular, this module ensures to stash the `store` option in `LinkOpts`, +such that the `read` function can use the correct store without having to +search unnecessarily. By providing an `Opts` argument to `ensure_loaded` or +`ensure_all_loaded`, the caller can specify additional options to use when +loading the data -- overriding the suggested options in the link. + +--- + +## Exported Functions + +- `ensure_all_loaded/1` +- `ensure_all_loaded/2` +- `ensure_loaded/1` +- `ensure_loaded/2` +- `link/3` +- `list_numbered/2` +- `list/2` +- `match/2` +- `read_resolved/3` +- `read/2` +- `test_signed/1` +- `test_unsigned/1` +- `write_binary/3` +- `write_hashpath/2` +- `write/2` + +--- + +### ensure_loaded + +A cache of AO-Core protocol messages and compute results. +Ensure that a value is loaded from the cache if it is an ID or a link. + +```erlang +ensure_loaded(Msg) -> + ensure_loaded(Msg, #{}). +``` + +### ensure_loaded + +```erlang +ensure_loaded(Msg, Opts) -> + ensure_loaded([], Msg, Opts). 
+``` + +### ensure_loaded + +```erlang +ensure_loaded(Ref, {Status, Msg}, Opts) when Status == ok; Status == error -> + {Status, ensure_loaded(Ref, Msg, Opts)}; +``` + +### ensure_loaded + +```erlang +ensure_loaded(Ref, + Lk = {link, ID, LkOpts = #{ <<"type">> := <<"link">>, <<"lazy">> := Lazy }}, + RawOpts) -> + % The link is to a submessage; either in lazy (unresolved) form, or direct + % form. +``` + +### ensure_loaded + +```erlang +ensure_loaded(Ref, Link = {link, ID, LinkOpts = #{ <<"lazy">> := true }}, RawOpts) -> + % If the user provided their own options, we merge them and _overwrite_ + % the options that are already set in the link. +``` + +### ensure_loaded + +```erlang +ensure_loaded(Ref, {link, ID, LinkOpts}, Opts) -> + ensure_loaded(Ref, {link, ID, LinkOpts#{ <<"lazy">> => true}}, Opts); +``` + +### ensure_loaded + +```erlang +ensure_loaded(_Ref, Msg, _Opts) when not ?IS_LINK(Msg) -> + Msg. +``` + +### report_ensure_loaded_not_found + +Report that a value was not found in the cache. If a key is provided, + +```erlang +report_ensure_loaded_not_found(Ref, Lk, Opts) -> + ?event(link_error, {link_not_resolvable, {ref, Ref}, {link, Lk}, {opts, Opts}}), + throw( + {necessary_message_not_found, + hb_path:to_binary(lists:reverse(Ref)), + hb_link:format_unresolved(Lk, Opts, 0) + } + ). +``` + +### ensure_all_loaded + +Ensure that all of the components of a message (whether a map, list, + +```erlang +ensure_all_loaded(Msg) -> + ensure_all_loaded(Msg, #{}). +``` + +### ensure_all_loaded + +```erlang +ensure_all_loaded(Msg, Opts) -> + ensure_all_loaded([], Msg, Opts). 
+``` + +### ensure_all_loaded + +```erlang +ensure_all_loaded(Ref, Link, Opts) when ?IS_LINK(Link) -> + ensure_all_loaded(Ref, ensure_loaded(Ref, Link, Opts), Opts); +``` + +### ensure_all_loaded + +```erlang +ensure_all_loaded(Ref, Msg, Opts) when is_map(Msg) -> + maps:map(fun(K, V) -> ensure_all_loaded([K|Ref], V, Opts) end, Msg); +``` + +### ensure_all_loaded + +```erlang +ensure_all_loaded(Ref, Msg, Opts) when is_list(Msg) -> + lists:map( + fun({N, V}) -> ensure_all_loaded([N|Ref], V, Opts) end, + hb_util:number(Msg) + ); +``` + +### ensure_all_loaded + +```erlang +ensure_all_loaded(Ref, Msg, Opts) -> + ensure_loaded(Ref, Msg, Opts). +``` + +### list_numbered + +List all items in a directory, assuming they are numbered. + +```erlang +list_numbered(Path, Opts) -> + SlotDir = hb_store:path(hb_opts:get(store, no_viable_store, Opts), Path), + [ hb_util:int(Name) || Name <- list(SlotDir, Opts) ]. +``` + +### list + +List all items under a given path. + +```erlang +list(Path, Opts) when is_map(Opts) and not is_map_key(<<"store-module">>, Opts) -> + case hb_opts:get(store, no_viable_store, Opts) of + not_found -> []; + Store -> + list(Path, Store) + end; +``` + +### list + +List all items under a given path. + +```erlang +list(Path, Store) -> + ResolvedPath = hb_store:resolve(Store, Path), + case hb_store:list(Store, ResolvedPath) of + {ok, Names} -> Names; + {error, _} -> []; + not_found -> [] + end. +``` + +### match + +Match a template message against the cache, returning a list of IDs + +```erlang +match(MatchSpec, Opts) -> + Spec = hb_message:convert(MatchSpec, tabm, <<"structured@1.0">>, Opts), + ConvertedMatchSpec = + maps:map( + fun(_, Value) -> + generate_binary_path(Value, Opts) + end, + maps:without([<<"ao-types">>], hb_ao:normalize_keys(Spec, Opts)) + ), + case hb_store:match(hb_opts:get(store, no_viable_store, Opts), ConvertedMatchSpec) of + {ok, Matches} -> {ok, Matches}; + _ -> not_found + end. 
+``` + +### generate_binary_path + +Generate the path at which a binary value should be stored. +Write a message to the cache. For raw binaries, we write the data at + +```erlang +generate_binary_path(Bin, Opts) -> + Hashpath = hb_path:hashpath(Bin, Opts), + <<"data/", Hashpath/binary>>. +``` + +### write + +Generate the path at which a binary value should be stored. +Write a message to the cache. For raw binaries, we write the data at + +```erlang +write(RawMsg, Opts) when is_map(RawMsg) -> + {ok, Msg} = hb_message:with_only_committed(RawMsg, Opts), + TABM = hb_message:convert(Msg, tabm, <<"structured@1.0">>, Opts), + ?event(debug_cache, {writing_full_message, {msg, TABM}}), + try + do_write_message( + TABM, + hb_opts:get(store, no_viable_store, Opts), + Opts + ) + catch + Type:Reason:Stacktrace -> + ?event(error, + {cache_write_error, + {type, Type}, + {reason, Reason}, + {stacktrace, {trace, Stacktrace}} + }, + Opts + ), + erlang:raise(Type, Reason, Stacktrace) + end; +``` + +### write + +Generate the path at which a binary value should be stored. +Write a message to the cache. For raw binaries, we write the data at + +```erlang +write(List, Opts) when is_list(List) -> + write(hb_message:convert(List, tabm, <<"structured@1.0">>, Opts), Opts); +``` + +### write + +Generate the path at which a binary value should be stored. +Write a message to the cache. For raw binaries, we write the data at + +```erlang +write(Bin, Opts) when is_binary(Bin) -> + do_write_message(Bin, hb_opts:get(store, no_viable_store, Opts), Opts). +``` + +### do_write_message + +```erlang +do_write_message(Bin, Store, Opts) when is_binary(Bin) -> + % Write the binary in the store at its calculated content-hash. 
+```
+
+### do_write_message

+```erlang
+do_write_message(List, Store, Opts) when is_list(List) ->
+    do_write_message(
+        hb_message:convert(List, tabm, <<"structured@1.0">>, Opts),
+        Store,
+        Opts
+    );
+```
+
+### do_write_message
+
+```erlang
+do_write_message(Msg, Store, Opts) when is_map(Msg) ->
+    ?event(debug_cache, {writing_message, Msg}),
+    % Calculate the IDs of the message.
+```
+
+### write_key
+
+Write a single key for a message into the store.
+
+```erlang
+write_key(Base, <<"commitments">>, _HPAlg, RawCommitments, Store, Opts) ->
+    % The commitments are a special case: We calculate the single-part hashpath
+    % for the `baseID/commitments` key, then write each commitment to the store
+    % and link it to `baseCommHP/commitmentID`.
+```
+
+### write_key
+
+```erlang
+write_key(Base, Key, HPAlg, Value, Store, Opts) ->
+    KeyHashPath =
+        hb_path:hashpath(
+            Base,
+            hb_path:to_binary(Key),
+            HPAlg,
+            Opts
+        ),
+    {ok, Path} = do_write_message(Value, Store, Opts),
+    hb_store:make_link(Store, Path, KeyHashPath),
+    {ok, Path}.
+```
+
+### prepare_commitments
+
+The `structured@1.0` encoder does not typically encode `commitments`,
+
+```erlang
+prepare_commitments(RawCommitments, Opts) ->
+    Commitments = ensure_all_loaded(RawCommitments, Opts),
+    maps:map(
+        fun(_, StructuredCommitment) ->
+            hb_message:convert(StructuredCommitment, tabm, Opts)
+        end,
+        Commitments
+    ).
+```
+
+### commitment_path
+
+Generate the commitment path for a given base path.
+Calculate the IDs for a message.
+
+```erlang
+commitment_path(Base, Opts) ->
+    hb_path:hashpath(<<Base/binary, "/commitments">>, Opts).
+```
+
+### calculate_all_ids
+
+Generate the commitment path for a given base path.
+Calculate the IDs for a message.
+
+```erlang
+calculate_all_ids(Bin, _Opts) when is_binary(Bin) -> [];
+```
+
+### calculate_all_ids
+
+Generate the commitment path for a given base path.
+Calculate the IDs for a message.
+ +```erlang +calculate_all_ids(Msg, Opts) -> + Commitments = + hb_maps:without( + [<<"priv">>], + hb_maps:get(<<"commitments">>, Msg, #{}, Opts), + Opts + ), + CommIDs = hb_maps:keys(Commitments, Opts), + ?event({calculating_ids, {msg, Msg}, {commitments, Commitments}, {comm_ids, CommIDs}}), + All = hb_message:id(Msg, all, Opts#{ linkify_mode => discard }), + case lists:member(All, CommIDs) of + true -> CommIDs; + false -> [All | CommIDs] + end. +``` + +### write_hashpath + +Write a hashpath and its message to the store and link it. + +```erlang +write_hashpath(Msg = #{ <<"priv">> := #{ <<"hashpath">> := HP } }, Opts) -> + write_hashpath(HP, Msg, Opts); +``` + +### write_hashpath + +Write a hashpath and its message to the store and link it. + +```erlang +write_hashpath(MsgWithoutHP, Opts) -> + write(MsgWithoutHP, Opts). +``` + +### write_hashpath + +```erlang +write_hashpath(HP, Msg, Opts) when is_binary(HP) or is_list(HP) -> + Store = hb_opts:get(store, no_viable_store, Opts), + ?event({writing_hashpath, {hashpath, HP}, {msg, Msg}, {store, Store}}), + {ok, Path} = write(Msg, Opts), + hb_store:make_link(Store, Path, HP), + {ok, Path}. +``` + +### write_binary + +Write a raw binary keys into the store and link it at a given hashpath. + +```erlang +write_binary(Hashpath, Bin, Opts) -> + write_binary(Hashpath, Bin, hb_opts:get(store, no_viable_store, Opts), Opts). +``` + +### write_binary + +```erlang +write_binary(Hashpath, Bin, Store, Opts) -> + ?event({writing_binary, {hashpath, Hashpath}, {bin, Bin}, {store, Store}}), + {ok, Path} = do_write_message(Bin, Store, Opts), + hb_store:make_link(Store, Path, Hashpath), + {ok, Path}. +``` + +### read + +Read the message at a path. 
Returns in `structured@1.0` format: Either a + +```erlang +read(Path, Opts) -> + case store_read(Path, hb_opts:get(store, no_viable_store, Opts), Opts) of + not_found -> not_found; + {ok, Res} -> + %?event({applying_types_to_read_message, Res}), + %Structured = dev_codec_structured:to(Res), + %?event({finished_read, Structured}), + {ok, Res} + end. +``` + +### store_read + +List all of the subpaths of a given path and return a map of keys and + +```erlang +store_read(_Path, no_viable_store, _) -> + not_found; +``` + +### store_read + +List all of the subpaths of a given path and return a map of keys and + +```erlang +store_read(Path, Store, Opts) -> + ResolvedFullPath = hb_store:resolve(Store, PathBin = hb_path:to_binary(Path)), + ?event({read_resolved, + {original_path, {string, PathBin}}, + {resolved_path, ResolvedFullPath}, + {store, Store} + }), + case hb_store:type(Store, ResolvedFullPath) of + not_found -> not_found; + simple -> + ?event({reading_data, ResolvedFullPath}), + case hb_store:read(Store, ResolvedFullPath) of + {ok, Bin} -> {ok, Bin}; + not_found -> not_found + end; + composite -> + ?event({reading_composite, ResolvedFullPath}), + case hb_store:list(Store, ResolvedFullPath) of + {ok, RawSubpaths} -> + Subpaths = + lists:map(fun hb_util:bin/1, RawSubpaths), + ?event( + {listed, + {original_path, Path}, + {subpaths, {explicit, Subpaths}} + } + ), + % Generate links for all subpaths except `commitments' and + % `ao-types'. `commitments' is always read in its entirety, + % such that all messages have their IDs and signatures + % locally available. +``` + +### prepare_links + +Prepare a set of links from a listing of subpaths. + +```erlang +prepare_links(RootPath, Subpaths, Store, Opts) -> + {ok, Implicit, Types} = read_ao_types(RootPath, Subpaths, Store, Opts), + Res = + maps:from_list(lists:filtermap( + fun(<<"ao-types">>) -> false; + (<<"commitments">>) -> + % List the commitments for this message, and load them into + % memory. 
If there are no commitments at the path, we exclude
+                    % commitments from the list of links.
+```
+
+### read_ao_types
+
+Read and parse the ao-types for a given path if it is in the supplied
+
+```erlang
+read_ao_types(Path, Subpaths, Store, Opts) ->
+    ?event({reading_ao_types, {path, Path}, {subpaths, {explicit, Subpaths}}}),
+    case lists:member(<<"ao-types">>, Subpaths) of
+        true ->
+            {ok, TypesBin} =
+                hb_store:read(
+                    Store,
+                    hb_store:path(Store, [Path, <<"ao-types">>])
+                ),
+            Types = dev_codec_structured:decode_ao_types(TypesBin, Opts),
+            ?event({parsed_ao_types, {types, Types}}),
+            {ok, types_to_implicit(Types), Types};
+        false ->
+            ?event({no_ao_types_key_found, {path, Path}, {subpaths, Subpaths}}),
+            {ok, #{}, #{}}
+    end.
+```
+
+### types_to_implicit
+
+Convert a map of ao-types to an implicit map of types.
+
+```erlang
+types_to_implicit(Types) ->
+    maps:filtermap(
+        fun(_K, <<"empty-message">>) -> {true, #{}};
+           (_K, <<"empty-list">>) -> {true, []};
+           (_K, <<"empty-binary">>) -> {true, <<>>};
+           (_, _) -> false
+        end,
+        Types
+    ).
+```
+
+### read_resolved
+
+Read the output of a prior computation, given Msg1, Msg2, and some
+
+```erlang
+read_resolved(MsgID1, MsgID2, Opts) when ?IS_ID(MsgID1) and ?IS_ID(MsgID2) ->
+    ?event({cache_lookup, {msg1, MsgID1}, {msg2, MsgID2}, {opts, Opts}}),
+    read(<<MsgID1/binary, "/", MsgID2/binary>>, Opts);
+```
+
+### read_resolved
+
+Read the output of a prior computation, given Msg1, Msg2, and some
+
+```erlang
+read_resolved(MsgID1, Msg2, Opts) when ?IS_ID(MsgID1) and is_map(Msg2) ->
+    {ok, MsgID2} = dev_message:id(Msg2, #{ <<"committers">> => <<"all">> }, Opts),
+    read(<<MsgID1/binary, "/", MsgID2/binary>>, Opts);
+```
+
+### read_resolved
+
+Read the output of a prior computation, given Msg1, Msg2, and some
+
+```erlang
+read_resolved(Msg1, Msg2, Opts) when is_map(Msg1) and is_map(Msg2) ->
+    read(hb_path:hashpath(Msg1, Msg2, Opts), Opts);
+```
+
+### read_resolved
+
+Read the output of a prior computation, given Msg1, Msg2, and some
+Make a link from one path to another in the store.
+ +```erlang +read_resolved(_, _, _) -> not_found. +``` + +### link + +Read the output of a prior computation, given Msg1, Msg2, and some +Make a link from one path to another in the store. + +```erlang +link(Existing, New, Opts) -> + hb_store:make_link( + hb_opts:get(store, no_viable_store, Opts), + Existing, + New + ). +``` + +### test_unsigned + +```erlang +test_unsigned(Data) -> + #{ + <<"base-test-key">> => <<"base-test-value">>, + <<"other-test-key">> => Data + }. +``` + +### test_signed + +```erlang +test_signed(Data) -> test_signed(Data, ar_wallet:new()). +``` + +### test_signed + +```erlang +test_signed(Data, Wallet) -> + hb_message:commit(test_unsigned(Data), Wallet). +``` + +### test_store_binary + +```erlang +test_store_binary(Store) -> + Bin = <<"Simple unsigned data item">>, + ?event(debug_store_test, {store, Store}), + Opts = #{ store => Store }, + {ok, ID} = write(Bin, Opts), + {ok, RetrievedBin} = read(ID, Opts), + ?assertEqual(Bin, RetrievedBin). +``` + +### test_store_unsigned_empty_message + +```erlang +test_store_unsigned_empty_message(Store) -> + ?event(debug_store_test, {store, Store}), + hb_store:reset(Store), + Item = #{}, + Opts = #{ store => Store }, + {ok, Path} = write(Item, Opts), + {ok, RetrievedItem} = read(Path, Opts), + ?event( + {retrieved_item, + {path, {string, Path}}, + {expected, Item}, + {got, RetrievedItem} + } + ), + MatchRes = hb_message:match(Item, RetrievedItem, strict, Opts), + ?event({match_result, MatchRes}), + ?assert(MatchRes). 
+``` + +### test_store_unsigned_nested_empty_message + +```erlang +test_store_unsigned_nested_empty_message(Store) -> + ?event(debug_store_test, {store, Store}), + hb_store:reset(Store), + Item = + #{ <<"layer1">> => + #{ <<"layer2">> => + #{ <<"layer3">> => + #{ <<"a">> => <<"b">>} + }, + <<"layer3b">> => #{ <<"c">> => <<"d">>}, + <<"layer3c">> => #{} + } + }, + Opts = #{ store => Store }, + {ok, Path} = write(Item, Opts), + {ok, RetrievedItem} = read(Path, Opts), + ?assert(hb_message:match(Item, RetrievedItem, strict, Opts)). +``` + +### test_store_simple_unsigned_message + +Test storing and retrieving a simple unsigned item + +```erlang +test_store_simple_unsigned_message(Store) -> + Item = test_unsigned(<<"Simple unsigned data item">>), + ?event(debug_store_test, {store, Store}), + Opts = #{ store => Store }, + %% Write the simple unsigned item + {ok, _Path} = write(Item, Opts), + %% Read the item back + ID = hb_util:human_id(hb_ao:get(id, Item)), + {ok, RetrievedItem} = read(ID, Opts), + ?assert(hb_message:match(Item, RetrievedItem, strict, Opts)), + ok. +``` + +### test_store_ans104_message + +```erlang +test_store_ans104_message(Store) -> + ?event(debug_store_test, {store, Store}), + hb_store:reset(Store), + Opts = #{ store => Store }, + Item = #{ <<"type">> => <<"ANS104">>, <<"content">> => <<"Hello, world!">> }, + Committed = hb_message:commit(Item, hb:wallet()), + {ok, _Path} = write(Committed, Opts), + CommittedID = hb_util:human_id(hb_message:id(Committed, all)), + UncommittedID = hb_util:human_id(hb_message:id(Committed, none)), + ?event({test_message_ids, {uncommitted, UncommittedID}, {committed, CommittedID}}), + {ok, RetrievedItem} = read(CommittedID, Opts), + {ok, RetrievedItemU} = read(UncommittedID, Opts), + ?assert(hb_message:match(Committed, RetrievedItem, strict, Opts)), + ?assert(hb_message:match(Committed, RetrievedItemU, strict, Opts)), + ok. 
+``` + +### test_store_simple_signed_message + +Test storing and retrieving a simple unsigned item + +```erlang +test_store_simple_signed_message(Store) -> + ?event(debug_store_test, {store, Store}), + Opts = #{ store => Store }, + hb_store:reset(Store), + Wallet = ar_wallet:new(), + Address = hb_util:human_id(ar_wallet:to_address(Wallet)), + Item = test_signed(<<"Simple signed data item">>, Wallet), + ?event({writing_test_message, Item}), + %% Write the simple unsigned item + {ok, _Path} = write(Item, Opts), + % %% Read the item back + % {ok, UID} = dev_message:id(Item, #{ <<"committers">> => <<"none">> }, Opts), + % {ok, RetrievedItemUnsig} = read(UID, Opts), + % ?event({retreived_unsigned_message, {expected, Item}, {got, RetrievedItemUnsig}}), + % MatchRes = hb_message:match(Item, RetrievedItemUnsig, strict, Opts), + % ?event({match_result, MatchRes}), + % ?assert(MatchRes), + {ok, CommittedID} = dev_message:id(Item, #{ <<"committers">> => [Address] }, Opts), + {ok, RetrievedItemSigned} = read(CommittedID, Opts), + ?event({retreived_signed_message, {expected, Item}, {got, RetrievedItemSigned}}), + MatchResSigned = hb_message:match(Item, RetrievedItemSigned, strict, Opts), + ?event({match_result_signed, MatchResSigned}), + ?assert(MatchResSigned), + ok. 
+``` + +### test_deeply_nested_complex_message + +Test deeply nested item storage and retrieval + +```erlang +test_deeply_nested_complex_message(Store) -> + ?event(debug_store_test, {store, Store}), + hb_store:reset(Store), + Wallet = ar_wallet:new(), + Opts = #{ store => Store, priv_wallet => Wallet }, + %% Create nested data + Level3SignedSubmessage = test_signed([1,2,3], Opts#{priv_wallet => Wallet}), + Outer = + hb_message:commit( + #{ + <<"level1">> => + InnerSigned = hb_message:commit( + #{ + <<"level2">> => + #{ + <<"level3">> => Level3SignedSubmessage, + <<"e">> => <<"f">>, + <<"z">> => [1,2,3] + }, + <<"c">> => <<"d">>, + <<"g">> => [<<"h">>, <<"i">>], + <<"j">> => 1337 + }, + Opts + ), + <<"a">> => <<"b">> + }, + Opts + ), + UID = hb_message:id(Outer, none, Opts), + ?event({string, <<"================================================">>}), + CommittedID = hb_message:id(Outer, signed, Opts), + ?event({string, <<"================================================">>}), + ?event({test_message_ids, {uncommitted, UID}, {committed, CommittedID}}), + %% Write the nested item + {ok, _} = write(Outer, Opts), + %% Read the deep value back using subpath + OuterID = hb_util:human_id(UID), + {ok, OuterMsg} = read(OuterID, Opts), + EnsuredLoadedOuter = hb_cache:ensure_all_loaded(OuterMsg, Opts), + ?event({deep_message, {explicit, EnsuredLoadedOuter}}), + %% Assert that the retrieved item matches the original deep value + ?assertEqual( + [1,2,3], + hb_ao:get( + <<"level1/level2/level3/other-test-key">>, + EnsuredLoadedOuter, + Opts + ) + ), + ?event( + {deep_message_match, + {read, EnsuredLoadedOuter}, + {write, Level3SignedSubmessage} + } + ), + ?event({reading_committed_outer, {id, CommittedID}, {expect, Outer}}), + {ok, CommittedMsg} = read(hb_util:human_id(CommittedID), Opts), + EnsuredLoadedCommitted = hb_cache:ensure_all_loaded(CommittedMsg, Opts), + ?assertEqual( + [1,2,3], + hb_ao:get( + <<"level1/level2/level3/other-test-key">>, + EnsuredLoadedCommitted, + Opts + 
) + ). +``` + +### test_message_with_list + +```erlang +test_message_with_list(Store) -> + hb_store:reset(Store), + Opts = #{ store => Store }, + Msg = test_unsigned([<<"a">>, <<"b">>, <<"c">>]), + ?event({writing_message, Msg}), + {ok, Path} = write(Msg, Opts), + {ok, RetrievedItem} = read(Path, Opts), + ?assert(hb_message:match(Msg, RetrievedItem, strict, Opts)). +``` + +### test_match_message + +```erlang +test_match_message(Store) when map_get(<<"store-module">>, Store) =/= hb_store_lmdb -> + skip; +``` + +### test_match_message + +```erlang +test_match_message(Store) -> + hb_store:reset(Store), + Opts = #{ store => Store }, + % Write two messages that match the template, and a third that does not. +``` + +### test_match_linked_message + +```erlang +test_match_linked_message(Store) when map_get(<<"store-module">>, Store) =/= hb_store_lmdb -> + skip; +``` + +### test_match_linked_message + +```erlang +test_match_linked_message(Store) -> + hb_store:reset(Store), + Opts = #{ store => Store }, + Msg = #{ <<"a">> => Inner = #{ <<"b">> => <<"c">>, <<"d">> => <<"e">> } }, + {ok, _ID} = write(Msg, Opts), + {ok, [MatchedID]} = match(#{ <<"b">> => <<"c">> }, Opts), + {ok, Read1} = read(MatchedID, Opts), + ?assertEqual( + #{ <<"b">> => <<"c">>, <<"d">> => <<"e">> }, + hb_cache:ensure_all_loaded(Read1, Opts) + ), + {ok, [MatchedID2]} = match(#{ <<"a">> => Inner }, Opts), + {ok, Read2} = read(MatchedID2, Opts), + ?assertEqual(#{ <<"a">> => Inner }, ensure_all_loaded(Read2, Opts)). +``` + +### test_match_typed_message + +```erlang +test_match_typed_message(Store) when map_get(<<"store-module">>, Store) =/= hb_store_lmdb -> + skip; +``` + +### test_match_typed_message + +```erlang +test_match_typed_message(Store) -> + hb_store:reset(Store), + Opts = #{ store => Store }, + % Add some messages that should not match the template, as well as the main + % message that should match the template. 
+``` + +### cache_suite_test_ + +```erlang +cache_suite_test_() -> + hb_store:generate_test_suite([ + {"store unsigned empty message", + fun test_store_unsigned_empty_message/1}, + {"store binary", fun test_store_binary/1}, + {"store unsigned nested empty message", + fun test_store_unsigned_nested_empty_message/1}, + {"store simple unsigned message", fun test_store_simple_unsigned_message/1}, + {"store simple signed message", fun test_store_simple_signed_message/1}, + {"deeply nested complex message", fun test_deeply_nested_complex_message/1}, + {"message with list", fun test_message_with_list/1}, + {"match message", fun test_match_message/1}, + {"match linked message", fun test_match_linked_message/1}, + {"match typed message", fun test_match_typed_message/1} + ]). +``` + +### test_device_map_cannot_be_written_test + +Test that message whose device is `#{}` cannot be written. If it were to + +```erlang +test_device_map_cannot_be_written_test() -> + try + Opts = #{ store => StoreOpts = + [#{ <<"store-module">> => hb_store_fs, <<"name">> => <<"cache-TEST">> }] }, + hb_store:reset(StoreOpts), + Danger = #{ <<"device">> => #{}}, + write(Danger, Opts), + ?assert(false) + catch + _:_:_ -> ?assert(true) + end. +``` + +### run_test + +Run a specific test with a given store module. + +```erlang +run_test() -> + Store = hb_test_utils:test_store(hb_store_lmdb), +``` + +--- + +*Generated from [hb_cache.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_cache.erl)* diff --git a/docs/book/src/hb_cache_control.erl.md b/docs/book/src/hb_cache_control.erl.md new file mode 100644 index 000000000..1a3146606 --- /dev/null +++ b/docs/book/src/hb_cache_control.erl.md @@ -0,0 +1,554 @@ +# hb_cache_control + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_cache_control.erl) + +Cache control logic for the AO-Core resolver. It derives cache settings +from request, response, execution-local node Opts, as well as the global +node Opts. 
It applies these settings when asked to maybe store/lookup in +response to a request. + +--- + +## Exported Functions + +- `maybe_lookup/3` +- `maybe_store/4` + +--- + +### maybe_store + +Cache control logic for the AO-Core resolver. It derives cache settings +Write a resulting M3 message to the cache if requested. The precedence + +```erlang +maybe_store(Msg1, Msg2, Msg3, Opts) -> + case derive_cache_settings([Msg3, Msg2], Opts) of + #{ <<"store">> := true } -> + ?event(caching, {caching_result, {msg1, Msg1}, {msg2, Msg2}, {msg3, Msg3}}), + dispatch_cache_write(Msg1, Msg2, Msg3, Opts); + _ -> + not_caching + end. +``` + +### maybe_lookup + +Handles cache lookup, modulated by the caching options requested by + +```erlang +maybe_lookup(Msg1, Msg2, Opts) -> + case exec_likely_faster_heuristic(Msg1, Msg2, Opts) of + true -> + ?event(caching, {skip_cache_check, exec_likely_faster_heuristic}), + {continue, Msg1, Msg2}; + false -> lookup(Msg1, Msg2, Opts) + end. +``` + +### lookup + +```erlang +lookup(Msg1, Msg2, Opts) -> + case derive_cache_settings([Msg1, Msg2], Opts) of + #{ <<"lookup">> := false } -> + ?event({skip_cache_check, lookup_disabled}), + {continue, Msg1, Msg2}; + Settings = #{ <<"lookup">> := true } -> + OutputScopedOpts = + hb_store:scope( + Opts, + hb_opts:get(store_scope_resolved, local, Opts) + ), + case hb_cache:read_resolved(Msg1, Msg2, OutputScopedOpts) of + {ok, Msg3} -> + ?event(caching, + {cache_hit, + case is_binary(Msg3) of + true -> hb_path:hashpath(Msg1, Msg2, Opts); + false -> hb_path:hashpath(Msg3, Opts) + end, + {msg1, Msg1}, + {msg2, Msg2}, + {msg3, Msg3} + } + ), + {ok, Msg3}; + not_found -> + ?event(caching, {result_cache_miss, Msg1, Msg2}), + case Settings of + #{ <<"only-if-cached">> := true } -> + only_if_cached_not_found_error(Msg1, Msg2, Opts); + _ -> + case ?IS_ID(Msg1) of + false -> {continue, Msg1, Msg2}; + true -> + case hb_cache:read(Msg1, Opts) of + {ok, FullMsg1} -> + ?event(load_message, + {cache_hit_base_message_load, + 
{base_id, Msg1}, + {base_loaded, FullMsg1} + } + ), + {continue, FullMsg1, Msg2}; + not_found -> + necessary_messages_not_found_error( + Msg1, + Msg2, + Opts + ) + end + end + end + end + end. +``` + +### dispatch_cache_write + +Dispatch the cache write to a worker process if requested. + +```erlang +dispatch_cache_write(Msg1, Msg2, Msg3, Opts) -> + case hb_opts:get(async_cache, false, Opts) of + true -> + find_or_spawn_async_writer(Opts) ! {write, Msg1, Msg2, Msg3, Opts}, + ok; + false -> + perform_cache_write(Msg1, Msg2, Msg3, Opts) + end. +``` + +### find_or_spawn_async_writer + +Find our async cacher process, or spawn one if none exists. + +```erlang +find_or_spawn_async_writer(_Opts) -> + case erlang:get({hb_cache_control, async_writer}) of + undefined -> + PID = spawn(fun() -> async_writer() end), + erlang:put({hb_cache_control, async_writer}, PID), + PID; + PID -> + PID + end. +``` + +### async_writer + +Optional worker process to write messages to the cache. + +```erlang +async_writer() -> + receive + {write, Msg1, Msg2, Msg3, Opts} -> + perform_cache_write(Msg1, Msg2, Msg3, Opts); + stop -> ok + end. +``` + +### perform_cache_write + +Internal function to write a compute result to the cache. + +```erlang +perform_cache_write(Msg1, Msg2, Msg3, Opts) -> + hb_cache:write(Msg1, Opts), + hb_cache:write(Msg2, Opts), + case Msg3 of + <<_/binary>> -> + hb_cache:write_binary( + hb_path:hashpath(Msg1, Msg2, Opts), + Msg3, + Opts + ); + Map when is_map(Map) -> + hb_cache:write(Msg3, Opts); + _ -> + ?event({cannot_write_result, Msg3}), + skip_caching + end. 
+``` + +### only_if_cached_not_found_error + +Generate a message to return when `only_if_cached` was specified, and + +```erlang +only_if_cached_not_found_error(Msg1, Msg2, Opts) -> + ?event( + caching, + {only_if_cached_execution_failed, {msg1, Msg1}, {msg2, Msg2}}, + Opts + ), + {error, + #{ + <<"status">> => 504, + <<"cache-status">> => <<"miss">>, + <<"body">> => + <<"Computed result not available in cache.">> + } + }. +``` + +### necessary_messages_not_found_error + +Generate a message to return when the necessary messages to execute a + +```erlang +necessary_messages_not_found_error(Msg1, Msg2, Opts) -> + ?event( + load_message, + {necessary_messages_not_found, {msg1, Msg1}, {msg2, Msg2}}, + Opts + ), + {error, + #{ + <<"status">> => 404, + <<"body">> => + <<"Necessary messages not found in cache.">> + } + }. +``` + +### exec_likely_faster_heuristic + +Determine whether we are likely to be faster looking up the result in + +```erlang +exec_likely_faster_heuristic(M1, _M2, _) when (not ?IS_ID(M1)) -> + true; +``` + +### exec_likely_faster_heuristic + +Determine whether we are likely to be faster looking up the result in + +```erlang +exec_likely_faster_heuristic({as, _, Msg1}, Msg2, Opts) -> + exec_likely_faster_heuristic(Msg1, Msg2, Opts); +``` + +### exec_likely_faster_heuristic + +Determine whether we are likely to be faster looking up the result in + +```erlang +exec_likely_faster_heuristic(Msg1, Msg2, Opts) -> + case hb_opts:get(cache_lookup_hueristics, true, Opts) of + false -> false; + true -> + case ?IS_ID(Msg1) of + true -> false; + false -> is_explicit_lookup(Msg1, Msg2, Opts) + end + end. +``` + +### is_explicit_lookup + +```erlang +is_explicit_lookup(Msg1, #{ <<"path">> := Key }, Opts) -> + % For now, just check whether the key is explicitly in the map. That is + % a good signal that we will likely be asked by the device to grab it. 
+``` + +### derive_cache_settings + +Derive cache settings from a series of option sources and the opts, + +```erlang +derive_cache_settings(SourceList, Opts) -> + lists:foldr( + fun(Source, Acc) -> + maybe_set(Acc, cache_source_to_cache_settings(Source, Opts), Opts) + end, + #{ <<"store">> => ?DEFAULT_STORE_OPT, <<"lookup">> => ?DEFAULT_LOOKUP_OPT }, + [{opts, Opts}|lists:filter(fun erlang:is_map/1, SourceList)] + ). +``` + +### maybe_set + +Takes a key and two maps, returning the first map with the key set to + +```erlang +maybe_set(Map1, Map2, Opts) -> + lists:foldl( + fun(Key, AccMap) -> + case hb_maps:get(Key, Map2, undefined, Opts) of + undefined -> AccMap; + Value -> hb_maps:put(Key, Value, AccMap, Opts) + end + end, + Map1, + hb_maps:keys(Map2, Opts) + ). +``` + +### cache_source_to_cache_settings + +Convert a cache source to a cache setting. The setting _must_ always be + +```erlang +cache_source_to_cache_settings({opts, Opts}, _) -> + CCMap = specifiers_to_cache_settings(hb_opts:get(cache_control, [], Opts)), + case hb_opts:get(hashpath, update, Opts) of + ignore -> CCMap#{ <<"store">> => false }; + _ -> CCMap + end; +``` + +### cache_source_to_cache_settings + +Convert a cache source to a cache setting. The setting _must_ always be + +```erlang +cache_source_to_cache_settings(Msg, Opts) -> + case dev_message:get(<<"cache-control">>, Msg, Opts) of + {ok, CC} -> specifiers_to_cache_settings(CC); + {error, not_found} -> #{} + end. 
+``` + +### specifiers_to_cache_settings + +Convert a cache control list as received via HTTP headers into a + +```erlang +specifiers_to_cache_settings(CCSpecifier) when not is_list(CCSpecifier) -> + specifiers_to_cache_settings([CCSpecifier]); +``` + +### specifiers_to_cache_settings + +Convert a cache control list as received via HTTP headers into a + +```erlang +specifiers_to_cache_settings(RawCCList) -> + CCList = lists:map(fun hb_ao:normalize_key/1, RawCCList), + #{ + <<"store">> => + case lists:member(<<"always">>, CCList) of + true -> true; + false -> + case lists:member(<<"no-store">>, CCList) of + true -> false; + false -> + case lists:member(<<"store">>, CCList) of + true -> true; + false -> undefined + end + end + end, + <<"lookup">> => + case lists:member(<<"always">>, CCList) of + true -> true; + false -> + case lists:member(<<"no-cache">>, CCList) of + true -> false; + false -> + case lists:member(<<"cache">>, CCList) of + true -> true; + false -> undefined + end + end + end, + <<"only-if-cached">> => + case lists:member(<<"only-if-cached">>, CCList) of + true -> true; + false -> undefined + end + }. +``` + +### msg_with_cc + +```erlang +msg_with_cc(CC) -> #{ <<"cache-control">> => CC }. +``` + +### opts_with_cc + +```erlang +opts_with_cc(CC) -> #{ cache_control => CC }. +%% Test precedence order (Opts > Msg3 > Msg2) +``` + +### opts_override_message_settings_test + +```erlang +opts_override_message_settings_test() -> + Msg2 = msg_with_cc([<<"no-store">>]), + Msg3 = msg_with_cc([<<"no-cache">>]), + Opts = opts_with_cc([<<"always">>]), + Result = derive_cache_settings([Msg3, Msg2], Opts), + ?assertEqual(#{<<"store">> => true, <<"lookup">> => true}, Result). 
+``` + +### msg_precidence_overrides_test + +```erlang +msg_precidence_overrides_test() -> + Msg2 = msg_with_cc([<<"always">>]), + Msg3 = msg_with_cc([<<"no-store">>]), % No restrictions + Result = derive_cache_settings([Msg3, Msg2], opts_with_cc([])), + ?assertEqual(#{<<"store">> => false, <<"lookup">> => true}, Result). +%% Test specific directives +``` + +### no_store_directive_test + +```erlang +no_store_directive_test() -> + Msg = msg_with_cc([<<"no-store">>]), + Result = derive_cache_settings([Msg], opts_with_cc([])), + ?assertEqual(#{<<"store">> => false, <<"lookup">> => ?DEFAULT_LOOKUP_OPT}, Result). +``` + +### no_cache_directive_test + +```erlang +no_cache_directive_test() -> + Msg = msg_with_cc([<<"no-cache">>]), + Result = derive_cache_settings([Msg], opts_with_cc([])), + ?assertEqual(#{<<"store">> => ?DEFAULT_STORE_OPT, <<"lookup">> => false}, Result). +``` + +### only_if_cached_directive_test + +```erlang +only_if_cached_directive_test() -> + Msg = msg_with_cc([<<"only-if-cached">>]), + Result = derive_cache_settings([Msg], opts_with_cc([])), + ?assertEqual( + #{ + <<"store">> => ?DEFAULT_STORE_OPT, + <<"lookup">> => ?DEFAULT_LOOKUP_OPT, + <<"only-if-cached">> => true + }, + Result + ). +``` + +### hashpath_ignore_prevents_storage_test + +```erlang +hashpath_ignore_prevents_storage_test() -> + Opts = (opts_with_cc([]))#{hashpath => ignore}, + Result = derive_cache_settings([], Opts), + ?assertEqual(#{<<"store">> => ?DEFAULT_STORE_OPT, <<"lookup">> => ?DEFAULT_LOOKUP_OPT}, Result). +%% Test multiple directives +``` + +### multiple_directives_test + +```erlang +multiple_directives_test() -> + Msg = msg_with_cc([<<"no-store">>, <<"no-cache">>, <<"only-if-cached">>]), + Result = derive_cache_settings([Msg], opts_with_cc([])), + ?assertEqual( + #{ + <<"store">> => false, + <<"lookup">> => false, + <<"only-if-cached">> => true + }, + Result + ). 
+``` + +### empty_message_list_test + +```erlang +empty_message_list_test() -> + Result = derive_cache_settings([], opts_with_cc([])), + ?assertEqual(#{<<"store">> => ?DEFAULT_STORE_OPT, <<"lookup">> => ?DEFAULT_LOOKUP_OPT}, Result). +``` + +### message_without_cache_control_test + +```erlang +message_without_cache_control_test() -> + Result = derive_cache_settings([#{}], opts_with_cc([])), + ?assertEqual(#{<<"store">> => ?DEFAULT_STORE_OPT, <<"lookup">> => ?DEFAULT_LOOKUP_OPT}, Result). +%% Test the cache_source_to_cache_setting function directly +``` + +### opts_source_cache_control_test + +```erlang +opts_source_cache_control_test() -> + Result = + cache_source_to_cache_settings( + {opts, opts_with_cc([<<"no-store">>])}, + #{} + ), + ?assertEqual(#{ + <<"store">> => false, + <<"lookup">> => undefined, + <<"only-if-cached">> => undefined + }, Result). +``` + +### message_source_cache_control_test + +```erlang +message_source_cache_control_test() -> + Msg = msg_with_cc([<<"no-cache">>]), + Result = cache_source_to_cache_settings(Msg, #{}), + ?assertEqual(#{ + <<"store">> => undefined, + <<"lookup">> => false, + <<"only-if-cached">> => undefined + }, Result). +``` + +### cache_binary_result_test + +```erlang +cache_binary_result_test() -> + CachedMsg = <<"test-message">>, + Msg1 = #{ <<"test-key">> => CachedMsg }, + Msg2 = <<"test-key">>, + {ok, Res} = hb_ao:resolve(Msg1, Msg2, #{ cache_control => [<<"always">>] }), + ?assertEqual(CachedMsg, Res), + {ok, Res2} = hb_ao:resolve(Msg1, Msg2, #{ cache_control => [<<"only-if-cached">>] }), + {ok, Res3} = hb_ao:resolve(Msg1, Msg2, #{ cache_control => [<<"only-if-cached">>] }), + ?assertEqual(CachedMsg, Res2), + ?assertEqual(Res2, Res3). 
+``` + +### cache_message_result_test + +```erlang +cache_message_result_test() -> + CachedMsg = + #{ + <<"purpose">> => <<"Test-Message">>, + <<"aux">> => #{ <<"aux-message">> => <<"Aux-Message-Value">> }, + <<"test-key">> => rand:uniform(1000000) + }, + Msg1 = #{ <<"test-key">> => CachedMsg, <<"local">> => <<"Binary">> }, + Msg2 = <<"test-key">>, + {ok, Res} = + hb_ao:resolve( + Msg1, + Msg2, + #{ + cache_control => [<<"always">>] + } + ), + ?event({res1, Res}), + ?event(reading_from_cache), + {ok, Res2} = hb_ao:resolve(Msg1, Msg2, #{ cache_control => [<<"only-if-cached">>] }), + ?event(reading_from_cache_again), + {ok, Res3} = hb_ao:resolve(Msg1, Msg2, #{ cache_control => [<<"only-if-cached">>] }), + ?event({res2, Res2}), + ?event({res3, Res3}), +``` + +--- + +*Generated from [hb_cache_control.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_cache_control.erl)* diff --git a/docs/book/src/hb_cache_render.erl.md b/docs/book/src/hb_cache_render.erl.md new file mode 100644 index 000000000..8cc1bd236 --- /dev/null +++ b/docs/book/src/hb_cache_render.erl.md @@ -0,0 +1,511 @@ +# hb_cache_render + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_cache_render.erl) + +A module that helps to render given Key graphs into the .dot files + +--- + +## Exported Functions + +- `cache_path_to_dot/2` +- `cache_path_to_dot/3` +- `cache_path_to_graph/3` +- `dot_to_svg/1` +- `get_graph_data/3` +- `prepare_signed_data/0` +- `prepare_unsigned_data/0` +- `render/1` +- `render/2` + +--- + +### render + +A module that helps to render given Key graphs into the .dot files +Render the given Key into svg + +```erlang +render(StoreOrOpts) -> + render(all, StoreOrOpts). 
+``` + +### render + +```erlang +render(ToRender, StoreOrOpts) -> + % Collect graph elements (nodes and arcs) by traversing the store + % Generate and view the graph visualization + % Write SVG to file and open it + file:write_file("new_render_diagram.svg", + dot_to_svg(cache_path_to_dot(ToRender, StoreOrOpts))), + os:cmd("open new_render_diagram.svg"), + ok. +``` + +### cache_path_to_dot + +Generate a dot file from a cache path and options/store + +```erlang +cache_path_to_dot(ToRender, StoreOrOpts) -> + cache_path_to_dot(ToRender, #{}, StoreOrOpts). +``` + +### cache_path_to_dot + +```erlang +cache_path_to_dot(ToRender, RenderOpts, StoreOrOpts) -> + graph_to_dot(cache_path_to_graph(ToRender, RenderOpts, StoreOrOpts), StoreOrOpts). +``` + +### cache_path_to_graph + +Main function to collect graph elements + +```erlang +cache_path_to_graph(ToRender, GraphOpts, StoreOrOpts) when is_map(StoreOrOpts) -> + Store = hb_opts:get(store, no_viable_store, StoreOrOpts), + ?event({store, Store}), + cache_path_to_graph(ToRender, GraphOpts, Store, StoreOrOpts). +``` + +### cache_path_to_graph + +```erlang +cache_path_to_graph(all, GraphOpts, Store, Opts) -> + Keys = + case hb_store:list(Store, <<"/">>) of + {ok, KeyList} -> KeyList; + not_found -> [] + end, + ?event({all_keys, Keys}), + cache_path_to_graph(Store, GraphOpts, Keys, Opts); +``` + +### cache_path_to_graph + +```erlang +cache_path_to_graph(InitPath, GraphOpts, Store, Opts) when is_binary(InitPath) -> + cache_path_to_graph(Store, GraphOpts, [InitPath], Opts); +``` + +### cache_path_to_graph + +```erlang +cache_path_to_graph(Store, GraphOpts, RootKeys, Opts) -> + % Use a map to track nodes, arcs and visited paths (to avoid cycles) + EmptyGraph = GraphOpts#{ nodes => #{}, arcs => #{}, visited => #{} }, + % Process all root keys and get the final graph + lists:foldl( + fun(Key, Acc) -> traverse_store(Store, Key, undefined, Acc, Opts) end, + EmptyGraph, + RootKeys + ). 
+``` + +### traverse_store + +Traverse the store recursively to build the graph + +```erlang +traverse_store(Store, Path, Parent, Graph, Opts) -> + % Get the path and check if we've already visited it + JoinedPath = hb_store:join(Path), + ResolvedPath = + case hb_link:is_link_key(JoinedPath) of + true -> + ?event({is_link_key, {path, Path}, {res_path, JoinedPath}}), + {ok, Link} = hb_store:read(Store, hb_store:resolve(Store, JoinedPath)), + ?event({resolved_link, {read, Link}}), + hb_store:resolve(Store, Link); + false -> hb_store:resolve(Store, Path) + end, + ?event({traverse_store, {path, Path}, {joined_path, JoinedPath}, {resolved_path, ResolvedPath}, {parent, Parent}}), + % Skip if we've already processed this node + case hb_maps:get(visited, Graph, #{}, Opts) of + #{ JoinedPath := _ } -> Graph; + _ -> + % Mark as visited to avoid cycles + Graph1 = Graph#{visited => hb_maps:put(JoinedPath, true, hb_maps:get(visited, Graph, #{}, Opts), Opts)}, + % ?event({traverse_store, {key, Key}, {graph1, Graph1}}), + % Process node based on its type + case hb_store:type(Store, ResolvedPath) of + simple -> + process_simple_node(Store, Path, Parent, ResolvedPath, JoinedPath, Graph1, Opts); + composite -> + process_composite_node(Store, Path, Parent, ResolvedPath, JoinedPath, Graph1, Opts); + _ -> + ?event({unknown_node_type, {path, Path}, {type, hb_store:type(Store, Path)}}), + Graph1 + end + end. 
+``` + +### process_simple_node + +Process a simple (leaf) node + +```erlang +process_simple_node(_Store, _Key, Parent, ResolvedPath, JoinedPath, Graph, Opts) -> + % ?event({process_simple_node, {key, Key}, {resolved_path, ResolvedPath}}), + % Add the node to the graph + case hb_maps:get(render_data, Graph, true, Opts) of + false -> Graph; + true -> + Graph1 = add_node(Graph, ResolvedPath, "lightblue", Opts), + % If we have a parent, add an arc from parent to this node + case Parent of + undefined -> Graph1; + ParentPath -> + Label = extract_label(JoinedPath), + add_arc(Graph1, ParentPath, ResolvedPath, Label, Opts) + end + end. +``` + +### process_composite_node + +Process a composite (directory) node + +```erlang +process_composite_node(_Store, <<"data">>, _Parent, _ResolvedPath, _JoinedPath, Graph, _Opts) -> + % Data is a special case: It contains every binary item in the store. +``` + +### process_composite_node + +```erlang +process_composite_node(Store, _Key, Parent, ResolvedPath, JoinedPath, Graph, Opts) -> + % Add the node to the graph + Graph1 = add_node(Graph, ResolvedPath, "lightcoral", Opts), + % If we have a parent, add an arc from parent to this node + Graph2 = case Parent of + undefined -> Graph1; + ParentPath -> + Label = extract_label(JoinedPath), + add_arc(Graph1, ParentPath, ResolvedPath, Label, Opts) + end, + % Process children recursively + case hb_store:list(Store, ResolvedPath) of + {ok, SubItems} -> + lists:foldl( + fun(SubItem, Acc) -> + ChildKey = [ResolvedPath, SubItem], + traverse_store(Store, ChildKey, ResolvedPath, Acc, Opts) + end, + Graph2, + SubItems + ); + _ -> Graph2 + end. +``` + +### add_node + +Add a node to the graph + +```erlang +add_node(Graph, ID, Color, Opts) -> + Nodes = hb_maps:get(nodes, Graph, #{}, Opts), + Graph#{nodes => hb_maps:put(ID, {ID, Color}, Nodes, Opts)}. 
+``` + +### add_arc + +Add an arc to the graph + +```erlang +add_arc(Graph, From, To, Label, Opts) -> + ?event({insert_arc, {id1, From}, {id2, To}, {label, Label}}), + Arcs = hb_maps:get(arcs, Graph, #{}, Opts), + Graph#{arcs => hb_maps:put({From, To, Label}, true, Arcs, Opts)}. +``` + +### extract_label + +Extract a label from a path + +```erlang +extract_label(Path) -> + case binary:split(Path, <<"/">>, [global]) of + [] -> Path; + Parts -> + FilteredParts = [P || P <- Parts, P /= <<>>], + case FilteredParts of + [] -> Path; + _ -> lists:last(FilteredParts) + end + end. +``` + +### graph_to_dot + +Generate the DOT file from the graph + +```erlang +graph_to_dot(Graph, Opts) -> + % Create graph header + Header = [ + <<"digraph filesystem {\n">>, + <<" node [shape=circle];\n">> + ], + % Create nodes section + Nodes = hb_maps:fold( + fun(ID, {Label, Color}, Acc) -> + [ + Acc, + io_lib:format( + <<" \"~s\" [label=\"~s\", color=~s, style=filled];~n">>, + [ID, hb_format:short_id(hb_util:bin(Label)), Color] + ) + ] + end, + [], + hb_maps:get(nodes, Graph, #{}, Opts), + Opts + ), + % Create arcs section + Arcs = hb_maps:fold( + fun({From, To, Label}, _, Acc) -> + [ + Acc, + io_lib:format( + <<" \"~s\" -> \"~s\" [label=\"~s\"];~n">>, + [From, To, hb_format:short_id(hb_util:bin(Label))] + ) + ] + end, + [], + hb_maps:get(arcs, Graph, #{}, Opts), + Opts + ), + % Create graph footer + Footer = <<"}\n">>, + % Combine all parts and convert to binary + iolist_to_binary([Header, Nodes, Arcs, Footer]). +``` + +### dot_to_svg + +Convert a dot graph to SVG format + +```erlang +dot_to_svg(DotInput) -> + % Create a port to the dot command + Port = open_port({spawn, "dot -Tsvg"}, [binary, use_stdio, stderr_to_stdout]), + % Send the dot content to the process + true = port_command(Port, iolist_to_binary(DotInput)), + % Get the SVG output + collect_output(Port, []). 
+``` + +### collect_output + +Helper function to collect output from port + +```erlang +collect_output(Port, Acc) -> + receive + {Port, {data, Data}} -> + case binary:part(Data, byte_size(Data) - 7, 7) of + <<"\n">> -> + port_close(Port), + iolist_to_binary(lists:reverse([Data | Acc])); + _ -> collect_output(Port, [Data | Acc]) + end; + {Port, eof} -> + port_close(Port), + iolist_to_binary(lists:reverse(Acc)) + after 10000 -> + {error, timeout} + end. +``` + +### get_graph_data + +Get graph data for the Three.js visualization + +```erlang +get_graph_data(Base, MaxSize, Opts) -> + % Try to generate graph using hb_cache_render + Graph = + try + % Use hb_cache_render to build the graph + cache_path_to_graph(Base, #{}, Opts) + catch + Error:Reason:Stack -> + ?event({hyperbuddy_graph_error, Error, Reason, Stack}), + #{nodes => #{}, arcs => #{}, visited => #{}} + end, + % Extract nodes and links for the visualization + NodesMap = maps:get(nodes, Graph, #{}), + ArcsMap = maps:get(arcs, Graph, #{}), + % Limit to top `MaxSize` nodes if there are too many + NodesList = + case maps:size(NodesMap) > MaxSize of + true -> + % Take a subset of nodes + {ReducedNodes, _} = lists:split( + MaxSize, + maps:to_list(NodesMap) + ), + ReducedNodes; + false -> + maps:to_list(NodesMap) + end, + % Get node IDs for filtering links + NodeIds = [ID || {ID, _} <- NodesList], + % Convert to JSON format for web visualization + Nodes = + [ + #{ + <<"id">> => ID, + <<"label">> => get_label(hb_util:bin(ID)), + <<"type">> => get_node_type(Color) + } + || + {ID, {_, Color}} <- NodesList + ], + % Filter links to only include those between nodes we're showing + FilteredLinks = + [ + {From, To, Label} + || + {From, To, Label} <- maps:keys(ArcsMap), + lists:member(From, NodeIds) + andalso lists:member(To, NodeIds) + ], + Links = + [ + #{ + <<"source">> => From, + <<"target">> => To, + <<"label">> => Label + } + || + {From, To, Label} <- FilteredLinks + ], + % Return the JSON data + JsonData = 
hb_json:encode(#{ <<"nodes">> => Nodes, <<"links">> => Links }), + {ok, #{ + <<"body">> => JsonData, + <<"content-type">> => <<"application/json">> + }}. +``` + +### get_node_type + +Convert node color from hb_cache_render to node type for visualization + +```erlang +get_node_type(Color) -> + case Color of + "lightblue" -> <<"simple">>; + "lightcoral" -> <<"composite">>; + _ -> <<"unknown">> + end. +``` + +### get_label + +Extract a readable label from a path + +```erlang +get_label(Path) -> + case binary:split(Path, <<"/">>, [global]) of + [] -> Path; + Parts -> + FilteredParts = [P || P <- Parts, P /= <<>>], + case FilteredParts of + [] -> Path; + _ -> lists:last(FilteredParts) + end + end. +``` + +### prepare_unsigned_data + +```erlang +prepare_unsigned_data() -> + Opts = #{ + store => #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST/render-fs">> + } + }, + Item = test_unsigned(#{ <<"key">> => <<"Simple unsigned data item">> }), + {ok, _Path} = hb_cache:write(Item, Opts). +``` + +### prepare_signed_data + +```erlang +prepare_signed_data() -> + Opts = #{ + store => #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST/render-fs">> + } + }, + Wallet = ar_wallet:new(), + Item = test_signed(#{ <<"l2-test-key">> => <<"l2-test-value">> }, Wallet), + %% Write the simple unsigned item + {ok, _Path} = hb_cache:write(Item, Opts). 
+``` + +### prepare_deeply_nested_complex_message + +```erlang +prepare_deeply_nested_complex_message() -> + Opts = #{ + store => #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST/render-fs">> + } + }, + Wallet = ar_wallet:new(), + %% Create nested data + Level3SignedSubmessage = test_signed([1,2,3], Wallet), + Outer = + #{ + <<"level1">> => + hb_message:commit( + #{ + <<"level2">> => + #{ + <<"level3">> => Level3SignedSubmessage, + <<"e">> => <<"f">>, + <<"z">> => [1,2,3] + }, + <<"c">> => <<"d">>, + <<"g">> => [<<"h">>, <<"i">>], + <<"j">> => 1337 + }, + ar_wallet:new() + ), + <<"a">> => <<"b">> + }, + %% Write the nested item + {ok, _} = hb_cache:write(Outer, Opts). +``` + +### test_unsigned + +```erlang +test_unsigned(Data) -> + #{ + <<"base-test-key">> => <<"base-test-value">>, + <<"data">> => Data + }. +``` + +### test_signed + +```erlang +test_signed(Data, Wallet) -> +``` + +--- + +*Generated from [hb_cache_render.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_cache_render.erl)* diff --git a/docs/book/src/hb_client.erl.md b/docs/book/src/hb_client.erl.md new file mode 100644 index 000000000..0b206fcbe --- /dev/null +++ b/docs/book/src/hb_client.erl.md @@ -0,0 +1,244 @@ +# hb_client + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_client.erl) + +AO-Core API and HyperBEAM Built-In Devices + +--- + +## Exported Functions + +- `add_route/3` +- `arweave_timestamp/0` +- `resolve/4` +- `routes/2` +- `upload/2` +- `upload/3` + +--- + +### resolve + +Resolve a message pair on a remote node. + +```erlang +resolve(Node, Msg1, Msg2, Opts) -> + TABM2 = + hb_ao:set( + #{ + <<"path">> => hb_ao:get(<<"path">>, Msg2, <<"/">>, Opts), + <<"2.path">> => unset + }, + prefix_keys(<<"2.">>, Msg2, Opts), + Opts#{ hashpath => ignore } + ), + hb_http:post( + Node, + hb_maps:merge(prefix_keys(<<"1.">>, Msg1, Opts), TABM2, Opts), + Opts + ). 
+``` + +### prefix_keys + +```erlang +prefix_keys(Prefix, Message, Opts) -> + hb_maps:fold( + fun(Key, Val, Acc) -> + hb_maps:put(<>, Val, Acc, Opts) + end, + #{}, + hb_message:convert(Message, tabm, Opts), + Opts + ). +``` + +### routes + +```erlang +routes(Node, Opts) -> + resolve(Node, + #{ + <<"device">> => <<"Router@1.0">> + }, + #{ + <<"path">> => <<"routes">>, + <<"method">> => <<"GET">> + }, + Opts + ). +``` + +### add_route + +```erlang +add_route(Node, Route, Opts) -> + resolve(Node, + Route#{ + <<"device">> => <<"Router@1.0">> + }, + #{ + <<"path">> => <<"routes">>, + <<"method">> => <<"POST">> + }, + Opts + ). +``` + +### arweave_timestamp + +Grab the latest block information from the Arweave gateway node. + +```erlang +arweave_timestamp() -> + case hb_opts:get(mode) of + debug -> {0, 0, hb_util:human_id(<<0:256>>)}; + prod -> + {ok, {{_, 200, _}, _, Body}} = + httpc:request( + <<(hb_opts:get(gateway))/binary, "/block/current">> + ), + Fields = hb_json:decode(hb_util:bin(Body)), + Timestamp = hb_maps:get(<<"timestamp">>, Fields), + Hash = hb_maps:get(<<"indep_hash">>, Fields), + Height = hb_maps:get(<<"height">>, Fields), + {Timestamp, Height, Hash} + end. +``` + +### upload + +Upload a data item to the bundler node. + +```erlang +upload(Msg, Opts) -> + UploadResults = + lists:map( + fun(Device) -> + upload(Msg, Opts, Device) + end, + hb_message:commitment_devices(Msg, Opts) + ), + {ok, UploadResults}. 
+``` + +### upload + +```erlang +upload(Msg, Opts, <<"httpsig@1.0">>) -> + case hb_opts:get(bundler_httpsig, not_found, Opts) of + not_found -> + {error, no_httpsig_bundler}; + Bundler -> + ?event({uploading_item, Msg}), + hb_http:post(Bundler, <<"/tx">>, Msg, Opts) + end; +``` + +### upload + +```erlang +upload(Msg, Opts, <<"ans104@1.0">>) when is_map(Msg) -> + ?event({msg_to_convert, Msg}), + Converted = hb_message:convert(Msg, <<"ans104@1.0">>, Opts), + ?event({msg_to_tx_res, {converted, Converted}}), + Serialized = ar_bundles:serialize(Converted), + ?event({converted_msg_to_tx, Serialized}), + upload(Serialized, Opts, <<"ans104@1.0">>); +``` + +### upload + +```erlang +upload(Serialized, Opts, <<"ans104@1.0">>) when is_binary(Serialized) -> + ?event({uploading_item, Serialized}), + hb_http:post( + hb_opts:get(bundler_ans104, not_found, Opts), + #{ + <<"path">> => <<"/tx">>, + <<"content-type">> => <<"application/octet-stream">>, + <<"body">> => Serialized + }, + Opts#{ + http_client => + hb_opts:get(bundler_ans104_http_client, httpc, Opts) + } + ). +``` + +### upload_empty_raw_ans104_test + +```erlang +upload_empty_raw_ans104_test() -> + Serialized = ar_bundles:serialize( + ar_bundles:sign_item(#tx{ + data = <<"TEST">> + }, hb:wallet()) + ), + ?event({uploading_item, Serialized}), + Result = upload(Serialized, #{}, <<"ans104@1.0">>), + ?event({upload_result, Result}), + ?assertMatch({ok, _}, Result). +``` + +### upload_raw_ans104_test + +```erlang +upload_raw_ans104_test() -> + Serialized = ar_bundles:serialize( + ar_bundles:sign_item(#tx{ + data = <<"TEST">>, + tags = [{<<"test-tag">>, <<"test-value">>}] + }, hb:wallet()) + ), + ?event({uploading_item, Serialized}), + Result = upload(Serialized, #{}, <<"ans104@1.0">>), + ?event({upload_result, Result}), + ?assertMatch({ok, _}, Result). 
+``` + +### upload_raw_ans104_with_anchor_test + +```erlang +upload_raw_ans104_with_anchor_test() -> + Serialized = ar_bundles:serialize( + ar_bundles:sign_item(#tx{ + data = <<"TEST">>, + anchor = crypto:strong_rand_bytes(32), + tags = [{<<"test-tag">>, <<"test-value">>}] + }, hb:wallet()) + ), + ?event({uploading_item, Serialized}), + Result = upload(Serialized, #{}, <<"ans104@1.0">>), + ?event({upload_result, Result}), + ?assertMatch({ok, _}, Result). +``` + +### upload_empty_message_test + +```erlang +upload_empty_message_test() -> + Msg = #{ <<"data">> => <<"TEST">> }, + Committed = hb_message:commit(Msg, hb:wallet(), <<"ans104@1.0">>), + Result = upload(Committed, #{}, <<"ans104@1.0">>), + ?event({upload_result, Result}), + ?assertMatch({ok, _}, Result). +``` + +### upload_single_layer_message_test + +```erlang +upload_single_layer_message_test() -> + Msg = #{ + <<"data">> => <<"TEST">>, + <<"basic">> => <<"value">>, + <<"integer">> => 1 + }, + Committed = hb_message:commit(Msg, hb:wallet(), <<"ans104@1.0">>), + Result = upload(Committed, #{}, <<"ans104@1.0">>), + ?event({upload_result, Result}), +``` + +--- + +*Generated from [hb_client.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_client.erl)* diff --git a/docs/book/src/hb_crypto.erl.md b/docs/book/src/hb_crypto.erl.md new file mode 100644 index 000000000..723ce586b --- /dev/null +++ b/docs/book/src/hb_crypto.erl.md @@ -0,0 +1,143 @@ +# hb_crypto + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_crypto.erl) + +Implements the cryptographic functions and wraps the primitives +used in HyperBEAM. Abstracted such that this (extremely!) dangerous code +can be carefully managed. +HyperBEAM currently implements two hashpath algorithms: +* `sha-256-chain`: A simple chained SHA-256 hash. +* `accumulate-256`: A SHA-256 hash that chains the given IDs and accumulates + their values into a single commitment. 
+The accumulate algorithm is experimental and at this point only exists to +allow us to test multiple HashPath algorithms in HyperBEAM. + +--- + +## Exported Functions + +- `accumulate/1` +- `accumulate/2` +- `pbkdf2/5` +- `sha256_chain/2` +- `sha256/1` + +--- + +### sha256_chain + +Implements the cryptographic functions and wraps the primitives +Add a new ID to the end of a SHA-256 hash chain. + +```erlang +sha256_chain(ID1, ID2) when ?IS_ID(ID1) -> + sha256(<>); +``` + +### sha256_chain + +Implements the cryptographic functions and wraps the primitives +Add a new ID to the end of a SHA-256 hash chain. + +```erlang +sha256_chain(ID1, ID2) -> + throw({cannot_chain_bad_ids, ID1, ID2}). +``` + +### accumulate + +Accumulate two IDs, or a list of IDs, into a single commitment. This + +```erlang +accumulate(IDs) when is_list(IDs) -> + lists:foldl(fun accumulate/2, << 0:256 >>, IDs). +``` + +### accumulate + +```erlang +accumulate(ID1 = << ID1Int:256 >>, ID2 = << ID2Int:256 >>) + when (byte_size(ID1) =:= 32) and (byte_size(ID2) =:= 32) -> + << (ID1Int + ID2Int):256 >>; +``` + +### accumulate + +```erlang +accumulate(ID1, ID2) -> + throw({cannot_accumulate_bad_ids, ID1, ID2}). +``` + +### sha256 + +Wrap Erlang's `crypto:hash/2` to provide a standard interface. + +```erlang +sha256(Data) -> + crypto:hash(sha256, Data). +``` + +### pbkdf2 + +Wrap Erlang's `crypto:pbkdf2_hmac/5` to provide a standard interface. + +```erlang +pbkdf2(Alg, Password, Salt, Iterations, KeyLength) -> + case crypto:pbkdf2_hmac(Alg, Password, Salt, Iterations, KeyLength) of + Key when is_binary(Key) -> {ok, Key}; + {Tag, CFileInfo, Desc} -> + ?event( + {pbkdf2_error, + {tag, Tag}, + {desc, Desc}, + {c_file_info, CFileInfo} + } + ), + {error, Desc} + end. +``` + +### count_zeroes + +Count the number of leading zeroes in a bitstring. + +```erlang +count_zeroes(<<>>) -> + 0; +``` + +### count_zeroes + +Count the number of leading zeroes in a bitstring. 
+ +```erlang +count_zeroes(<<0:1, Rest/bitstring>>) -> + 1 + count_zeroes(Rest); +``` + +### count_zeroes + +Count the number of leading zeroes in a bitstring. + +```erlang +count_zeroes(<<_:1, Rest/bitstring>>) -> + count_zeroes(Rest). +``` + +### sha256_chain_test + +Check that `sha-256-chain` correctly produces a hash matching + +```erlang +sha256_chain_test() -> + ID1 = <<1:256>>, + ID2 = <<2:256>>, + ID3 = sha256_chain(ID1, ID2), + HashBase = << ID1/binary, ID2/binary >>, + ?assertEqual(ID3, crypto:hash(sha256, HashBase)), + % Basic entropy check. +``` + +--- + +*Generated from [hb_crypto.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_crypto.erl)* diff --git a/docs/book/src/hb_debugger.erl.md b/docs/book/src/hb_debugger.erl.md new file mode 100644 index 000000000..42a1949d7 --- /dev/null +++ b/docs/book/src/hb_debugger.erl.md @@ -0,0 +1,242 @@ +# hb_debugger + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_debugger.erl) + +A module that provides bootstrapping interfaces for external debuggers +to connect to HyperBEAM. +The simplest way to utilize an external graphical debugger is to use the +`erlang-ls` extension for VS Code, Emacs, or other Language Server Protocol +(LSP) compatible editors. This repository contains a `launch.json` +configuration file for VS Code that can be used to spawn a new HyperBEAM, +attach the debugger to it, and execute the specified `Module:Function(Args)`. +Additionally, the node can be started with `rebar3 debugging` in order to +allow access to the console while also allowing the debugger to attach. +Boot time is approximately 10 seconds. + +--- + +## Exported Functions + +- `await_breakpoint/0` +- `profile_and_stop/1` +- `start_and_break/2` +- `start_and_break/3` +- `start_and_break/4` +- `start/0` + +--- + +### profile_and_stop + +A module that provides bootstrapping interfaces for external debuggers +Profile a function with eflame and stop the node. 
+ +```erlang +profile_and_stop(Fun) -> + {ok, F} = file:open("profiling-output", [write]), + group_leader(F, self()), + io:format("profiling-output: started.~n"), + io:format("Profiling function: ~p.~n", [Fun]), + Res = + dev_profile:eval( + Fun, + #{ <<"return-mode">> => <<"open">>, <<"engine">> => <<"eflame">> }, + #{} + ), + io:format("Profiling complete. Res: ~p~n", [Res]), + init:stop(), + erlang:halt(). +``` + +### start + +```erlang +start() -> + io:format("Starting debugger...~n", []), + DebuggerRes = application:ensure_all_started(debugger), + io:format("Started debugger server. Result: ~p.~n", [DebuggerRes]), + io:format( + "Waiting for debugger. Node is: ~p. Cookie is: ~p.~n", + [node(), erlang:get_cookie()] + ), + await_debugger(). +``` + +### interpret + +Attempt to interpret a specified module to load it into the debugger. + +```erlang +interpret(Module) -> + Parent = self(), + spawn(fun() -> + case int:interpretable(Module) of + true -> + try Parent ! {interpreted, Module, int:i(Module) == ok} + catch _:_ -> + io:format("Could not load module: ~p.~n", [Module]), + false + end; + Error -> + io:format( + "Could not interpret module: ~p. Error: ~p.~n", + [Module, Error] + ), + false + end + end), + receive {interpreted, Module, Res} -> Res + after 250 -> false + end. +``` + +### interpret_modules + +Interpret modules from a list of atom prefixes. + +```erlang +interpret_modules(Prefixes) when is_binary(Prefixes) -> + interpret_modules(binary:split(Prefixes, <<",">>, [global, trim_all])); +``` + +### interpret_modules + +Interpret modules from a list of atom prefixes. 
+ +```erlang +interpret_modules(Prefixes) when is_list(Prefixes) -> + RelevantModules = + lists:filter( + fun(Mod) -> + ModBin = hb_util:bin(Mod), + lists:any( + fun(Prefix) -> + PrefixBin = hb_util:bin(Prefix), + binary:longest_common_prefix([ModBin, PrefixBin]) == + byte_size(PrefixBin) + end, + Prefixes + ) + end, + hb_util:all_hb_modules() + ), + io:format("Relevant modules: ~p.~n", [RelevantModules]), + lists:foreach( + fun(Mod) -> + io:format("Interpreting module: ~p.~n", [Mod]), + interpret(Mod) + end, + RelevantModules + ), + RelevantModules. +``` + +### start_and_break + +A bootstrapping function to wait for an external debugger to be attached, + +```erlang +start_and_break(Module, Function) -> + start_and_break(Module, Function, [], []). +``` + +### start_and_break + +```erlang +start_and_break(Module, Function, Args) -> + start_and_break(Module, Function, Args, []). +``` + +### start_and_break + +```erlang +start_and_break(Module, Function, Args, DebuggerScope) -> + timer:sleep(1000), + spawn(fun() -> + start(), + interpret(Module), + interpret_modules(DebuggerScope), + SetRes = int:break_in(Module, Function, length(Args)), + io:format( + "Breakpoint set. Result from `int:break_in/3`: ~p.~n", + [SetRes] + ), + io:format("Invoking function...~n", []), + apply(Module, Function, Args), + io:format("Function invoked. Terminating.~n", []), + init:stop(), + erlang:halt() + end). +``` + +### await_debugger + +Await a debugger to be attached to the node. + +```erlang +await_debugger() -> await_debugger(0). +``` + +### await_debugger + +Await a debugger to be attached to the node. + +```erlang +await_debugger(N) -> + case is_debugging_node_connected() of + false -> + timer:sleep(1000), + io:format("Still waiting for debugger after ~p seconds...~n", [N]), + await_debugger(N + 1); + Node -> + io:format( + "External node connection detected. Peer: ~p.~n", + [Node] + ), + N + end. 
+``` + +### is_debugging_node_connected + +Is another Distributed Erlang node connected to us? + +```erlang +is_debugging_node_connected() -> + case nodes() ++ nodes(hidden) of + [] -> false; + [Node | _] -> Node + end. +``` + +### await_breakpoint + +Await a new breakpoint being set by the debugger. + +```erlang +await_breakpoint() -> + case is_debugging_node_connected() of + false -> start(); + _ -> do_nothing + end, + await_breakpoint(0). +``` + +### await_breakpoint + +```erlang +await_breakpoint(N) -> + io:format("Waiting for breakpoint to be set in function...~n", []), + case int:all_breaks() of + [] -> + timer:sleep(1000), + io:format("Still waiting for breakpoint after ~p seconds...~n", [N]), + await_breakpoint(N + 1); + [Breakpoint | _] -> + io:format("Breakpoint set. Info: ~p.~n", [Breakpoint]), + Breakpoint +``` + +--- + +*Generated from [hb_debugger.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_debugger.erl)* diff --git a/docs/book/src/hb_escape.erl.md b/docs/book/src/hb_escape.erl.md new file mode 100644 index 000000000..41ec69161 --- /dev/null +++ b/docs/book/src/hb_escape.erl.md @@ -0,0 +1,394 @@ +# hb_escape + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_escape.erl) + +Functions for escaping and unescaping mixed case values, for use in HTTP +headers. Both percent-encoding and escaping of double-quoted strings +(`"` => `\"`) are supported. +This is necessary for encodings of AO-Core messages for transmission in +HTTP/2 and HTTP/3, because uppercase header keys are explicitly disallowed. +While most map keys in HyperBEAM are normalized to lowercase, IDs are not. +Subsequently, we encode all header keys to lowercase %-encoded URI-style +strings because transmission. 
+ +--- + +## Exported Functions + +- `decode_keys/2` +- `decode_quotes/1` +- `decode/1` +- `encode_keys/2` +- `encode_quotes/1` +- `encode/1` + +--- + +### encode + +Functions for escaping and unescaping mixed case values, for use in HTTP +Encode a binary as a URI-encoded string. + +```erlang +encode(Bin) when is_binary(Bin) -> + list_to_binary(percent_escape(binary_to_list(Bin))). +``` + +### decode + +Decode a URI-encoded string back to a binary. + +```erlang +decode(Bin) when is_binary(Bin) -> + list_to_binary(percent_unescape(binary_to_list(Bin))). +``` + +### encode_quotes + +Encode a string with escaped quotes. + +```erlang +encode_quotes(String) when is_binary(String) -> + list_to_binary(encode_quotes(binary_to_list(String))); +``` + +### encode_quotes + +Encode a string with escaped quotes. + +```erlang +encode_quotes([]) -> []; +``` + +### encode_quotes + +Encode a string with escaped quotes. + +```erlang +encode_quotes([$\" | Rest]) -> [$\\, $\" | encode_quotes(Rest)]; +``` + +### encode_quotes + +Encode a string with escaped quotes. +Decode a string with escaped quotes. + +```erlang +encode_quotes([C | Rest]) -> [C | encode_quotes(Rest)]. +``` + +### decode_quotes + +Encode a string with escaped quotes. +Decode a string with escaped quotes. + +```erlang +decode_quotes(String) when is_binary(String) -> + list_to_binary(decode_quotes(binary_to_list(String))); +``` + +### decode_quotes + +Encode a string with escaped quotes. +Decode a string with escaped quotes. + +```erlang +decode_quotes([]) -> []; +``` + +### decode_quotes + +Encode a string with escaped quotes. +Decode a string with escaped quotes. + +```erlang +decode_quotes([$\\, $\" | Rest]) -> [$\" | decode_quotes(Rest)]; +``` + +### decode_quotes + +Encode a string with escaped quotes. +Decode a string with escaped quotes. + +```erlang +decode_quotes([$\" | Rest]) -> decode_quotes(Rest); +``` + +### decode_quotes + +Encode a string with escaped quotes. +Decode a string with escaped quotes. 
+Return a message with all of its keys decoded. + +```erlang +decode_quotes([C | Rest]) -> [C | decode_quotes(Rest)]. +``` + +### decode_keys + +Encode a string with escaped quotes. +Decode a string with escaped quotes. +Return a message with all of its keys decoded. + +```erlang +decode_keys(Msg, Opts) when is_map(Msg) -> + hb_maps:from_list( + lists:map( + fun({Key, Value}) -> {decode(Key), Value} end, + hb_maps:to_list(Msg, Opts) + ) + ); +``` + +### decode_keys + +Encode a string with escaped quotes. +Decode a string with escaped quotes. +Return a message with all of its keys decoded. +URI encode keys in the base layer of a message. Does not recurse. + +```erlang +decode_keys(Other, _Opts) -> Other. +``` + +### encode_keys + +Encode a string with escaped quotes. +Decode a string with escaped quotes. +Return a message with all of its keys decoded. +URI encode keys in the base layer of a message. Does not recurse. + +```erlang +encode_keys(Msg, Opts) when is_map(Msg) -> + hb_maps:from_list( + lists:map( + fun({Key, Value}) -> {encode(Key), Value} end, + hb_maps:to_list(Msg, Opts) + ) + ); +``` + +### encode_keys + +Encode a string with escaped quotes. +Decode a string with escaped quotes. +Return a message with all of its keys decoded. +URI encode keys in the base layer of a message. Does not recurse. +Escape a list of characters as a URI-encoded string. + +```erlang +encode_keys(Other, _Opts) -> Other. +``` + +### percent_escape + +Encode a string with escaped quotes. +Decode a string with escaped quotes. +Return a message with all of its keys decoded. +URI encode keys in the base layer of a message. Does not recurse. +Escape a list of characters as a URI-encoded string. + +```erlang +percent_escape([]) -> []; +``` + +### percent_escape + +Encode a string with escaped quotes. +Decode a string with escaped quotes. +Return a message with all of its keys decoded. +URI encode keys in the base layer of a message. Does not recurse. 
+Escape a list of characters as a URI-encoded string. + +```erlang +percent_escape([C | Cs]) when C >= $a, C =< $z -> [C | percent_escape(Cs)]; +``` + +### percent_escape + +Encode a string with escaped quotes. +Decode a string with escaped quotes. +Return a message with all of its keys decoded. +URI encode keys in the base layer of a message. Does not recurse. +Escape a list of characters as a URI-encoded string. + +```erlang +percent_escape([C | Cs]) when C >= $0, C =< $9 -> [C | percent_escape(Cs)]; +``` + +### percent_escape + +Encode a string with escaped quotes. +Decode a string with escaped quotes. +Return a message with all of its keys decoded. +URI encode keys in the base layer of a message. Does not recurse. +Escape a list of characters as a URI-encoded string. + +```erlang +percent_escape([C | Cs]) when + C == $.; C == $-; C == $_; C == $/; + C == $?; C == $& -> + [C | percent_escape(Cs)]; +``` + +### percent_escape + +Encode a string with escaped quotes. +Decode a string with escaped quotes. +Return a message with all of its keys decoded. +URI encode keys in the base layer of a message. Does not recurse. +Escape a list of characters as a URI-encoded string. +Escape a single byte as a URI-encoded string. + +```erlang +percent_escape([C | Cs]) -> [escape_byte(C) | percent_escape(Cs)]. +``` + +### escape_byte + +Encode a string with escaped quotes. +Decode a string with escaped quotes. +Return a message with all of its keys decoded. +URI encode keys in the base layer of a message. Does not recurse. +Escape a list of characters as a URI-encoded string. +Escape a single byte as a URI-encoded string. + +```erlang +escape_byte(C) when C >= 0, C =< 255 -> + [$%, hex_digit(C bsr 4), hex_digit(C band 15)]. +``` + +### hex_digit + +```erlang +hex_digit(N) when N >= 0, N =< 9 -> + N + $0; +``` + +### hex_digit + +```erlang +hex_digit(N) when N > 9, N =< 15 -> + N + $a - 10. +``` + +### percent_unescape + +Unescape a URI-encoded string. 
+ +```erlang +percent_unescape([$%, H1, H2 | Cs]) -> + Byte = (hex_value(H1) bsl 4) + hex_value(H2), + [Byte | percent_unescape(Cs)]; +``` + +### percent_unescape + +Unescape a URI-encoded string. + +```erlang +percent_unescape([C | Cs]) -> + [C | percent_unescape(Cs)]; +``` + +### percent_unescape + +Unescape a URI-encoded string. + +```erlang +percent_unescape([]) -> + []. +``` + +### hex_value + +```erlang +hex_value(C) when C >= $0, C =< $9 -> + C - $0; +``` + +### hex_value + +```erlang +hex_value(C) when C >= $a, C =< $f -> + C - $a + 10; +``` + +### hex_value + +```erlang +hex_value(C) when C >= $A, C =< $F -> + C - $A + 10. +``` + +### escape_unescape_identity_test + +```erlang +escape_unescape_identity_test() -> + % Test that unescape(escape(X)) == X for various inputs + TestCases = [ + <<"hello">>, + <<"hello, world!">>, + <<"hello+list">>, + <<"special@chars#here">>, + <<"UPPERCASE">>, + <<"MixedCASEstring">>, + <<"12345">>, + <<>> % Empty string + ], + ?event(parsing, + {escape_unescape_identity_test, + {test_cases, + [ + {Case, {explicit, encode(Case)}} + || + Case <- TestCases + ] + } + } + ), + lists:foreach(fun(TestCase) -> + ?assertEqual(TestCase, decode(encode(TestCase))) + end, TestCases). +``` + +### unescape_specific_test + +```erlang +unescape_specific_test() -> + % Test specific unescape cases + ?assertEqual(<<"a">>, decode(<<"%61">>)), + ?assertEqual(<<"A">>, decode(<<"%41">>)), + ?assertEqual(<<"!">>, decode(<<"%21">>)), + ?assertEqual(<<"hello, World!">>, decode(<<"hello%2c%20%57orld%21">>)), + ?assertEqual(<<"/">>, decode(<<"%2f">>)), + ?assertEqual(<<"?">>, decode(<<"%3f">>)). 
+``` + +### uppercase_test + +```erlang +uppercase_test() -> + % Test uppercase characters are properly escaped + ?assertEqual(<<"%41">>, encode(<<"A">>)), + ?assertEqual(<<"%42">>, encode(<<"B">>)), + ?assertEqual(<<"%5a">>, encode(<<"Z">>)), + ?assertEqual(<<"hello%20%57orld">>, encode(<<"hello World">>)), + ?assertEqual(<<"test%41%42%43">>, encode(<<"testABC">>)). +``` + +### escape_unescape_special_chars_test + +```erlang +escape_unescape_special_chars_test() -> + % Test characters that should be escaped + SpecialChars = [ + $@, $#, $", $$, $%, $&, $', $(, $), $*, $+, $,, $/, $:, $;, + $<, $=, $>, $?, $[, $\\, $], $^, $`, ${, $|, $}, $~, $\s + ], + TestString = list_to_binary(SpecialChars), +``` + +--- + +*Generated from [hb_escape.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_escape.erl)* diff --git a/docs/book/src/hb_event.erl.md b/docs/book/src/hb_event.erl.md new file mode 100644 index 000000000..6df4e3b70 --- /dev/null +++ b/docs/book/src/hb_event.erl.md @@ -0,0 +1,505 @@ +# hb_event + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_event.erl) + +Wrapper for incrementing prometheus counters. + +--- + +## Exported Functions + +- `counters/0` +- `diff/1` +- `diff/2` +- `increment_callers/1` +- `increment/3` +- `increment/4` +- `log/1` +- `log/2` +- `log/3` +- `log/4` +- `log/5` +- `log/6` + +--- + +### log + +Wrapper for incrementing prometheus counters. + +```erlang +log(_X) -> ok. +``` + +### log + +Wrapper for incrementing prometheus counters. + +```erlang +log(_Topic, _X) -> ok. +``` + +### log + +Wrapper for incrementing prometheus counters. + +```erlang +log(_Topic, _X, _Mod) -> ok. +``` + +### log + +Wrapper for incrementing prometheus counters. + +```erlang +log(_Topic, _X, _Mod, _Func) -> ok. +``` + +### log + +Wrapper for incrementing prometheus counters. + +```erlang +log(_Topic, _X, _Mod, _Func, _Line) -> ok. +``` + +### log + +Wrapper for incrementing prometheus counters. 
+ +```erlang +log(_Topic, _X, _Mod, _Func, _Line, _Opts) -> ok. +-else. +``` + +### log + +Debugging log logging function. For now, it just prints to standard + +```erlang +log(X) -> log(global, X). +``` + +### log + +Debugging log logging function. For now, it just prints to standard + +```erlang +log(Topic, X) -> log(Topic, X, ""). +``` + +### log + +Debugging log logging function. For now, it just prints to standard + +```erlang +log(Topic, X, Mod) -> log(Topic, X, Mod, undefined). +``` + +### log + +Debugging log logging function. For now, it just prints to standard + +```erlang +log(Topic, X, Mod, Func) -> log(Topic, X, Mod, Func, undefined). +``` + +### log + +Debugging log logging function. For now, it just prints to standard + +```erlang +log(Topic, X, Mod, Func, Line) -> log(Topic, X, Mod, Func, Line, #{}). +``` + +### log + +Debugging log logging function. For now, it just prints to standard + +```erlang +log(Topic, X, Mod, undefined, Line, Opts) -> log(Topic, X, Mod, "", Line, Opts); +``` + +### log + +Debugging log logging function. For now, it just prints to standard + +```erlang +log(Topic, X, Mod, Func, undefined, Opts) -> log(Topic, X, Mod, Func, "", Opts); +``` + +### log + +Debugging log logging function. For now, it just prints to standard + +```erlang +log(Topic, X, Mod, Func, Line, Opts) -> + % Check if the debug_print option has the topic in it if set. +``` + +### should_print + +Determine if the topic should be printed. Uses a cache in the process + +```erlang +should_print(Topic, Opts) -> + case erlang:get({event_print, Topic}) of + {cached, X} -> X; + undefined -> + Result = + case hb_opts:get(debug_print, false, Opts) of + EventList when is_list(EventList) -> + lists:member(Topic, EventList); + true -> true; + false -> false + end, + erlang:put({event_print, Topic}, {cached, Result}), + Result + end. 
+``` + +### handle_tracer + +```erlang +handle_tracer(Topic, X, Opts) -> + AllowedTopics = [http, ao_result], + case lists:member(Topic, AllowedTopics) of + true -> + case hb_opts:get(trace, undefined, Opts) of + undefined -> + case tuple_to_list(X) of + [_ | Rest] -> + try + Map = maps:from_list(Rest), + TopicOpts = hb_opts:get(opts, #{}, Map), + case hb_opts:get(trace, undefined, TopicOpts) of + undefined -> ok; + TracePID -> + hb_tracer:record_step(TracePID, {Topic, X}) + end + catch + _:_ -> ok + end; + _ -> + ok + end; + TracePID -> hb_tracer:record_step(TracePID, {Topic, X}) + end; + _ -> ok + end. +``` + +### increment + +Increment the counter for the given topic and message. Registers the + +```erlang +increment(Topic, Message, Opts) -> + increment(Topic, Message, Opts, 1). +``` + +### increment + +```erlang +increment(global, _Message, _Opts, _Count) -> ignored; +``` + +### increment + +```erlang +increment(ao_core, _Message, _Opts, _Count) -> ignored; +``` + +### increment + +```erlang +increment(ao_internal, _Message, _Opts, _Count) -> ignored; +``` + +### increment + +```erlang +increment(ao_devices, _Message, _Opts, _Count) -> ignored; +``` + +### increment + +```erlang +increment(ao_subresolution, _Message, _Opts, _Count) -> ignored; +``` + +### increment + +```erlang +increment(signature_base, _Message, _Opts, _Count) -> ignored; +``` + +### increment + +```erlang +increment(id_base, _Message, _Opts, _Count) -> ignored; +``` + +### increment + +```erlang +increment(parsing, _Message, _Opts, _Count) -> ignored; +``` + +### increment + +```erlang +increment(Topic, Message, _Opts, Count) -> + case parse_name(Message) of + <<"debug", _/binary>> -> ignored; + EventName -> + TopicBin = parse_name(Topic), + case find_event_server() of + Pid when is_pid(Pid) -> + Pid ! {increment, TopicBin, EventName, Count}; + undefined -> + PID = spawn(fun() -> server() end), + hb_name:register(?MODULE, PID), + PID ! {increment, TopicBin, EventName, Count} + end + end. 
+``` + +### increment_callers + +Increment the call paths and individual upstream calling functions of + +```erlang +increment_callers(Topic) -> + increment_callers(Topic, erlang). +``` + +### increment_callers + +```erlang +increment_callers(Topic, Type) -> + BinTopic = hb_util:bin(Topic), + increment( + <>, + hb_format:trace_short(Type), + #{} + ), + lists:foreach( + fun(Caller) -> + increment(<>, Caller, #{}) + end, + hb_format:trace_to_list(hb_format:get_trace(Type)) + ). +``` + +### counters + +Return a message containing the current counter values for all logged + +```erlang +counters() -> + UnaggregatedCounts = + [ + {Group, Name, Count} + || + {{default, <<"event">>, [Group, Name], _}, Count, _} <- raw_counters() + ], + lists:foldl( + fun({Group, Name, Count}, Acc) -> + Acc#{ + Group => (maps:get(Group, Acc, #{}))#{ + Name => maps:get(Name, maps:get(Group, Acc, #{}), 0) + Count + } + } + end, + #{}, + UnaggregatedCounts + ). +``` + +### diff + +Return the change in the event counters before and after executing the + +```erlang +diff(Fun) -> + diff(Fun, #{}). +``` + +### diff + +```erlang +diff(Fun, Opts) -> + EventsBefore = counters(), + Res = Fun(), + EventsAfter = counters(), + {hb_message:diff(EventsBefore, EventsAfter, Opts), Res}. +``` + +### raw_counters + +```erlang +raw_counters() -> + []. +``` + +### raw_counters + +```erlang +raw_counters() -> + ets:tab2list(prometheus_counter_table). +``` + +### find_event_server + +Find the event server, creating it if it doesn't exist. We cache the + +```erlang +find_event_server() -> + case erlang:get({event_server, ?MODULE}) of + {cached, Pid} -> Pid; + undefined -> + PID = + case hb_name:lookup(?MODULE) of + Pid when is_pid(Pid) -> Pid; + undefined -> + NewServer = spawn(fun() -> server() end), + hb_name:register(?MODULE, NewServer), + NewServer + end, + erlang:put({event_server, ?MODULE}, {cached, PID}), + PID + end. 
+``` + +### server + +```erlang +server() -> + await_prometheus_started(), + prometheus_counter:declare( + [ + {name, <<"event">>}, + {help, <<"AO-Core execution events">>}, + {labels, [topic, event]} + ]), + handle_events(). +``` + +### handle_events + +```erlang +handle_events() -> + receive + {increment, TopicBin, EventName, Count} -> + case erlang:process_info(self(), message_queue_len) of + {message_queue_len, Len} when Len > ?OVERLOAD_QUEUE_LENGTH -> + % Print a warning, but do so less frequently the more + % overloaded the system is. +``` + +### await_prometheus_started + +Delay the event server until prometheus is started. + +```erlang +await_prometheus_started() -> + receive + Msg -> + case application:get_application(prometheus) of + undefined -> await_prometheus_started(); + _ -> self() ! Msg, ok + end + end. +``` + +### parse_name + +```erlang +parse_name(Name) when is_tuple(Name) -> + parse_name(element(1, Name)); +``` + +### parse_name + +```erlang +parse_name(Name) when is_atom(Name) -> + atom_to_binary(Name, utf8); +``` + +### parse_name + +```erlang +parse_name(Name) when is_binary(Name) -> + Name; +``` + +### parse_name + +```erlang +parse_name(Name) when is_list(Name) -> + iolist_to_binary(Name); +``` + +### parse_name + +Benchmark the performance of a full log of an event. + +```erlang +parse_name(_) -> no_event_name. +%%% Benchmark tests +``` + +### benchmark_event_test + +Benchmark the performance of a full log of an event. + +```erlang +benchmark_event_test() -> + Iterations = + hb_test_utils:benchmark( + fun() -> + log(test_module, {test, 1}) + end + ), + hb_test_utils:benchmark_print(<<"Recorded">>, <<"events">>, Iterations), + ?assert(Iterations >= 1000), + ok. 
+``` + +### benchmark_print_lookup_test + +Benchmark the performance of looking up whether a topic and module + +```erlang +benchmark_print_lookup_test() -> + DefaultOpts = hb_opts:default_message_with_env(), + Iterations = + hb_test_utils:benchmark( + fun() -> + should_print(test_module, DefaultOpts) + orelse should_print(test_event, DefaultOpts) + end + ), + hb_test_utils:benchmark_print(<<"Looked-up">>, <<"topics">>, Iterations), + ?assert(Iterations >= 1000), + ok. +``` + +### benchmark_increment_test + +Benchmark the performance of incrementing an event. + +```erlang +benchmark_increment_test() -> + Iterations = + hb_test_utils:benchmark( + fun() -> increment(test_module, {test, 1}, #{}) end + ), + hb_test_utils:benchmark_print(<<"Incremented">>, <<"events">>, Iterations), + ?assert(Iterations >= 1000), +``` + +--- + +*Generated from [hb_event.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_event.erl)* diff --git a/docs/book/src/hb_examples.erl.md b/docs/book/src/hb_examples.erl.md new file mode 100644 index 000000000..def3663df --- /dev/null +++ b/docs/book/src/hb_examples.erl.md @@ -0,0 +1,227 @@ +# hb_examples + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_examples.erl) + +This module contains end-to-end tests for Hyperbeam, accessing through +the HTTP interface. As well as testing the system, you can use these tests +as examples of how to interact with HyperBEAM nodes. + +--- + +### relay_with_payments_test_ + +This module contains end-to-end tests for Hyperbeam, accessing through +Start a node running the simple pay meta device, and use it to relay + +```erlang +relay_with_payments_test_() -> + {timeout, 30, fun relay_with_payments_test/0}. +``` + +### relay_with_payments_test + +```erlang +relay_with_payments_test() -> + HostWallet = ar_wallet:new(), + ClientWallet = ar_wallet:new(), + ClientAddress = hb_util:human_id(ar_wallet:to_address(ClientWallet)), + % Start a node with the simple-pay device enabled. 
+``` + +### paid_wasm_test_ + +Gain signed WASM responses from a node and verify them. + +```erlang +paid_wasm_test_() -> + {timeout, 30, fun paid_wasm/0}. +``` + +### paid_wasm + +```erlang +paid_wasm() -> + HostWallet = ar_wallet:new(), + ClientWallet = ar_wallet:new(), + ClientAddress = hb_util:human_id(ar_wallet:to_address(ClientWallet)), + ProcessorMsg = + #{ + <<"device">> => <<"p4@1.0">>, + <<"ledger-device">> => <<"simple-pay@1.0">>, + <<"pricing-device">> => <<"simple-pay@1.0">> + }, + HostNode = + hb_http_server:start_node( + Opts = #{ + store => [ + #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST">> + } + ], + simple_pay_ledger => #{ ClientAddress => 100 }, + simple_pay_price => 10, + operator => ar_wallet:to_address(HostWallet), + on => #{ + <<"request">> => ProcessorMsg, + <<"response">> => ProcessorMsg + } + } + ), + % Read the WASM file from disk, post it to the host and execute it. +``` + +### create_schedule_aos2_test_disabled + +```erlang +create_schedule_aos2_test_disabled() -> + % The legacy process format, according to the ao.tn.1 spec: + % Data-Protocol The name of the Data-Protocol for this data-item 1-1 ao + % Variant The network version that this data-item is for 1-1 ao.TN.1 + % Type Indicates the shape of this Data-Protocol data-item 1-1 Process + % Module Links the process to ao module using the module's unique + % Transaction ID (TXID). 1-1 {TXID} + % Scheduler Specifies the scheduler unit by Wallet Address or Name, and can + % be referenced by a recent Scheduler-Location. 1-1 {ADDRESS} + % Cron-Interval An interval at which a particular Cron Message is recevied by the process, + % in the format X-Y, where X is a scalar value, and Y is milliseconds, + % seconds, minutes, hours, days, months, years, or blocks 0-n 1-second + % Cron-Tag-{Name} defines tags for Cron Messages at set intervals, + % specifying relevant metadata. 
0-1 + % Memory-Limit Overrides maximum memory, in megabytes or gigabytes, set by + % Module, can not exceed modules setting 0-1 16-mb + % Compute-Limit Caps the compute cycles for a module per evaluation, ensuring + % efficient, controlled execution 0-1 1000 + % Pushed-For Message TXID that this Process is pushed as a result 0-1 {TXID} + % Cast Sets message handling: 'True' for do not push, 'False' for normal + % pushing 0-1 {True or False} + % Authority Defines a trusted wallet address which can send Messages to + % the Process 0-1 {ADDRESS} + % On-Boot Defines a startup script to run when the process is spawned. If + % value "Data" it uses the Data field of the Process Data Item. If it is a + % TXID it will load that TX from Arweave and execute it. 0-1 {Data or TXID} + % {Any-Tags} Custom Tags specific for the initial input of the Process 0-n + Node = + try hb_http_server:start_node(#{ priv_wallet => hb:wallet() }) + catch + _:_ -> + <<"http://localhost:8734">> + end, + ProcMsg = #{ + <<"data-protocol">> => <<"ao">>, + <<"type">> => <<"Process">>, + <<"variant">> => <<"ao.TN.1">>, + <<"type">> => <<"Process">>, + <<"module">> => <<"bkjb55i07GUCUSWROtKK4HU1mBS_X0TyH3M5jMV6aPg">>, + <<"scheduler">> => hb_util:human_id(hb:address()), + <<"memory-limit">> => <<"1024-mb">>, + <<"compute-limit">> => <<"10000000">>, + <<"authority">> => hb_util:human_id(hb:address()), + <<"scheduler-location">> => hb_util:human_id(hb:address()) + }, + Wallet = hb:wallet(), + SignedProc = hb_message:commit(ProcMsg, Wallet), + IDNone = hb_message:id(SignedProc, none), + IDAll = hb_message:id(SignedProc, all), + {ok, Res} = schedule(SignedProc, IDNone, Wallet, Node), + ?event({res, Res}), + receive after 100 -> ok end, + ?event({id, IDNone, IDAll}), + {ok, Res2} = hb_http:get( + Node, + <<"/~scheduler@1.0/slot?target=", IDNone/binary>>, + #{} + ), + ?assertMatch(Slot when Slot >= 0, hb_ao:get(<<"at-slot">>, Res2, #{})). 
+``` + +### schedule + +```erlang +schedule(ProcMsg, Target) -> + schedule(ProcMsg, Target, hb:wallet()). +``` + +### schedule + +```erlang +schedule(ProcMsg, Target, Wallet) -> + schedule(ProcMsg, Target, Wallet, <<"http://localhost:8734">>). +``` + +### schedule + +```erlang +schedule(ProcMsg, Target, Wallet, Node) -> + SignedReq = + hb_message:commit( + #{ + <<"path">> => <<"/~scheduler@1.0/schedule">>, + <<"target">> => Target, + <<"body">> => ProcMsg + }, + Wallet + ), + ?event({signed_req, SignedReq}), + hb_http:post(Node, SignedReq, #{}). +``` + +### relay_schedule_ans104_test + +Test that we can schedule an ANS-104 data item on a relayed node. The + +```erlang +relay_schedule_ans104_test() -> + SchedulerWallet = ar_wallet:new(), + ComputeWallet = ar_wallet:new(), + RelayWallet = ar_wallet:new(), + ?event(debug_test, + {wallets, + {scheduler, hb_util:human_id(SchedulerWallet)}, + {compute, hb_util:human_id(ComputeWallet)}, + {relay, hb_util:human_id(RelayWallet)} + } + ), + Scheduler = + hb_http_server:start_node( + #{ + on => #{ + <<"start">> => #{ + <<"device">> => <<"scheduler@1.0">>, + <<"path">> => <<"location">>, + <<"method">> => <<"POST">>, + <<"target">> => <<"self">>, + <<"require-codec">> => <<"ans104@1.0">>, + <<"hook">> => #{ + <<"result">> => <<"ignore">>, + <<"commit-request">> => true + } + } + }, + store => [hb_test_utils:test_store()], + priv_wallet => SchedulerWallet + } + ), + ?event(debug_test, {scheduler, Scheduler}), + Compute = + hb_http_server:start_node( + #{ + priv_wallet => ComputeWallet, + store => + [ + ComputeStore = hb_test_utils:test_store(), + #{ + <<"store-module">> => hb_store_remote_node, + <<"name">> => <<"cache-TEST/remote-node">>, + <<"node">> => Scheduler + } + ] + } + ), + % Get the scheduler location of the scheduling node and write it to the + % compute node's store. 
+``` + +--- + +*Generated from [hb_examples.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_examples.erl)* diff --git a/docs/book/src/hb_features.erl.md b/docs/book/src/hb_features.erl.md new file mode 100644 index 000000000..6f49a16b2 --- /dev/null +++ b/docs/book/src/hb_features.erl.md @@ -0,0 +1,132 @@ +# hb_features + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_features.erl) + +A module that exports a list of feature flags that the node supports +using the `-ifdef` macro. +As a consequence, this module acts as a proxy of information between the +build system and the runtime execution environment. + +--- + +## Exported Functions + +- `all/0` +- `eflame/0` +- `enabled/1` +- `genesis_wasm/0` +- `http3/0` +- `rocksdb/0` +- `test/0` + +--- + +### all + +A module that exports a list of feature flags that the node supports +Returns a list of all feature flags that the node supports. + +```erlang +all() -> + Features = + lists:filtermap( + fun({Name, _}) -> + case lists:member(Name, [all, enabled, module_info]) of + true -> false; + false -> {true, Name} + end + end, + ?MODULE:module_info(exports) + ), + hb_maps:from_list( + lists:map( + fun(Name) -> + {Name, ?MODULE:Name()} + end, + Features + ) + ). +``` + +### enabled + +Returns true if the feature flag is enabled. + +```erlang +enabled(Feature) -> + hb_maps:get(Feature, all(), false). +``` + +### http3 + +```erlang +http3() -> true. +-else. +``` + +### http3 + +```erlang +http3() -> false. +-endif. +``` + +### rocksdb + +```erlang +rocksdb() -> true. +-else. +``` + +### rocksdb + +```erlang +rocksdb() -> false. +-endif. +``` + +### genesis_wasm + +```erlang +genesis_wasm() -> true. +-else. +``` + +### genesis_wasm + +```erlang +genesis_wasm() -> false. +-endif. +``` + +### eflame + +```erlang +eflame() -> true. +-else. +``` + +### eflame + +```erlang +eflame() -> false. +-endif. +``` + +### test + +```erlang +test() -> true. +-else. 
+``` + +### test + +```erlang +test() -> false. +-endif. +``` + +--- + +*Generated from [hb_features.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_features.erl)* diff --git a/docs/book/src/hb_format.erl.md b/docs/book/src/hb_format.erl.md new file mode 100644 index 000000000..89462d1b4 --- /dev/null +++ b/docs/book/src/hb_format.erl.md @@ -0,0 +1,1201 @@ +# hb_format + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_format.erl) + +Formatting and debugging utilities for HyperBEAM. +This module provides text formatting capabilities for debugging output, +message pretty-printing, stack trace formatting, and human-readable +representations of binary data and cryptographic identifiers. +The functions in this module are primarily used for development and +debugging purposes, supporting the logging and diagnostic infrastructure +throughout the HyperBEAM system. + +--- + +## Exported Functions + +- `binary/1` +- `error/2` +- `escape_format/1` +- `eunit_print/2` +- `get_trace/1` +- `indent_lines/2` +- `indent/2` +- `indent/3` +- `indent/4` +- `maybe_multiline/3` +- `message/1` +- `message/2` +- `message/3` +- `print_trace_short/4` +- `print_trace/4` +- `print/1` +- `print/3` +- `print/4` +- `print/5` +- `remove_leading_noise/1` +- `remove_noise/1` +- `remove_trailing_noise/1` +- `short_id/1` +- `term/1` +- `term/2` +- `term/3` +- `trace_macro_helper/5` +- `trace_short/0` +- `trace_short/1` +- `trace_to_list/1` +- `trace/1` + +--- + +### print + +Formatting and debugging utilities for HyperBEAM. +Print a message to the standard error stream, prefixed by the amount + +```erlang +print(X) -> + print(X, <<>>, #{}). +``` + +### print + +```erlang +print(X, Info, Opts) -> + io:format( + standard_error, + "=== HB DEBUG ===~s==>~n~s~n", + [Info, term(X, Opts, 0)] + ), + X. +``` + +### print + +```erlang +print(X, Mod, Func, LineNum) -> + print(X, format_debug_trace(Mod, Func, LineNum, #{}), #{}). 
+``` + +### print + +```erlang +print(X, Mod, Func, LineNum, Opts) -> + Now = erlang:system_time(millisecond), + Last = erlang:put(last_debug_print, Now), + TSDiff = case Last of undefined -> 0; _ -> Now - Last end, + Info = + hb_util:bin( + io_lib:format( + "[~pms in ~s @ ~s]", + [ + TSDiff, + case server_id() of + undefined -> hb_util:bin(io_lib:format("~p", [self()])); + ServerID -> + hb_util:bin( + io_lib:format( + "~s (~p)", + [short_id(ServerID), self()] + ) + ) + end, + format_debug_trace(Mod, Func, LineNum, Opts) + ] + ) + ), + print(X, Info, Opts). +``` + +### server_id + +Retreive the server ID of the calling process, if known. + +```erlang +server_id() -> + server_id(#{ server_id => undefined }). +``` + +### server_id + +```erlang +server_id(Opts) -> + case hb_opts:get(server_id, undefined, Opts) of + undefined -> get(server_id); + ServerID -> ServerID + end. +``` + +### format_debug_trace + +Generate the appropriate level of trace for a given call. + +```erlang +format_debug_trace(Mod, Func, Line, Opts) -> + case hb_opts:get(debug_print_trace, false, #{}) of + short -> + Trace = + case hb_opts:get(debug_trace_type, erlang, Opts) of + erlang -> get_trace(erlang); + ao -> + % If we are printing AO-Core traces, we add the module + % and line number to the end to show exactly where in + % the handler-flow the event arose. +``` + +### term + +Convert a term to a string for debugging print purposes. + +```erlang +term(X) -> term(X, #{}). +``` + +### term + +Convert a term to a string for debugging print purposes. + +```erlang +term(X, Opts) -> term(X, Opts, 0). +``` + +### term + +Convert a term to a string for debugging print purposes. + +```erlang +term(X, Opts, Indent) -> + try do_debug_fmt(X, Opts, Indent) + catch A:B:C -> + Mode = hb_opts:get(mode, prod, Opts), + PrintFailPreference = hb_opts:get(debug_print_fail_mode, quiet, Opts), + case {Mode, PrintFailPreference} of + {debug, quiet} -> + indent("[!Format failed!] 
~p", [X], Opts, Indent); + {debug, _} -> + indent( + "[PRINT FAIL:] ~80p~n===== PRINT ERROR WAS ~p:~p =====~n~s", + [ + X, + A, + B, + hb_util:bin( + format_trace( + C, + hb_opts:get(stack_print_prefixes, [], #{}) + ) + ) + ], + Opts, + Indent + ); + _ -> + indent("[!Format failed!]", [], Opts, Indent) + end + end. +``` + +### do_debug_fmt + +```erlang +do_debug_fmt( + { { {rsa, _PublicExpnt1}, _Priv1, _Priv2 }, + { {rsa, _PublicExpnt2}, Pub } + }, + Opts, Indent +) -> + format_address(Pub, Opts, Indent); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt( + { AtomValue, + { + { {rsa, _PublicExpnt1}, _Priv1, _Priv2 }, + { {rsa, _PublicExpnt2}, Pub } + } + }, + Opts, Indent +) -> + AddressString = format_address(Pub, Opts, Indent), + indent("~p: ~s", [AtomValue, AddressString], Opts, Indent); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt({explicit, X}, Opts, Indent) -> + indent("[Explicit:] ~p", [X], Opts, Indent); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt({string, X}, Opts, Indent) -> + indent("~s", [X], Opts, Indent); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt({trace, Trace}, Opts, Indent) -> + indent("~n~s", [trace(Trace)], Opts, Indent); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt({as, undefined, Msg}, Opts, Indent) -> + "\n" ++ indent("Subresolve => ", [], Opts, Indent) ++ + maybe_multiline(Msg, Opts, Indent + 1); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt({as, DevID, Msg}, Opts, Indent) -> + "\n" ++ indent("Subresolve as ~s => ", [DevID], Opts, Indent) ++ + maybe_multiline(Msg, Opts, Indent + 1); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt({X, Y}, Opts, Indent) when is_atom(X) and is_atom(Y) -> + indent("~p: ~p", [X, Y], Opts, Indent); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt({X, Y}, Opts, Indent) when is_record(Y, tx) -> + indent("~p: [TX item]~n~s", + [X, ar_bundles:format(Y, Indent + 1, Opts)], + Opts, + Indent + ); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt({X, Y}, Opts, Indent) when 
is_map(Y); is_list(Y) -> + Formatted = maybe_multiline(Y, Opts, Indent + 1), + indent( + case is_binary(X) of + true -> "~s"; + false -> "~p" + end ++ "~s", + [ + X, + case is_multiline(Formatted) of + true -> " ==>" ++ Formatted; + false -> ": " ++ Formatted + end + ], + Opts, + Indent + ); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt({X, Y}, Opts, Indent) -> + indent( + "~s: ~s", + [ + remove_leading_noise(term(X, Opts, Indent)), + remove_leading_noise(term(Y, Opts, Indent)) + ], + Opts, + Indent + ); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt(TX, Opts, Indent) when is_record(TX, tx) -> + indent("[TX item]~n~s", + [ar_bundles:format(TX, Indent, Opts)], + Opts, + Indent + ); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt(MaybePrivMap, Opts, Indent) when is_map(MaybePrivMap) -> + Map = hb_private:reset(MaybePrivMap), + case maybe_format_short(Map, Opts, Indent) of + {ok, SimpleFmt} -> SimpleFmt; + error -> + "\n" ++ lists:flatten(message(Map, Opts, Indent)) + end; +``` + +### do_debug_fmt + +```erlang +do_debug_fmt(Tuple, Opts, Indent) when is_tuple(Tuple) -> + format_tuple(Tuple, Opts, Indent); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt(X, Opts, Indent) when is_binary(X) -> + indent("~s", [binary(X)], Opts, Indent); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt(Str = [X | _], Opts, Indent) when is_integer(X) andalso X >= 32 andalso X < 127 -> + indent("~s", [Str], Opts, Indent); +``` + +### do_debug_fmt + +```erlang +do_debug_fmt(MsgList, Opts, Indent) when is_list(MsgList) -> + format_list(MsgList, Opts, Indent); +``` + +### do_debug_fmt + +If the user attempts to print a wallet, format it as an address. + +```erlang +do_debug_fmt(X, Opts, Indent) -> + indent("~80p", [X], Opts, Indent). +``` + +### format_address + +If the user attempts to print a wallet, format it as an address. 
+ +```erlang +format_address(Wallet, Opts, Indent) -> + indent("Wallet [Addr: ~s]", + [short_id(hb_util:human_id(ar_wallet:to_address(Wallet)))], + Opts, + Indent + ). +``` + +### format_tuple + +Helper function to format tuples with arity greater than 2. + +```erlang +format_tuple(Tuple, Opts, Indent) -> + to_lines(lists:map( + fun(Elem) -> + term(Elem, Opts, Indent) + end, + tuple_to_list(Tuple) + )). +``` + +### format_list + +Format a list. Comes in three forms: all on one line, individual items + +```erlang +format_list(MsgList, Opts, Indent) -> + case maybe_format_short(MsgList, Opts, Indent) of + {ok, SimpleFmt} -> SimpleFmt; + error -> + "\n" ++ + indent("List [~w] {", [length(MsgList)], Opts, Indent) ++ + format_list_lines(MsgList, Opts, Indent) + end. +``` + +### format_list_lines + +Format a list as a multi-line string. + +```erlang +format_list_lines(MsgList, Opts, Indent) -> + Numbered = hb_util:number(MsgList), + Lines = + lists:map( + fun({N, Msg}) -> + format_list_item(N, Msg, Opts, Indent) + end, + Numbered + ), + AnyLong = + lists:any( + fun({Mode, _}) -> Mode == multiline end, + Lines + ), + case AnyLong of + false -> + "\n" ++ + remove_trailing_noise( + lists:flatten( + lists:map( + fun({_, Line}) -> + Line + end, + Lines + ) + ) + ) ++ + "\n" ++ + indent("}", [], Opts, Indent); + true -> + "\n" ++ + lists:flatten(lists:map( + fun({N, Msg}) -> + {_, Line} = format_list_item(multiline, N, Msg, Opts, Indent), + Line + end, + Numbered + )) ++ indent("}", [], Opts, Indent) + end. +``` + +### format_list_item + +Format a single element of a list. + +```erlang +format_list_item(N, Msg, Opts, Indent) -> + case format_list_item(short, N, Msg, Opts, Indent) of + {short, String} -> {short, String}; + error -> format_list_item(multiline, N, Msg, Opts, Indent) + end. 
+``` + +### format_list_item + +```erlang +format_list_item(short, N, Msg, Opts, Indent) -> + case maybe_format_short(Msg, Opts, Indent) of + {ok, SimpleFmt} -> + {short, indent("~s => ~s~n", [N, SimpleFmt], Opts, Indent + 1)}; + error -> error + end; +``` + +### format_list_item + +```erlang +format_list_item(multiline, N, Msg, Opts, Indent) -> + Formatted = + case is_multiline(Base = term(Msg, Opts, Indent + 2)) of + true -> Base; + false -> remove_leading_noise(Base) + end, + { + multiline, + indent( + "~s => ~s~n", + [N, Formatted], + Opts, + Indent + 1 + ) + }. +``` + +### to_lines + +Join a list of strings and remove trailing noise. + +```erlang +to_lines(Elems) -> + remove_trailing_noise(do_to_lines(Elems)). +``` + +### do_to_lines + +```erlang +do_to_lines([]) -> []; +``` + +### do_to_lines + +```erlang +do_to_lines(In =[RawElem | Rest]) -> + Elem = lists:flatten(RawElem), + case lists:member($\n, Elem) of + true -> lists:flatten(lists:join("\n", In)); + false -> Elem ++ ", " ++ do_to_lines(Rest) + end. +``` + +### remove_noise + +Remove any leading or trailing noise from a string. + +```erlang +remove_noise(Str) -> + remove_leading_noise(remove_trailing_noise(Str)). +``` + +### remove_leading_noise + +Remove any leading whitespace from a string. + +```erlang +remove_leading_noise(Str) -> + remove_leading_noise(Str, ?NOISE_CHARS). +``` + +### remove_leading_noise + +```erlang +remove_leading_noise(Bin, Noise) when is_binary(Bin) -> + hb_util:bin(remove_leading_noise(hb_util:list(Bin), Noise)); +``` + +### remove_leading_noise + +```erlang +remove_leading_noise([], _) -> []; +``` + +### remove_leading_noise + +```erlang +remove_leading_noise([Char|Str], Noise) -> + case lists:member(Char, Noise) of + true -> + remove_leading_noise(Str, Noise); + false -> [Char|Str] + end. +``` + +### remove_trailing_noise + +Remove trailing noise characters from a string. 
By default, this is + +```erlang +remove_trailing_noise(Str) -> + removing_trailing_noise(Str, ?NOISE_CHARS). +``` + +### removing_trailing_noise + +```erlang +removing_trailing_noise(Bin, Noise) when is_binary(Bin) -> + removing_trailing_noise(binary:bin_to_list(Bin), Noise); +``` + +### removing_trailing_noise + +```erlang +removing_trailing_noise(BinList, Noise) when is_list(BinList) -> + case lists:member(lists:last(BinList), Noise) of + true -> + removing_trailing_noise(lists:droplast(BinList), Noise); + false -> BinList + end. +``` + +### indent + +Format a string with an indentation level. + +```erlang +indent(Str, Indent) -> indent(Str, #{}, Indent). +``` + +### indent + +Format a string with an indentation level. + +```erlang +indent(Str, Opts, Indent) -> indent(Str, [], Opts, Indent). +``` + +### indent + +Format a string with an indentation level. + +```erlang +indent(FmtStr, Terms, Opts, Ind) -> + IndentSpaces = hb_opts:get(debug_print_indent, Opts), + EscapedFmt = escape_format(FmtStr), + lists:droplast( + lists:flatten( + io_lib:format( + [$\s || _ <- lists:seq(1, Ind * IndentSpaces)] ++ + lists:flatten(EscapedFmt) ++ "\n", + Terms + ) + ) + ). +``` + +### escape_format + +Escape a string for use as an io_lib:format specifier. + +```erlang +escape_format(Str) when is_list(Str) -> + re:replace( + Str, + "~([a-z\\-_]+@[0-9]+\\.[0-9]+)", "~~\\1", + [global, {return, list}] + ); +``` + +### escape_format + +Escape a string for use as an io_lib:format specifier. +Format an error message as a string. + +```erlang +escape_format(Else) -> Else. +``` + +### error + +Escape a string for use as an io_lib:format specifier. +Format an error message as a string. 
+ +```erlang +error(ErrorMsg, Opts) -> + Type = hb_ao:get(<<"type">>, ErrorMsg, <<"">>, Opts), + Details = hb_ao:get(<<"details">>, ErrorMsg, <<"">>, Opts), + Stacktrace = hb_ao:get(<<"stacktrace">>, ErrorMsg, <<"">>, Opts), + hb_util:bin( + [ + <<"Termination type: '">>, Type, + <<"'\n\nStacktrace:\n\n">>, Stacktrace, + <<"\n\nError details:\n\n">>, Details + ] + ). +``` + +### indent_lines + +Take a series of strings or a combined string and format as a + +```erlang +indent_lines(Strings, Indent) when is_binary(Strings) -> + indent_lines(binary:split(Strings, <<"\n">>, [global]), Indent); +``` + +### indent_lines + +Take a series of strings or a combined string and format as a + +```erlang +indent_lines(Strings, Indent) when is_list(Strings) -> + hb_util:bin(lists:join( + "\n", + [ + indent(hb_util:list(String), #{}, Indent) + || + String <- Strings + ] + )). +``` + +### binary + +Format a binary as a short string suitable for printing. + +```erlang +binary(Bin) -> + case short_id(Bin) of + undefined -> + MaxBinPrint = hb_opts:get(debug_print_binary_max), + Printable = + binary:part( + Bin, + 0, + case byte_size(Bin) of + X when X < MaxBinPrint -> X; + _ -> MaxBinPrint + end + ), + PrintSegment = + case is_human_binary(Printable) of + true -> Printable; + false -> hb_util:encode(Printable) + end, + lists:flatten( + [ + "\"", + [PrintSegment], + case Printable == Bin of + true -> "\""; + false -> + io_lib:format( + "...\" <~s bytes>", + [hb_util:human_int(byte_size(Bin))] + ) + end + ] + ); + ShortID -> + lists:flatten(io_lib:format("~s", [ShortID])) + end. +``` + +### maybe_multiline + +Format a map as either a single line or a multi-line string depending + +```erlang +maybe_multiline(X, Opts, Indent) -> + case maybe_format_short(X, Opts, Indent) of + {ok, SimpleFmt} -> SimpleFmt; + error -> + "\n" ++ lists:flatten(message(X, Opts, Indent)) + end. 
+``` + +### maybe_format_short + +Attempt to generate a short formatting of a message, using the given + +```erlang +maybe_format_short(X, Opts, _Indent) -> + MaxLen = hb_opts:get(debug_print_map_line_threshold, 100, Opts), + SimpleFmt = + case is_binary(X) of + true -> binary(X); + false -> io_lib:format("~p", [X]) + end, + case is_multiline(SimpleFmt) orelse (lists:flatlength(SimpleFmt) > MaxLen) of + true -> error; + false -> {ok, SimpleFmt} + end. +``` + +### is_multiline + +Is the given string a multi-line string? + +```erlang +is_multiline(Str) -> + lists:member($\n, Str). +``` + +### eunit_print + +Format and print an indented string to standard error. + +```erlang +eunit_print(FmtStr, FmtArgs) -> + io:format( + standard_error, + "~n~s ", + [indent(FmtStr ++ "...", FmtArgs, #{}, 4)] + ). +``` + +### print_trace + +Print the trace of the current stack, up to the first non-hyperbeam + +```erlang +print_trace(Stack, CallMod, CallFunc, CallLine) -> + print_trace(Stack, "HB TRACE", + lists:flatten(io_lib:format("[~s:~w ~p]", + [CallMod, CallLine, CallFunc]) + )). +``` + +### print_trace + +```erlang +print_trace(Stack, Label, CallerInfo) -> + io:format(standard_error, "=== ~s ===~s==>~n~s", + [ + Label, CallerInfo, + lists:flatten(trace(Stack)) + ]). +``` + +### trace + +Format a stack trace as a list of strings, one for each stack frame. + +```erlang +trace(Stack) -> + format_trace(Stack, hb_opts:get(stack_print_prefixes, [], #{})). 
+``` + +### format_trace + +```erlang +format_trace([], _) -> []; +``` + +### format_trace + +```erlang +format_trace([Item|Rest], Prefixes) -> + case element(1, Item) of + Atom when is_atom(Atom) -> + case true of %is_hb_module(Atom, Prefixes) of + true -> + [ + format_trace(Item, Prefixes) | + format_trace(Rest, Prefixes) + ]; + false -> [] + end; + _ -> [] + end; +``` + +### format_trace + +```erlang +format_trace({Func, ArityOrTerm, Extras}, Prefixes) -> + format_trace({no_module, Func, ArityOrTerm, Extras}, Prefixes); +``` + +### format_trace + +```erlang +format_trace({Mod, Func, ArityOrTerm, Extras}, _Prefixes) -> + ExtraMap = hb_maps:from_list(Extras), + indent( + "~p:~p/~p [~s]~n", + [ + Mod, Func, ArityOrTerm, + case hb_maps:get(line, ExtraMap, undefined) of + undefined -> "No details"; + Line -> + hb_maps:get(file, ExtraMap) + ++ ":" ++ integer_to_list(Line) + end + ], + #{}, + 1 + ). +``` + +### print_trace_short + +Print a trace to the standard error stream. + +```erlang +print_trace_short(Trace, Mod, Func, Line) -> + io:format(standard_error, "=== [ HB SHORT TRACE ~p:~w ~p ] ==> ~s~n", + [ + Mod, Line, Func, + trace_short(Trace) + ] + ). +``` + +### trace_to_list + +Return a list of calling modules and lines from a trace, removing all + +```erlang +trace_to_list(Trace) -> + Prefixes = hb_opts:get(stack_print_prefixes, [], #{}), + lists:filtermap( + fun(TraceItem) when is_binary(TraceItem) -> + {true, TraceItem}; + (TraceItem) -> + Formatted = format_trace_element(TraceItem), + case hb_util:is_hb_module(Formatted, Prefixes) of + true -> {true, Formatted}; + false -> false + end + end, + Trace + ). +``` + +### trace_short + +Format a trace to a short string. + +```erlang +trace_short() -> trace_short(get_trace(erlang)). +``` + +### trace_short + +Format a trace to a short string. + +```erlang +trace_short(Type) when is_atom(Type) -> trace_short(get_trace(Type)); +``` + +### trace_short + +Format a trace to a short string. 
+Format a trace element in form `mod:line` or `mod:func` for Erlang + +```erlang +trace_short(Trace) when is_list(Trace) -> + lists:join(" / ", lists:reverse(trace_to_list(Trace))). +``` + +### format_trace_element + +Format a trace to a short string. +Format a trace element in form `mod:line` or `mod:func` for Erlang + +```erlang +format_trace_element(Bin) when is_binary(Bin) -> Bin; +``` + +### format_trace_element + +Format a trace to a short string. +Format a trace element in form `mod:line` or `mod:func` for Erlang + +```erlang +format_trace_element({Mod, Line}) -> + lists:flatten(io_lib:format("~p:~p", [Mod, Line])); +``` + +### format_trace_element + +Format a trace to a short string. +Format a trace element in form `mod:line` or `mod:func` for Erlang + +```erlang +format_trace_element({Mod, _, _, [{file, _}, {line, Line}|_]}) -> + lists:flatten(io_lib:format("~p:~p", [Mod, Line])); +``` + +### format_trace_element + +Format a trace to a short string. +Format a trace element in form `mod:line` or `mod:func` for Erlang +Utility function to help macro `?trace/0` remove the first frame of the + +```erlang +format_trace_element({Mod, Func, _ArityOrTerm, _Extras}) -> + lists:flatten(io_lib:format("~p:~p", [Mod, Func])). +``` + +### trace_macro_helper + +Format a trace to a short string. +Format a trace element in form `mod:line` or `mod:func` for Erlang +Utility function to help macro `?trace/0` remove the first frame of the + +```erlang +trace_macro_helper(Fun, {_, {_, Stack}}, Mod, Func, Line) -> + Fun(Stack, Mod, Func, Line). +``` + +### get_trace + +Get the trace of the current execution. If the argument is `erlang`, + +```erlang +get_trace(erlang) -> + case catch error(debugging_print) of + {_, {_, Stack}} -> normalize_trace(Stack); + _ -> [] + end; +``` + +### get_trace + +Get the trace of the current execution. If the argument is `erlang`, + +```erlang +get_trace(ao) -> + case get(ao_stack) of + undefined -> []; + Stack -> Stack + end. 
+``` + +### normalize_trace + +Remove all calls from this module from the top of a trace. + +```erlang +normalize_trace([]) -> []; +``` + +### normalize_trace + +Remove all calls from this module from the top of a trace. + +```erlang +normalize_trace([{Mod, _, _, _}|Rest]) when Mod == ?MODULE -> + normalize_trace(Rest); +``` + +### normalize_trace + +Remove all calls from this module from the top of a trace. +Format a message for printing, optionally taking an indentation level + +```erlang +normalize_trace(Trace) -> Trace. +``` + +### message + +Remove all calls from this module from the top of a trace. +Format a message for printing, optionally taking an indentation level + +```erlang +message(Item) -> message(Item, #{}). +``` + +### message + +Remove all calls from this module from the top of a trace. +Format a message for printing, optionally taking an indentation level + +```erlang +message(Item, Opts) -> message(Item, Opts, 0). +``` + +### message + +Remove all calls from this module from the top of a trace. +Format a message for printing, optionally taking an indentation level + +```erlang +message(Bin, Opts, Indent) when is_binary(Bin) -> + indent( + binary(Bin), + Opts, + Indent + ); +``` + +### message + +Remove all calls from this module from the top of a trace. +Format a message for printing, optionally taking an indentation level + +```erlang +message(List, Opts, Indent) when is_list(List) -> + % Remove the leading newline from the formatted list, if it exists. +``` + +### message + +```erlang +message(RawMap, Opts, Indent) when is_map(RawMap) -> + % Should we filter out the priv key? + FilterPriv = hb_opts:get(debug_show_priv, false, Opts), + MainPriv = hb_maps:get(<<"priv">>, RawMap, #{}, Opts), + % Add private keys to the output if they are not hidden. Opt takes 3 forms: + % 1. `false' -- never show priv + % 2. `if_present' -- show priv only if there are keys inside + % 2. 
`always' -- always show priv + FooterKeys = + case {FilterPriv, MainPriv} of + {false, _} -> []; + {if_present, #{}} -> []; + {_, Priv} -> [{<<"!Private!">>, Priv}] + end, + Map = + case FilterPriv of + false -> RawMap; + _ -> hb_private:reset(RawMap) + end, + % Define helper functions for formatting elements of the map. +``` + +### message + +```erlang +message(Item, Opts, Indent) -> + % Whatever we have is not a message map. +``` + +### short_id + +Return a short ID for the different types of IDs used in AO-Core. + +```erlang +short_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 32 -> + short_id(hb_util:human_id(Bin)); +``` + +### short_id + +Return a short ID for the different types of IDs used in AO-Core. + +```erlang +short_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 43 -> + << FirstTag:5/binary, _:33/binary, LastTag:5/binary >> = Bin, + << FirstTag/binary, "..", LastTag/binary >>; +``` + +### short_id + +Return a short ID for the different types of IDs used in AO-Core. + +```erlang +short_id(Bin) when byte_size(Bin) > 43 andalso byte_size(Bin) < 100 -> + case binary:split(Bin, <<"/">>, [trim_all, global]) of + [First, Second] when byte_size(Second) == 43 -> + FirstEnc = short_id(First), + SecondEnc = short_id(Second), + << FirstEnc/binary, "/", SecondEnc/binary >>; + [First, Key] -> + FirstEnc = short_id(First), + << FirstEnc/binary, "/", Key/binary >>; + _ -> + Bin + end; +``` + +### short_id + +Return a short ID for the different types of IDs used in AO-Core. + +```erlang +short_id(<< "/", SingleElemHashpath/binary >>) -> + Enc = short_id(SingleElemHashpath), + if is_binary(Enc) -> << "/", Enc/binary >>; + true -> undefined + end; +``` + +### short_id + +Return a short ID for the different types of IDs used in AO-Core. + +```erlang +short_id(Key) when byte_size(Key) < 43 -> Key; +``` + +### short_id + +Return a short ID for the different types of IDs used in AO-Core. +Determine whether a binary is human-readable. 
+ +```erlang +short_id(_) -> undefined. +``` + +### is_human_binary + +Return a short ID for the different types of IDs used in AO-Core. +Determine whether a binary is human-readable. + +```erlang +is_human_binary(Bin) when is_binary(Bin) -> + case unicode:characters_to_binary(Bin) of + {error, _, _} -> false; + _ -> true +``` + +--- + +*Generated from [hb_format.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_format.erl)* diff --git a/docs/book/src/hb_gateway_client.erl.md b/docs/book/src/hb_gateway_client.erl.md new file mode 100644 index 000000000..a1cc38bdf --- /dev/null +++ b/docs/book/src/hb_gateway_client.erl.md @@ -0,0 +1,371 @@ +# hb_gateway_client + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_gateway_client.erl) + +Implementation of Arweave's GraphQL API to gain access to specific +items of data stored on the network. +This module must be used to get full HyperBEAM `structured@1.0` form messages +from data items stored on the network, as Arweave gateways do not presently +expose all necessary fields to retrieve this information outside of the +GraphQL API. When gateways integrate serving in `httpsig@1.0` form, this +module will be deprecated. + +--- + +## Exported Functions + +- `data/2` +- `item_spec/0` +- `query/2` +- `query/3` +- `query/4` +- `query/5` +- `read/2` +- `result_to_message/2` +- `scheduler_location/2` + +--- + +### read + +Implementation of Arweave's GraphQL API to gain access to specific +Get a data item (including data and tags) by its ID, using the node's + +```erlang +read(ID, Opts) -> + {Query, Variables} = case maps:is_key(<<"subindex">>, Opts) of + true -> + Tags = subindex_to_tags(maps:get(<<"subindex">>, Opts)), + { + << + "query($transactionIds: [ID!]!) 
{ ", + "transactions(ids: $transactionIds,", + "tags: ", (Tags)/binary , ",", + "first: 1){ ", + "edges { ", (item_spec())/binary , " } ", + "} ", + "} " + >>, + #{ + <<"transactionIds">> => [hb_util:human_id(ID)] + } + }; + false -> + { + << + "query($transactionIds: [ID!]!) { ", + "transactions(ids: $transactionIds, first: 1){ ", + "edges { ", (item_spec())/binary , " } ", + "} ", + "} " + >>, + #{ + <<"transactionIds">> => [hb_util:human_id(ID)] + } + } + end, + case query(Query, Variables, Opts) of + {error, Reason} -> {error, Reason}; + {ok, GqlMsg} -> + case hb_ao:get(<<"data/transactions/edges/1/node">>, GqlMsg, Opts) of + not_found -> + ?event({read_not_found, {id, ID}, {gql_msg, GqlMsg}}), + {error, not_found}; + Item -> + ?event({read_found, {id, ID}, {item, Item}}), + result_to_message(ID, Item, Opts) + end + end. +``` + +### item_spec + +Gives the fields of a transaction that are needed to construct an +Get the data associated with a transaction by its ID, using the node's + +```erlang +item_spec() -> + <<""" + node { + id + anchor + signature + recipient + owner { key } + fee { winston } + quantity { winston } + tags { name value } + data { size } + } + cursor + """>>. +``` + +### data + +Gives the fields of a transaction that are needed to construct an +Get the data associated with a transaction by its ID, using the node's + +```erlang +data(ID, Opts) -> + Req = #{ + <<"multirequest-accept-status">> => 200, + <<"multirequest-responses">> => 1, + <<"path">> => <<"/raw/", ID/binary>>, + <<"method">> => <<"GET">> + }, + case hb_http:request(Req, Opts) of + {ok, Res} -> + ?event(gateway, + {data, + {id, ID}, + {response, Res}, + {body, hb_ao:get(<<"body">>, Res, <<>>, Opts)} + } + ), + {ok, hb_ao:get(<<"body">>, Res, <<>>, Opts)}; + Res -> + ?event(gateway, {request_error, {id, ID}, {response, Res}}), + {error, no_viable_gateway} + end. +``` + +### scheduler_location + +Find the location of the scheduler based on its ID, through GraphQL. 
+ +```erlang +scheduler_location(Address, Opts) -> + Query = + <<"query($SchedulerAddrs: [String!]!) { ", + "transactions(", + "owners: $SchedulerAddrs, ", + "tags: { name: \"Type\" values: [\"Scheduler-Location\"] }, ", + "first: 1", + "){ ", + "edges { ", + (item_spec())/binary , + " } ", + "} ", + "}">>, + Variables = #{ <<"SchedulerAddrs">> => [Address] }, + case query(Query, Variables, Opts) of + {error, Reason} -> + ?event({scheduler_location, {query, Query}, {error, Reason}}), + {error, Reason}; + {ok, GqlMsg} -> + ?event({scheduler_location_req, {query, Query}, {response, GqlMsg}}), + case hb_ao:get(<<"data/transactions/edges/1/node">>, GqlMsg, Opts) of + not_found -> + ?event(scheduler_location, + {graphql_scheduler_location_not_found, + {address, Address} + } + ), + {error, not_found}; + Item = #{ <<"id">> := ID } -> + ?event(scheduler_location, + {found_via_graphql, + {address, Address}, + {id, ID} + } + ), + result_to_message(ID, Item, Opts) + end + end. +``` + +### query + +Run a GraphQL request encoded as a binary. The node message may contain + +```erlang +query(Query, Opts) -> + query(Query, undefined, Opts). +``` + +### query + +```erlang +query(Query, Variables, Opts) -> + query(Query, Variables, undefined, Opts). +``` + +### query + +```erlang +query(Query, Variables, Node, Opts) -> + query(Query, Variables, Node, undefined, Opts). +``` + +### query + +```erlang +query(Query, Variables, Node, Operation, Opts) -> + % Either use the given node if provided, or use the local machine's routes + % to find the GraphQL endpoint. +``` + +### result_to_message + +Takes a GraphQL item node, matches it with the appropriate data from a + +```erlang +result_to_message(Item, Opts) -> + case hb_maps:get(<<"id">>, Item, not_found, Opts) of + ExpectedID when is_binary(ExpectedID) -> + result_to_message(ExpectedID, Item, Opts); + _ -> + result_to_message(undefined, Item, Opts) + end. 
+``` + +### result_to_message + +```erlang +result_to_message(ExpectedID, Item, Opts) -> + GQLOpts = + Opts#{ + hashpath => ignore, + cache_control => [<<"no-cache">>, <<"no-store">>] + }, + % We have the headers, so we can get the data. +``` + +### normalize_null + +```erlang +normalize_null(null) -> <<>>; +``` + +### normalize_null + +```erlang +normalize_null(not_found) -> <<>>; +``` + +### normalize_null + +```erlang +normalize_null(Bin) when is_binary(Bin) -> Bin. +``` + +### decode_id_or_null + +```erlang +decode_id_or_null(Bin) when byte_size(Bin) > 0 -> + hb_util:human_id(Bin); +``` + +### decode_id_or_null + +```erlang +decode_id_or_null(_) -> + <<>>. +``` + +### decode_or_null + +```erlang +decode_or_null(Bin) when is_binary(Bin) -> + hb_util:decode(Bin); +``` + +### decode_or_null + +```erlang +decode_or_null(_) -> + <<>>. +``` + +### subindex_to_tags + +Takes a list of messages with `name` and `value` fields, and formats + +```erlang +subindex_to_tags(Subindex) -> + Formatted = + lists:map( + fun(Spec) -> + io_lib:format( + "{ name: \"~s\", values: [\"~s\"]}", + [ + hb_ao:get(<<"name">>, Spec), + hb_ao:get(<<"value">>, Spec) + ] + ) + end, + hb_util:message_to_ordered_list(Subindex) + ), + ListInner = + hb_util:bin( + string:join([lists:flatten(E) || E <- Formatted], ", ") + ), + <<"[", ListInner/binary, "]">>. +%%% Tests +``` + +### ans104_no_data_item_test + +Takes a list of messages with `name` and `value` fields, and formats + +```erlang +ans104_no_data_item_test() -> + % Start a random node so that all of the services come up. +``` + +### scheduler_location_test + +Test that we can get the scheduler location. + +```erlang +scheduler_location_test() -> + % Start a random node so that all of the services come up. 
+``` + +### l1_transaction_test + +Test l1 message from graphql +Test l2 message from graphql + +```erlang +l1_transaction_test() -> + _Node = hb_http_server:start_node(#{}), + {ok, Res} = read(<<"uJBApOt4ma3pTfY6Z4xmknz5vAasup4KcGX7FJ0Of8w">>, #{}), + ?event(gateway, {l1_transaction, Res}), + Data = maps:get(<<"data">>, Res), + ?assertEqual(<<"Hello World">>, Data). +``` + +### l2_dataitem_test + +Test l1 message from graphql +Test l2 message from graphql +Test optimistic index + +```erlang +l2_dataitem_test() -> + _Node = hb_http_server:start_node(#{}), + {ok, Res} = read(<<"oyo3_hCczcU7uYhfByFZ3h0ELfeMMzNacT-KpRoJK6g">>, #{}), + ?event(gateway, {l2_dataitem, Res}), + Data = maps:get(<<"data">>, Res), + ?assertEqual(<<"Hello World">>, Data). +``` + +### ao_dataitem_test + +Test l1 message from graphql +Test l2 message from graphql +Test optimistic index + +```erlang +ao_dataitem_test() -> + _Node = hb_http_server:start_node(#{}), + {ok, Res} = read(<<"oyo3_hCczcU7uYhfByFZ3h0ELfeMMzNacT-KpRoJK6g">>, #{ }), + ?event(gateway, {l2_dataitem, Res}), + Data = maps:get(<<"data">>, Res), +``` + +--- + +*Generated from [hb_gateway_client.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_gateway_client.erl)* diff --git a/docs/book/src/hb_http.erl.md b/docs/book/src/hb_http.erl.md new file mode 100644 index 000000000..ce0d291c0 --- /dev/null +++ b/docs/book/src/hb_http.erl.md @@ -0,0 +1,914 @@ +# hb_http + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http.erl) + +Hyperbeam's core HTTP request/reply functionality. The functions in this +module generally take a message request from their caller and return a +response in message form, as granted by the peer. This module is mostly +used by hb_client, but can also be used by other modules that need to make +HTTP requests. 
+ +--- + +## Exported Functions + +- `accept_to_codec/2` +- `get/2` +- `get/3` +- `message_to_request/2` +- `post/3` +- `post/4` +- `reply/4` +- `req_to_tabm_singleton/3` +- `request/2` +- `request/4` +- `request/5` +- `start/0` + +--- + +### start + +```erlang +start() -> + httpc:set_options([{max_keep_alive_length, 0}]), + ok. +``` + +### get + +Gets a URL via HTTP and returns the resulting message in deserialized + +```erlang +get(Node, Opts) -> get(Node, <<"/">>, Opts). +``` + +### get + +Gets a URL via HTTP and returns the resulting message in deserialized + +```erlang +get(Node, PathBin, Opts) when is_binary(PathBin) -> + get(Node, #{ <<"path">> => PathBin }, Opts); +``` + +### get + +Gets a URL via HTTP and returns the resulting message in deserialized + +```erlang +get(Node, Message, Opts) -> + request( + <<"GET">>, + Node, + hb_ao:get(<<"path">>, Message, <<"/">>, Opts), + Message, + Opts + ). +``` + +### post + +Posts a message to a URL on a remote peer via HTTP. Returns the + +```erlang +post(Node, Path, Opts) when is_binary(Path) -> + post(Node, #{ <<"path">> => Path }, Opts); +``` + +### post + +Posts a message to a URL on a remote peer via HTTP. Returns the + +```erlang +post(Node, Message, Opts) -> + post(Node, + hb_ao:get( + <<"path">>, + Message, + <<"/">>, + Opts#{ topic => ao_internal } + ), + Message, + Opts + ). +``` + +### post + +```erlang +post(Node, Path, Message, Opts) -> + case request(<<"POST">>, Node, Path, Message, Opts) of + {ok, Res} -> + ?event(http, {post_response, Res}), + {ok, Res}; + Error -> Error + end. +``` + +### request + +Posts a binary to a URL on a remote peer via HTTP, returning the raw + +```erlang +request(Message, Opts) -> + % Special case: We are not given a peer and a path, so we need to + % preprocess the URL to find them. +``` + +### request + +```erlang +request(Method, Peer, Path, Opts) -> + request(Method, Peer, Path, #{}, Opts). 
+``` + +### request + +```erlang +request(Method, Config = #{ <<"nodes">> := Nodes }, Path, Message, Opts) when is_list(Nodes) -> + % The request has a `route' (see `dev_router' for more details), so we use the + % `multirequest' functionality, rather than a single request. +``` + +### request + +```erlang +request(Method, #{ <<"opts">> := ReqOpts, <<"uri">> := URI }, _Path, Message, Opts) -> + % The request has a set of additional options, so we apply them to the + % request. +``` + +### request + +```erlang +request(Method, Peer, Path, RawMessage, Opts) -> + ?event({request, {method, Method}, {peer, Peer}, {path, Path}, {message, RawMessage}}), + Req = + prepare_request( + hb_maps:get( + <<"codec-device">>, + RawMessage, + <<"httpsig@1.0">>, + Opts + ), + Method, + Peer, + Path, + RawMessage, + Opts + ), + StartTime = os:system_time(millisecond), + % Perform the HTTP request. +``` + +### response_status_to_atom + +Convert a HTTP status code to a status atom. + +```erlang +response_status_to_atom(Status) -> + case Status of + 201 -> created; + X when X < 400 -> ok; + X when X < 500 -> error; + _ -> failure + end. +``` + +### outbound_result_to_message + +Convert an HTTP response to a message. + +```erlang +outbound_result_to_message(<<"ans104@1.0">>, Status, Headers, Body, Opts) -> + ?event(http_outbound, + {result_is_ans104, {headers, Headers}, {body, Body}}, + Opts + ), + try ar_bundles:deserialize(Body) of + Deserialized -> + { + response_status_to_atom(Status), + hb_message:convert( + Deserialized, + <<"structured@1.0">>, + <<"ans104@1.0">>, + Opts + ) + } + catch + _Class:ExceptionPattern:Stacktrace -> + % The response message had a `codec-device: ans104@1.0', but we + % failed to deserialize it, so we fallback to HTTPSig. 
+``` + +### outbound_result_to_message + +```erlang +outbound_result_to_message(<<"httpsig@1.0">>, Status, Headers, Body, Opts) -> + ?event(http_outbound, {result_is_httpsig, {body, Body}}, Opts), + { + response_status_to_atom(Status), + http_response_to_httpsig(Status, Headers, Body, Opts) + }. +``` + +### http_response_to_httpsig + +Convert a HTTP response to a httpsig message. +Given a message, return the information needed to make the request. + +```erlang +http_response_to_httpsig(Status, HeaderMap, Body, Opts) -> + (hb_message:convert( + hb_maps:merge( + HeaderMap#{ <<"status">> => hb_util:bin(Status) }, + case Body of + <<>> -> #{}; + _ -> #{ <<"body">> => Body } + end, + Opts + ), + #{ <<"device">> => <<"structured@1.0">>, <<"bundle">> => true }, + <<"httpsig@1.0">>, + Opts + ))#{ <<"status">> => hb_util:int(Status) }. +``` + +### message_to_request + +Convert a HTTP response to a httpsig message. +Given a message, return the information needed to make the request. + +```erlang +message_to_request(M, Opts) -> + % Get the route for the message + Res = route_to_request(M, RouteRes = dev_router:route(M, Opts), Opts), + ?event(debug_http, {route_res, {route_res, RouteRes}, {full_res, Res}, {msg, M}}), + Res. +``` + +### route_to_request + +Parse a `dev_router:route` response and return a tuple of request + +```erlang +route_to_request(M, {ok, URI}, Opts) when is_binary(URI) -> + route_to_request(M, {ok, #{ <<"uri">> => URI, <<"opts">> => #{} }}, Opts); +``` + +### route_to_request + +Parse a `dev_router:route` response and return a tuple of request + +```erlang +route_to_request(M, {ok, #{ <<"uri">> := XPath, <<"opts">> := ReqOpts}}, Opts) -> + % The request is a direct HTTP URL, so we need to split the path into a + % host and path. +``` + +### route_to_request + +```erlang +route_to_request(M, {ok, Routes}, Opts) -> + ?event(http_outbound, {found_routes, {req, M}, {routes, Routes}}), + % The result is a route, so we leave it to `request' to handle it. 
+``` + +### route_to_request + +```erlang +route_to_request(M, {error, Reason}, _Opts) -> + {error, {no_viable_route, {reason, Reason}, {message, M}}}. +``` + +### prepare_request + +Turn a set of request arguments into a request message, formatted in the + +```erlang +prepare_request(Format, Method, Peer, Path, RawMessage, Opts) -> + Message = hb_ao:normalize_keys(RawMessage, Opts), + % Generate a `cookie' key for the message, if an unencoded cookie is + % present. +``` + +### reply + +Reply to the client's HTTP request with a message. + +```erlang +reply(Req, TABMReq, Message, Opts) -> + Status = + case hb_ao:get(<<"status">>, Message, Opts) of + not_found -> 200; + S-> S + end, + reply(Req, TABMReq, Status, Message, Opts). +``` + +### reply + +```erlang +reply(Req, TABMReq, BinStatus, RawMessage, Opts) when is_binary(BinStatus) -> + reply(Req, TABMReq, binary_to_integer(BinStatus), RawMessage, Opts); +``` + +### reply + +```erlang +reply(InitReq, TABMReq, Status, RawMessage, Opts) -> + KeyNormMessage = hb_ao:normalize_keys(RawMessage, Opts), + {ok, Req, Message} = reply_handle_cookies(InitReq, KeyNormMessage, Opts), + {ok, HeadersBeforeCors, EncodedBody} = + encode_reply( + Status, + TABMReq, + Message, + Opts + ), + % Get the CORS request headers from the message, if they exist. +``` + +### reply_handle_cookies + +Handle replying with cookies if the message contains them. Returns the + +```erlang +reply_handle_cookies(Req, Message, Opts) -> + {ok, Cookies} = dev_codec_cookie:extract(Message, #{}, Opts), + ?event(debug_cookie, {encoding_reply_cookies, {explicit, Cookies}}), + case Cookies of + NoCookies when map_size(NoCookies) == 0 -> {ok, Req, Message}; + _ -> + % The internal values of the `cookie' field will be stored in the + % `priv_store' by default, so we let `dev_codec_cookie:opts/1' + % reset the options. 
+``` + +### add_cors_headers + +Add permissive CORS headers to a message, if the message has not already + +```erlang +add_cors_headers(Msg, ReqHdr, Opts) -> + CorHeaders = #{ + <<"access-control-allow-origin">> => <<"*">>, + <<"access-control-allow-methods">> => <<"GET, POST, PUT, DELETE, OPTIONS">>, + <<"access-control-expose-headers">> => <<"*">> + }, + WithAllowHeaders = case ReqHdr of + <<>> -> CorHeaders; + _ -> CorHeaders#{ + <<"access-control-allow-headers">> => ReqHdr + } + end, + % Keys in the given message will overwrite the defaults listed below if + % included, due to `hb_maps:merge''s precidence order. +``` + +### encode_reply + +Generate the headers and body for a HTTP response message. + +```erlang +encode_reply(Status, TABMReq, Message, Opts) -> + Codec = accept_to_codec(TABMReq, Message, Opts), + ?event(http, {encoding_reply, {codec, Codec}, {message, Message}}), + BaseHdrs = + hb_maps:merge( + #{ + <<"codec-device">> => Codec + }, + case codec_to_content_type(Codec, Opts) of + undefined -> #{}; + CT -> #{ <<"content-type">> => CT } + end, + Opts + ), + AcceptBundle = + hb_util:atom( + hb_maps:get(<<"accept-bundle">>, TABMReq, false, Opts) + ), + ?event(http, + {encoding_reply, + {status, Status}, + {codec, Codec}, + {should_bundle, AcceptBundle}, + {response_message, Message} + } + ), + % Codecs generally do not need to specify headers outside of the content-type, + % aside the default `httpsig@1.0' codec, which expresses its form in HTTP + % documents, and subsequently must set its own headers. +``` + +### accept_to_codec + +Calculate the codec name to use for a reply given the original parsed + +```erlang +accept_to_codec(OriginalReq, Opts) -> + accept_to_codec(OriginalReq, undefined, Opts). 
+``` + +### accept_to_codec + +```erlang +accept_to_codec(#{ <<"require-codec">> := RequiredCodec }, _Reply, Opts) -> + mime_to_codec(RequiredCodec, Opts); +``` + +### accept_to_codec + +```erlang +accept_to_codec(_OriginalReq, #{ <<"content-type">> := _ }, _Opts) -> + <<"httpsig@1.0">>; +``` + +### accept_to_codec + +```erlang +accept_to_codec(OriginalReq, _, Opts) -> + Accept = hb_maps:get(<<"accept">>, OriginalReq, <<"*/*">>, Opts), + ?event(debug_accept, + {accept_to_codec, + {original_req, OriginalReq}, + {accept, Accept} + } + ), + mime_to_codec(Accept, Opts). +``` + +### mime_to_codec + +Find a codec name from a mime-type. + +```erlang +mime_to_codec(<<"application/", Mime/binary>>, Opts) -> + Name = + case binary:match(Mime, <<"@">>) of + nomatch -> << Mime/binary, "@1.0" >>; + _ -> Mime + end, + case hb_ao:load_device(Name, Opts) of + {ok, _} -> Name; + {error, _} -> + Default = default_codec(Opts), + ?event(http, + {codec_parsing_error, + {given, Name}, + {defaulting_to, Default} + } + ), + Default + end; +``` + +### mime_to_codec + +Find a codec name from a mime-type. + +```erlang +mime_to_codec(<<"device/", Name/binary>>, _Opts) -> Name; +``` + +### mime_to_codec + +Find a codec name from a mime-type. + +```erlang +mime_to_codec(Device, Opts) -> + case binary:match(Device, <<"@">>) of + nomatch -> default_codec(Opts); + _ -> Device + end. +``` + +### default_codec + +Return the default codec for the given options. +Call the `content-type` key on a message with the given codec, using + +```erlang +default_codec(Opts) -> + hb_opts:get(default_codec, <<"httpsig@1.0">>, Opts). +``` + +### codec_to_content_type + +Return the default codec for the given options. 
+Call the `content-type` key on a message with the given codec, using + +```erlang +codec_to_content_type(Codec, Opts) -> + FastOpts = + Opts#{ + hashpath => ignore, + cache_control => [<<"no-cache">>, <<"no-store">>], + cache_lookup_hueristics => false, + load_remote_devices => false, + error_strategy => continue + }, + case hb_ao:get(<<"content-type">>, #{ <<"device">> => Codec }, FastOpts) of + not_found -> undefined; + CT -> CT + end. +``` + +### req_to_tabm_singleton + +Convert a cowboy request to a normalized message. We first parse the + +```erlang +req_to_tabm_singleton(Req, Body, Opts) -> + FullPath = + << + (cowboy_req:path(Req))/binary, + "?", + (cowboy_req:qs(Req))/binary + >>, + Headers = cowboy_req:headers(Req), + {ok, _Path, QueryKeys} = hb_singleton:from_path(FullPath), + PrimitiveMsg = maps:merge(Headers, QueryKeys), + Codec = + case hb_maps:find(<<"codec-device">>, PrimitiveMsg, Opts) of + {ok, ExplicitCodec} -> ExplicitCodec; + error -> + case hb_maps:find(<<"content-type">>, PrimitiveMsg, Opts) of + {ok, ContentType} -> mime_to_codec(ContentType, Opts); + error -> default_codec(Opts) + end + end, + ?event(http, + {parsing_req, + {path, FullPath}, + {query, QueryKeys}, + {headers, Headers}, + {primitive_message, PrimitiveMsg} + } + ), + ?event({req_to_tabm_singleton, {codec, Codec}}), + case Codec of + <<"httpsig@1.0">> -> + ?event( + {req_to_tabm_singleton, + {request, {explicit, Req}, + {body, {string, Body}} + }} + ), + httpsig_to_tabm_singleton(PrimitiveMsg, Req, Body, Opts); + <<"ans104@1.0">> -> + Item = ar_bundles:deserialize(Body), + ?event(debug_accept, + {deserialized_ans104, + {item, Item}, + {exact, {explicit, Item}} + } + ), + case ar_bundles:verify_item(Item) of + true -> + ?event(ans104, {valid_ans104_signature, Item}), + ANS104 = + hb_message:convert( + Item, + <<"structured@1.0">>, + <<"ans104@1.0">>, + Opts + ), + normalize_unsigned(PrimitiveMsg, Req, ANS104, Opts); + false -> + throw({invalid_ans104_signature, Item}) + end; + 
Codec -> + % Assume that the codec stores the encoded message in the `body' field. +``` + +### httpsig_to_tabm_singleton + +HTTPSig messages are inherently mixed into the transport layer, so they + +```erlang +httpsig_to_tabm_singleton(PrimMsg, Req, Body, Opts) -> + {ok, Decoded} = + hb_message:with_only_committed( + hb_message:convert( + PrimMsg#{ <<"body">> => Body }, + <<"structured@1.0">>, + <<"httpsig@1.0">>, + Opts + ), + Opts + ), + ?event(http, {decoded, Decoded}, Opts), + ForceSignedRequests = hb_opts:get(force_signed_requests, false, Opts), + case (not ForceSignedRequests) orelse hb_message:verify(Decoded, all, Opts) of + true -> + ?event(http_verify, {verified_signature, Decoded}), + Signers = hb_message:signers(Decoded, Opts), + case Signers =/= [] andalso hb_opts:get(store_all_signed, false, Opts) of + true -> + ?event(http_verify, {storing_signed_from_wire, Decoded}), + {ok, _} = + hb_cache:write(Decoded, + Opts#{ + store => + #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-http">> + } + } + ); + false -> + do_nothing + end, + normalize_unsigned(PrimMsg, Req, Decoded, Opts); + false -> + ?event(http_verify, + {invalid_signature, + {signed, Decoded}, + {force, ForceSignedRequests} + } + ), + throw({invalid_commitments, Decoded}) + end. +``` + +### normalize_unsigned + +Add the method and path to a message, if they are not already present. 
+ +```erlang +normalize_unsigned(PrimMsg, Req = #{ headers := RawHeaders }, Msg, Opts) -> + ?event({adding_method_and_path_from_request, {explicit, Req}}), + Method = cowboy_req:method(Req), + MsgPath = + hb_maps:get( + <<"path">>, + Msg, + hb_maps:get( + <<"path">>, + RawHeaders, + iolist_to_binary( + cowboy_req:uri( + Req, + #{ + host => undefined, + port => undefined, + scheme => undefined + } + ) + ), + Opts + ), + Opts + ), + FilterKeys = hb_opts:get(http_inbound_filter_keys, ?DEFAULT_FILTER_KEYS, Opts), + FilteredMsg = hb_message:without_unless_signed(FilterKeys, Msg, Opts), + BaseMsg = + FilteredMsg#{ + <<"method">> => Method, + <<"path">> => MsgPath, + <<"accept-bundle">> => + maps:get( + <<"accept-bundle">>, + Msg, + maps:get( + <<"accept-bundle">>, + PrimMsg, + maps:get(<<"accept-bundle">>, RawHeaders, false) + ) + ), + <<"accept">> => + Accept = maps:get( + <<"accept">>, + Msg, + maps:get( + <<"accept">>, + PrimMsg, + maps:get(<<"accept">>, RawHeaders, <<"*/*">>) + ) + ) + }, + ?event(debug_accept, {normalize_unsigned, {accept, Accept}}), + % Parse and add the cookie from the request, if present. We reinstate the + % `cookie' field in the message, as it is not typically signed, yet should + % be honored by the node anyway. +``` + +### simple_ao_resolve_unsigned_test + +```erlang +simple_ao_resolve_unsigned_test() -> + URL = hb_http_server:start_node(), + TestMsg = #{ <<"path">> => <<"/key1">>, <<"key1">> => <<"Value1">> }, + ?assertEqual({ok, <<"Value1">>}, post(URL, TestMsg, #{})). +``` + +### simple_ao_resolve_signed_test + +```erlang +simple_ao_resolve_signed_test() -> + URL = hb_http_server:start_node(), + TestMsg = #{ <<"path">> => <<"/key1">>, <<"key1">> => <<"Value1">> }, + Wallet = hb:wallet(), + {ok, Res} = + post( + URL, + hb_message:commit(TestMsg, Wallet), + #{} + ), + ?assertEqual(<<"Value1">>, Res). 
+``` + +### nested_ao_resolve_test + +```erlang +nested_ao_resolve_test() -> + URL = hb_http_server:start_node(), + Wallet = hb:wallet(), + {ok, Res} = + post( + URL, + hb_message:commit(#{ + <<"path">> => <<"/key1/key2/key3">>, + <<"key1">> => + #{<<"key2">> => + #{ + <<"key3">> => <<"Value2">> + } + } + }, Wallet), + #{} + ), + ?assertEqual(<<"Value2">>, Res). +``` + +### wasm_compute_request + +```erlang +wasm_compute_request(ImageFile, Func, Params) -> + wasm_compute_request(ImageFile, Func, Params, <<"">>). +``` + +### wasm_compute_request + +```erlang +wasm_compute_request(ImageFile, Func, Params, ResultPath) -> + {ok, Bin} = file:read_file(ImageFile), + Wallet = hb:wallet(), + hb_message:commit(#{ + <<"path">> => <<"/init/compute/results", ResultPath/binary>>, + <<"device">> => <<"wasm-64@1.0">>, + <<"function">> => Func, + <<"parameters">> => Params, + <<"body">> => Bin + }, Wallet). +``` + +### run_wasm_unsigned_test + +```erlang +run_wasm_unsigned_test() -> + Node = hb_http_server:start_node(#{force_signed => false}), + Msg = wasm_compute_request(<<"test/test-64.wasm">>, <<"fac">>, [3.0]), + {ok, Res} = post(Node, Msg, #{}), + ?event({res, Res}), + ?assertEqual(6.0, hb_ao:get(<<"output/1">>, Res, #{})). +``` + +### run_wasm_signed_test + +```erlang +run_wasm_signed_test() -> + Opts = #{ priv_wallet => hb:wallet() }, + URL = hb_http_server:start_node(#{force_signed => true}), + Msg = wasm_compute_request(<<"test/test-64.wasm">>, <<"fac">>, [3.0], <<"">>), + {ok, Res} = post(URL, hb_message:commit(Msg, Opts), Opts), + ?assertEqual(6.0, hb_ao:get(<<"output/1">>, Res, #{})). +``` + +### get_deep_unsigned_wasm_state_test + +```erlang +get_deep_unsigned_wasm_state_test() -> + URL = hb_http_server:start_node(#{force_signed => false}), + Msg = wasm_compute_request(<<"test/test-64.wasm">>, <<"fac">>, [3.0], <<"">>), + {ok, Res} = post(URL, Msg, #{}), + ?assertEqual(6.0, hb_ao:get(<<"/output/1">>, Res, #{})). 
+``` + +### get_deep_signed_wasm_state_test + +```erlang +get_deep_signed_wasm_state_test() -> + URL = hb_http_server:start_node(#{force_signed => true}), + Msg = + wasm_compute_request( + <<"test/test-64.wasm">>, + <<"fac">>, + [3.0], + <<"/output">> + ), + {ok, Res} = post(URL, Msg, #{}), + ?assertEqual(6.0, hb_ao:get(<<"1">>, Res, #{})). +``` + +### cors_get_test + +```erlang +cors_get_test() -> + URL = hb_http_server:start_node(), + {ok, Res} = get(URL, <<"/~meta@1.0/info">>, #{}), + ?assertEqual( + <<"*">>, + hb_ao:get(<<"access-control-allow-origin">>, Res, #{}) + ). +``` + +### ans104_wasm_test + +```erlang +ans104_wasm_test() -> + TestStore = [hb_test_utils:test_store()], + TestOpts = + #{ + force_signed => true, + store => TestStore, + priv_wallet => ar_wallet:new() + }, + ClientStore = [hb_test_utils:test_store()], + ClientOpts = #{ store => ClientStore, priv_wallet => hb:wallet() }, + URL = hb_http_server:start_node(TestOpts), + {ok, Bin} = file:read_file(<<"test/test-64.wasm">>), + Msg = + hb_message:commit( + #{ + <<"require-codec">> => <<"ans104@1.0">>, + <<"codec-device">> => <<"ans104@1.0">>, + <<"device">> => <<"wasm-64@1.0">>, + <<"function">> => <<"fac">>, + <<"parameters">> => [3.0], + <<"body">> => Bin + }, + ClientOpts, + #{ <<"device">> => <<"ans104@1.0">>, <<"bundle">> => true } + ), + ?assert(hb_message:verify(Msg, all, ClientOpts)), + ?event({msg, Msg}), + {ok, Res} = + post( + URL, + Msg#{ <<"path">> => <<"/init/compute/results">> }, + ClientOpts + ), + ?event({res, Res}), + ?assertEqual(6.0, hb_ao:get(<<"output/1">>, Res, ClientOpts)). +``` + +### send_large_signed_request_test + +```erlang +send_large_signed_request_test() -> + % Note: If the signature scheme ever changes, we will need to run the + % following to get a freshly signed request. 
+``` + +### index_test + +```erlang +index_test() -> + NodeURL = hb_http_server:start_node(), + {ok, Res} = + get( + NodeURL, + #{ + <<"path">> => <<"/~test-device@1.0/load">>, + <<"accept-bundle">> => false + }, + #{} + ), + ?assertEqual(<<"i like turtles!">>, hb_ao:get(<<"body">>, Res, #{})). +``` + +### index_request_test + +```erlang +index_request_test() -> + URL = hb_http_server:start_node(), + {ok, Res} = + get( + URL, + #{ + <<"path">> => <<"/~test-device@1.0/load?name=dogs">>, + <<"accept-bundle">> => false + }, + #{} + ), + ?assertEqual(<<"i like dogs!">>, hb_ao:get(<<"body">>, Res, #{})). +``` + +--- + +*Generated from [hb_http.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http.erl)* diff --git a/docs/book/src/hb_http_benchmark_tests.erl.md b/docs/book/src/hb_http_benchmark_tests.erl.md new file mode 100644 index 000000000..75b5c72f3 --- /dev/null +++ b/docs/book/src/hb_http_benchmark_tests.erl.md @@ -0,0 +1,8 @@ +# hb_http_benchmark_tests + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_benchmark_tests.erl) + + +--- + +*Generated from [hb_http_benchmark_tests.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_benchmark_tests.erl)* diff --git a/docs/book/src/hb_http_client.erl.md b/docs/book/src/hb_http_client.erl.md new file mode 100644 index 000000000..49afa1f68 --- /dev/null +++ b/docs/book/src/hb_http_client.erl.md @@ -0,0 +1,923 @@ +# hb_http_client + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_client.erl) + +A wrapper library for gun. This module originates from the Arweave +project, and has been modified for use in HyperBEAM. + +--- + +## Exported Functions + +- `handle_call/3` +- `handle_cast/2` +- `handle_info/2` +- `init/1` +- `req/2` +- `start_link/1` +- `terminate/2` + +--- + +### start_link + +A wrapper library for gun. 
This module originates from the Arweave + +```erlang +start_link(Opts) -> + gen_server:start_link({local, ?MODULE}, ?MODULE, Opts, []). +``` + +### req + +```erlang +req(Args, Opts) -> req(Args, false, Opts). +``` + +### req + +```erlang +req(Args, ReestablishedConnection, Opts) -> + case hb_opts:get(http_client, gun, Opts) of + gun -> gun_req(Args, ReestablishedConnection, Opts); + httpc -> httpc_req(Args, ReestablishedConnection, Opts) + end. +``` + +### httpc_req + +```erlang +httpc_req(Args, _, Opts) -> + #{ + peer := Peer, + path := Path, + method := RawMethod, + headers := Headers, + body := Body + } = Args, + ?event({httpc_req, Args}), + {Host, Port} = parse_peer(Peer, Opts), + Scheme = case Port of + 443 -> "https"; + _ -> "http" + end, + ?event(http_client, {httpc_req, {explicit, Args}}), + URL = binary_to_list(iolist_to_binary([Scheme, "://", Host, ":", integer_to_binary(Port), Path])), + FilteredHeaders = hb_maps:without([<<"content-type">>, <<"cookie">>], Headers, Opts), + HeaderKV = + [ + {binary_to_list(Key), binary_to_list(Value)} + || + {Key, Value} <- hb_maps:to_list(FilteredHeaders, Opts) + ] ++ + [ + {<<"cookie">>, CookieLine} + || + CookieLine <- + case hb_maps:get(<<"cookie">>, Headers, [], Opts) of + Binary when is_binary(Binary) -> + [Binary]; + List when is_list(List) -> + List + end + ], + Method = binary_to_existing_atom(hb_util:to_lower(RawMethod)), + ContentType = hb_maps:get(<<"content-type">>, Headers, <<"application/octet-stream">>, Opts), + Request = + case Method of + get -> + { + URL, + HeaderKV + }; + _ -> + { + URL, + HeaderKV, + binary_to_list(ContentType), + Body + } + end, + ?event({http_client_outbound, Method, URL, Request}), + HTTPCOpts = [{full_result, true}, {body_format, binary}], + StartTime = os:system_time(millisecond), + case httpc:request(Method, Request, [], HTTPCOpts) of + {ok, {{_, Status, _}, RawRespHeaders, RespBody}} -> + EndTime = os:system_time(millisecond), + RespHeaders = + [ + {list_to_binary(Key), 
list_to_binary(Value)} + || + {Key, Value} <- RawRespHeaders + ], + ?event(http_client, {httpc_resp, Status, RespHeaders, RespBody}), + record_duration(#{ + <<"request-method">> => method_to_bin(Method), + <<"request-path">> => hb_util:bin(Path), + <<"status-class">> => get_status_class(Status), + <<"duration">> => EndTime - StartTime + }, + Opts + ), + {ok, Status, RespHeaders, RespBody}; + {error, Reason} -> + ?event(http_client, {httpc_error, Reason}), + {error, Reason} + end. +``` + +### gun_req + +```erlang +gun_req(Args, ReestablishedConnection, Opts) -> + StartTime = os:system_time(millisecond), + #{ peer := Peer, path := Path, method := Method } = Args, + Response = + case catch gen_server:call(?MODULE, {get_connection, Args, Opts}, infinity) of + {ok, PID} -> + ar_rate_limiter:throttle(Peer, Path, Opts), + case request(PID, Args, Opts) of + {error, Error} when Error == {shutdown, normal}; + Error == noproc -> + case ReestablishedConnection of + true -> + {error, client_error}; + false -> + req(Args, true, Opts) + end; + Reply -> + Reply + end; + {'EXIT', _} -> + {error, client_error}; + Error -> + Error + end, + EndTime = os:system_time(millisecond), + %% Only log the metric for the top-level call to req/2 - not the recursive call + %% that happens when the connection is reestablished. +``` + +### record_duration + +Record the duration of the request in an async process. We write the + +```erlang +record_duration(Details, Opts) -> + spawn( + fun() -> + % First, write to prometheus if it is enabled. Prometheus works + % only with strings as lists, so we encode the data before granting + % it. +``` + +### maybe_invoke_monitor + +Invoke the HTTP monitor message with AO-Core, if it is set in the + +```erlang +maybe_invoke_monitor(Details, Opts) -> + case hb_ao:get(<<"http_monitor">>, Opts, Opts) of + not_found -> ok; + Monitor -> + % We have a monitor message. 
Place the `details' into the body, set + % the `method' to "POST", add the `http_reference' (if applicable) + % and sign the request. We use the node message's wallet as the + % source of the key. +``` + +### init + +```erlang +init(Opts) -> + case hb_opts:get(prometheus, not hb_features:test(), Opts) of + true -> + ?event({starting_prometheus_application, + {test_mode, hb_features:test()} + } + ), + try + application:ensure_all_started([prometheus, prometheus_cowboy]), + init_prometheus(Opts) + catch + Type:Reason:Stack -> + ?event(warning, + {prometheus_not_started, + {type, Type}, + {reason, Reason}, + {stack, Stack} + } + ), + {ok, #state{ opts = Opts }} + end; + false -> {ok, #state{ opts = Opts }} + end. +``` + +### init_prometheus + +```erlang +init_prometheus(Opts) -> + application:ensure_all_started([prometheus, prometheus_cowboy]), + prometheus_counter:new([ + {name, gun_requests_total}, + {labels, [http_method, route, status_class]}, + { + help, + "The total number of GUN requests." + } + ]), + prometheus_gauge:new([{name, outbound_connections}, + {help, "The current number of the open outbound network connections"}]), + prometheus_histogram:new([ + {name, http_request_duration_seconds}, + {buckets, [0.01, 0.1, 0.5, 1, 5, 10, 30, 60]}, + {labels, [http_method, route, status_class]}, + { + help, + "The total duration of an hb_http_client:req call. This includes more than" + " just the GUN request itself (e.g. establishing a connection, " + "throttling, etc...)" + } + ]), + prometheus_histogram:new([ + {name, http_client_get_chunk_duration_seconds}, + {buckets, [0.1, 1, 10, 60]}, + {labels, [status_class, peer]}, + { + help, + "The total duration of an HTTP GET chunk request made to a peer." 
+ } + ]), + prometheus_counter:new([ + {name, http_client_downloaded_bytes_total}, + {help, "The total amount of bytes requested via HTTP, per remote endpoint"}, + {labels, [route]} + ]), + prometheus_counter:new([ + {name, http_client_uploaded_bytes_total}, + {help, "The total amount of bytes posted via HTTP, per remote endpoint"}, + {labels, [route]} + ]), + ?event(started), + {ok, #state{ opts = Opts }}. +``` + +### handle_call + +```erlang +handle_call({get_connection, Args, Opts}, From, + #state{ pid_by_peer = PIDPeer, status_by_pid = StatusByPID } = State) -> + Peer = hb_maps:get(peer, Args, undefined, Opts), + case hb_maps:get(Peer, PIDPeer, not_found, Opts) of + not_found -> + {ok, PID} = open_connection(Args, hb_maps:merge(State#state.opts, Opts, Opts)), + MonitorRef = monitor(process, PID), + PIDPeer2 = hb_maps:put(Peer, PID, PIDPeer, Opts), + StatusByPID2 = + hb_maps:put( + PID, + {{connecting, [{From, Args}]}, MonitorRef, Peer}, + StatusByPID, + Opts + ), + { + reply, + {ok, PID}, + State#state{ + pid_by_peer = PIDPeer2, + status_by_pid = StatusByPID2 + } + }; + PID -> + case hb_maps:get(PID, StatusByPID, undefined, Opts) of + {{connecting, PendingRequests}, MonitorRef, Peer} -> + StatusByPID2 = + hb_maps:put(PID, + { + {connecting, [{From, Args} | PendingRequests]}, + MonitorRef, + Peer + }, + StatusByPID, + Opts + ), + {noreply, State#state{ status_by_pid = StatusByPID2 }}; + {connected, _MonitorRef, Peer} -> + {reply, {ok, PID}, State} + end + end; +``` + +### handle_call + +```erlang +handle_call(Request, _From, State) -> + ?event(warning, {unhandled_call, {module, ?MODULE}, {request, Request}}), + {reply, ok, State}. +``` + +### handle_cast + +```erlang +handle_cast(Cast, State) -> + ?event(warning, {unhandled_cast, {module, ?MODULE}, {cast, Cast}}), + {noreply, State}. 
+``` + +### handle_info + +```erlang +handle_info({gun_up, PID, _Protocol}, #state{ status_by_pid = StatusByPID } = State) -> + case hb_maps:get(PID, StatusByPID, not_found) of + not_found -> + %% A connection timeout should have occurred. +``` + +### handle_info + +```erlang +handle_info({gun_error, PID, Reason}, + #state{ pid_by_peer = PIDByPeer, status_by_pid = StatusByPID } = State) -> + case hb_maps:get(PID, StatusByPID, not_found) of + not_found -> + ?event(warning, {gun_connection_error_with_unknown_pid}), + {noreply, State}; + {Status, _MonitorRef, Peer} -> + PIDByPeer2 = hb_maps:remove(Peer, PIDByPeer), + StatusByPID2 = hb_maps:remove(PID, StatusByPID), + Reason2 = + case Reason of + timeout -> + connect_timeout; + {Type, _} -> + Type; + _ -> + Reason + end, + case Status of + {connecting, PendingRequests} -> + reply_error(PendingRequests, Reason2); + connected -> + dec_prometheus_gauge(outbound_connections), + ok + end, + gun:shutdown(PID), + ?event({connection_error, {reason, Reason}}), + {noreply, State#state{ status_by_pid = StatusByPID2, pid_by_peer = PIDByPeer2 }} + end; +``` + +### handle_info + +```erlang +handle_info({gun_down, PID, Protocol, Reason, _KilledStreams, _UnprocessedStreams}, + #state{ pid_by_peer = PIDByPeer, status_by_pid = StatusByPID } = State) -> + case hb_maps:get(PID, StatusByPID, not_found) of + not_found -> + ?event(warning, + {gun_connection_down_with_unknown_pid, {protocol, Protocol}}), + {noreply, State}; + {Status, _MonitorRef, Peer} -> + PIDByPeer2 = hb_maps:remove(Peer, PIDByPeer), + StatusByPID2 = hb_maps:remove(PID, StatusByPID), + Reason2 = + case Reason of + {Type, _} -> + Type; + _ -> + Reason + end, + case Status of + {connecting, PendingRequests} -> + reply_error(PendingRequests, Reason2); + _ -> + dec_prometheus_gauge(outbound_connections), + ok + end, + {noreply, + State#state{ + status_by_pid = StatusByPID2, + pid_by_peer = PIDByPeer2 + } + } + end; +``` + +### handle_info + +```erlang +handle_info({'DOWN', 
_Ref, process, PID, Reason}, + #state{ pid_by_peer = PIDByPeer, status_by_pid = StatusByPID } = State) -> + case hb_maps:get(PID, StatusByPID, not_found) of + not_found -> + {noreply, State}; + {Status, _MonitorRef, Peer} -> + PIDByPeer2 = hb_maps:remove(Peer, PIDByPeer), + StatusByPID2 = hb_maps:remove(PID, StatusByPID), + case Status of + {connecting, PendingRequests} -> + reply_error(PendingRequests, Reason); + _ -> + dec_prometheus_gauge(outbound_connections), + ok + end, + {noreply, + State#state{ + status_by_pid = StatusByPID2, + pid_by_peer = PIDByPeer2 + } + } + end; +``` + +### handle_info + +```erlang +handle_info(Message, State) -> + ?event(warning, {unhandled_info, {module, ?MODULE}, {message, Message}}), + {noreply, State}. +``` + +### terminate + +```erlang +terminate(Reason, #state{ status_by_pid = StatusByPID }) -> + ?event(info,{http_client_terminating, {reason, Reason}}), + hb_maps:map(fun(PID, _Status) -> gun:shutdown(PID) end, StatusByPID), + ok. +``` + +### inc_prometheus_gauge + +Safe wrapper for prometheus_gauge:inc/2. + +```erlang +inc_prometheus_gauge(Name) -> + case application:get_application(prometheus) of + undefined -> ok; + _ -> + try prometheus_gauge:inc(Name) + catch _:_ -> + init_prometheus(#{}), + prometheus_gauge:inc(Name) + end + end. +``` + +### dec_prometheus_gauge + +Safe wrapper for prometheus_gauge:dec/2. + +```erlang +dec_prometheus_gauge(Name) -> + case application:get_application(prometheus) of + undefined -> ok; + _ -> prometheus_gauge:dec(Name) + end. +``` + +### inc_prometheus_counter + +```erlang +inc_prometheus_counter(Name, Labels, Value) -> + case application:get_application(prometheus) of + undefined -> ok; + _ -> prometheus_counter:inc(Name, Labels, Value) + end. 
+``` + +### open_connection + +```erlang +open_connection(#{ peer := Peer }, Opts) -> + {Host, Port} = parse_peer(Peer, Opts), + ?event(http_outbound, {parsed_peer, {peer, Peer}, {host, Host}, {port, Port}}), + BaseGunOpts = + #{ + http_opts => + #{ + keepalive => + hb_opts:get( + http_keepalive, + no_keepalive_timeout, + Opts + ) + }, + retry => 0, + connect_timeout => + hb_opts:get( + http_connect_timeout, + no_connect_timeout, + Opts + ) + }, + Transport = + case Port of + 443 -> tls; + _ -> tcp + end, + DefaultProto = + case hb_features:http3() of + true -> http3; + false -> http2 + end, + % Fallback through earlier HTTP versions if the protocol is not supported. +``` + +### parse_peer + +```erlang +parse_peer(Peer, Opts) -> + Parsed = uri_string:parse(Peer), + case Parsed of + #{ host := Host, port := Port } -> + {hb_util:list(Host), Port}; + URI = #{ host := Host } -> + { + hb_util:list(Host), + case hb_maps:get(scheme, URI, undefined, Opts) of + <<"https">> -> 443; + _ -> hb_opts:get(port, 8734, Opts) + end + } + end. +``` + +### reply_error + +```erlang +reply_error([], _Reason) -> + ok; +``` + +### reply_error + +```erlang +reply_error([PendingRequest | PendingRequests], Reason) -> + ReplyTo = element(1, PendingRequest), + Args = element(2, PendingRequest), + Method = hb_maps:get(method, Args), + Path = hb_maps:get(path, Args), + record_response_status(Method, Path, {error, Reason}), + gen_server:reply(ReplyTo, {error, Reason}), + reply_error(PendingRequests, Reason). +``` + +### record_response_status + +```erlang +record_response_status(Method, Path, Response) -> + inc_prometheus_counter(gun_requests_total, + [ + hb_util:list(method_to_bin(Method)), + Path, + hb_util:list(get_status_class(Response)) + ], + 1 + ). 
+``` + +### method_to_bin + +```erlang +method_to_bin(get) -> + <<"GET">>; +``` + +### method_to_bin + +```erlang +method_to_bin(post) -> + <<"POST">>; +``` + +### method_to_bin + +```erlang +method_to_bin(put) -> + <<"PUT">>; +``` + +### method_to_bin + +```erlang +method_to_bin(head) -> + <<"HEAD">>; +``` + +### method_to_bin + +```erlang +method_to_bin(delete) -> + <<"DELETE">>; +``` + +### method_to_bin + +```erlang +method_to_bin(connect) -> + <<"CONNECT">>; +``` + +### method_to_bin + +```erlang +method_to_bin(options) -> + <<"OPTIONS">>; +``` + +### method_to_bin + +```erlang +method_to_bin(trace) -> + <<"TRACE">>; +``` + +### method_to_bin + +```erlang +method_to_bin(patch) -> + <<"PATCH">>; +``` + +### method_to_bin + +```erlang +method_to_bin(_) -> + <<"unknown">>. +``` + +### request + +```erlang +request(PID, Args, Opts) -> + Timer = + inet:start_timer( + hb_opts:get(http_request_send_timeout, no_request_send_timeout, Opts) + ), + Method = hb_maps:get(method, Args, undefined, Opts), + Path = hb_maps:get(path, Args, undefined, Opts), + HeaderMap = hb_maps:get(headers, Args, #{}, Opts), + % Normalize cookie header lines from the header map. We support both + % lists of cookie lines and a single cookie line. 
+``` + +### await_response + +```erlang +await_response(Args, Opts) -> + #{ pid := PID, stream_ref := Ref, timer := Timer, limit := Limit, + counter := Counter, acc := Acc, method := Method, path := Path } = Args, + case gun:await(PID, Ref, inet:timeout(Timer)) of + {response, fin, Status, Headers} -> + upload_metric(Args), + ?event(http, {gun_response, {status, Status}, {headers, Headers}, {body, none}}), + {ok, Status, Headers, <<>>}; + {response, nofin, Status, Headers} -> + await_response(Args#{ status => Status, headers => Headers }, Opts); + {data, nofin, Data} -> + case Limit of + infinity -> + await_response(Args#{ acc := [Acc | Data] }, Opts); + Limit -> + Counter2 = size(Data) + Counter, + case Limit >= Counter2 of + true -> + await_response( + Args#{ + counter := Counter2, + acc := [Acc | Data] + }, + Opts + ); + false -> + ?event(error, {http_fetched_too_much_data, Args, + <<"Fetched too much data">>, Opts}), + {error, too_much_data} + end + end; + {data, fin, Data} -> + FinData = iolist_to_binary([Acc | Data]), + download_metric(FinData, Args), + upload_metric(Args), + {ok, + hb_maps:get(status, Args, undefined, Opts), + hb_maps:get(headers, Args, undefined, Opts), + FinData + }; + {error, timeout} = Response -> + record_response_status(Method, Path, Response), + gun:cancel(PID, Ref), + log(warn, gun_await_process_down, Args, Response, Opts), + Response; + {error, Reason} = Response when is_tuple(Reason) -> + record_response_status(Method, Path, Response), + log(warn, gun_await_process_down, Args, Reason, Opts), + Response; + Response -> + record_response_status(Method, Path, Response), + log(warn, gun_await_unknown, Args, Response, Opts), + Response + end. +``` + +### log + +```erlang +log(Type, Event, #{method := Method, peer := Peer, path := Path}, Reason, Opts) -> + ?event( + http, + {gun_log, + {type, Type}, + {event, Event}, + {method, Method}, + {peer, Peer}, + {path, Path}, + {reason, Reason} + }, + Opts + ), + ok. 
+``` + +### download_metric + +```erlang +download_metric(Data, #{path := Path}) -> + inc_prometheus_counter( + http_client_downloaded_bytes_total, + [Path], + byte_size(Data) + ). +``` + +### upload_metric + +```erlang +upload_metric(#{method := post, path := Path, body := Body}) -> + inc_prometheus_counter( + http_client_uploaded_bytes_total, + [Path], + byte_size(Body) + ); +``` + +### upload_metric + +```erlang +upload_metric(_) -> + ok. +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class({ok, {{Status, _}, _, _, _, _}}) -> + get_status_class(Status); +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class({error, connection_closed}) -> + <<"connection_closed">>; +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class({error, connect_timeout}) -> + <<"connect_timeout">>; +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class({error, timeout}) -> + <<"timeout">>; +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class({error,{shutdown,timeout}}) -> + <<"shutdown_timeout">>; +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class({error, econnrefused}) -> + <<"econnrefused">>; +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class({error, {shutdown,econnrefused}}) -> + <<"shutdown_econnrefused">>; +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class({error, {shutdown,ehostunreach}}) -> + <<"shutdown_ehostunreach">>; +``` + +### get_status_class + +Return the HTTP status class label for 
cowboy_requests_total and + +```erlang +get_status_class({error, {shutdown,normal}}) -> + <<"shutdown_normal">>; +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class({error, {closed,_}}) -> + <<"closed">>; +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class({error, noproc}) -> + <<"noproc">>; +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class(208) -> + <<"already_processed">>; +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class(Data) when is_integer(Data), Data > 0 -> + hb_util:bin(prometheus_http:status_class(Data)); +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class(Data) when is_binary(Data) -> + case catch binary_to_integer(Data) of + {_, _} -> + <<"unknown">>; + Status -> + get_status_class(Status) + end; +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class(Data) when is_atom(Data) -> + atom_to_binary(Data); +``` + +### get_status_class + +Return the HTTP status class label for cowboy_requests_total and + +```erlang +get_status_class(_) -> +``` + +--- + +*Generated from [hb_http_client.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_client.erl)* diff --git a/docs/book/src/hb_http_client_sup.erl.md b/docs/book/src/hb_http_client_sup.erl.md new file mode 100644 index 000000000..7c59be6af --- /dev/null +++ b/docs/book/src/hb_http_client_sup.erl.md @@ -0,0 +1,34 @@ +# hb_http_client_sup + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_client_sup.erl) + +The supervisor for the gun HTTP client wrapper. 
+ +--- + +## Exported Functions + +- `init/1` +- `start_link/1` + +--- + +### start_link + +The supervisor for the gun HTTP client wrapper. + +```erlang +start_link(Opts) -> + supervisor:start_link({local, ?MODULE}, ?MODULE, Opts). +``` + +### init + +```erlang +init(Opts) -> + {ok, {{one_for_one, 5, 10}, [?CHILD(hb_http_client, worker, Opts)]}}. +``` + +--- + +*Generated from [hb_http_client_sup.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_client_sup.erl)* diff --git a/docs/book/src/hb_http_multi.erl.md b/docs/book/src/hb_http_multi.erl.md new file mode 100644 index 000000000..1cbd093b2 --- /dev/null +++ b/docs/book/src/hb_http_multi.erl.md @@ -0,0 +1,393 @@ +# hb_http_multi + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_multi.erl) + +An interface for resolving requests across multiple HTTP servers, either +concurrently or sequentially, and processing the results in a configurable +manner. +The `Config` message for a call to `request/5` may contain the following +fields: +- `multirequest-nodes`: A list of nodes to request from. +- `multirequest-responses`: The number of responses to gather. +- `multirequest-stop-after`: Whether to stop after the required number of + responses. +- `multirequest-parallel`: Whether to run the requests in parallel. +- `multirequest-admissible`: A message to resolve against the response. +- `multirequest-admissible-status`: The statuses that are admissible. +The `admissible` message is executed as a `base` message, with its `path` +field moved to the request (or set to `is-admissible` if not present): +``` + resolve(Base, Response#{ <<"path">> => Base/path OR /is-admissible }, Opts) +''' + +--- + +## Exported Functions + +- `request/5` + +--- + +### request + +An interface for resolving requests across multiple HTTP servers, either +Dispatch the same HTTP request to many nodes. 
Can be configured to + +```erlang +request(Config, Method, Path, Message, Opts) -> + #{ + nodes := Nodes, + responses := Responses, + stop_after := StopAfter, + admissible := Admissible, + admissible_status := Statuses, + parallel := Parallel + } = multirequest_opts(Config, Message, Opts), + MultirequestMsg = + hb_message:without_unless_signed( + lists:filter( + fun(<<"multirequest-", _/binary>>) -> true; (_) -> false end, + hb_maps:keys(Message) + ), + Message, + Opts + ), + ?event(debug_multi, + {multirequest_opts_parsed, + {config, Config}, + {method, Method}, + {path, Path}, + {raw_message, Message}, + {message_to_send, MultirequestMsg} + }), + AllResults = + if Parallel -> + parallel_multirequest( + Nodes, + Responses, + StopAfter, + Method, + Path, + MultirequestMsg, + Admissible, + Statuses, + Opts + ); + true -> + serial_multirequest( + Nodes, + Responses, + Method, + Path, + MultirequestMsg, + Admissible, + Statuses, + Opts + ) + end, + ?event(http, {multirequest_results, {results, AllResults}}), + case AllResults of + [] -> {error, no_viable_responses}; + Results -> if Responses == 1 -> hd(Results); true -> Results end + end. +``` + +### multirequest_opts + +Get the multirequest options from the config or message. The options in + +```erlang +multirequest_opts(Config, Message, Opts) -> + Opts#{ + nodes => + multirequest_opt(<<"nodes">>, Config, Message, #{}, Opts), + responses => + multirequest_opt(<<"responses">>, Config, Message, 1, Opts), + stop_after => + multirequest_opt(<<"stop-after">>, Config, Message, true, Opts), + admissible => + multirequest_opt(<<"admissible">>, Config, Message, undefined, Opts), + admissible_status => + multirequest_opt(<<"admissible-status">>, Config, Message, <<"All">>, Opts), + parallel => + multirequest_opt(<<"parallel">>, Config, Message, false, Opts) + }. +``` + +### multirequest_opt + +Get a value for a multirequest option from the config or message. 
+ +```erlang +multirequest_opt(Key, Config, Message, Default, Opts) -> + hb_ao:get_first( + [ + {Message, <<"multirequest-", Key/binary>>}, + {Config, Key} + ], + Default, + Opts#{ hashpath => ignore } + ). +``` + +### is_admissible + +Check if a response is admissible, according to the configuration. First, + +```erlang +is_admissible(ok, Res, Admissible, Statuses, Opts) -> + ?event(debug_multi, + {is_admissible, + {response, Res}, + {admissible, Admissible}, + {statuses, Statuses} + } + ), + AdmissibleStatus = admissible_status(Res, Statuses), + ?event(debug_multi, {admissible_status, {result, AdmissibleStatus}}), + AdmissibleResponse = admissible_response(Res, Admissible, Opts), + ?event(debug_multi, {admissible_response, {result, AdmissibleResponse}}), + AdmissibleStatus andalso AdmissibleResponse; +``` + +### is_admissible + +Check if a response is admissible, according to the configuration. First, +Serially request a message, collecting responses until the required + +```erlang +is_admissible(_, _, _, _, _) -> false. +``` + +### serial_multirequest + +Check if a response is admissible, according to the configuration. First, +Serially request a message, collecting responses until the required + +```erlang +serial_multirequest(_Nodes, 0, _Method, _Path, _Message, _Admissible, _Statuses, _Opts) -> []; +``` + +### serial_multirequest + +Check if a response is admissible, according to the configuration. First, +Serially request a message, collecting responses until the required + +```erlang +serial_multirequest([], _, _Method, _Path, _Message, _Admissible, _Statuses, _Opts) -> []; +``` + +### serial_multirequest + +Check if a response is admissible, according to the configuration. 
First, +Serially request a message, collecting responses until the required + +```erlang +serial_multirequest([Node|Nodes], Remaining, Method, Path, Message, Admissible, Statuses, Opts) -> + {ErlStatus, Res} = hb_http:request(Method, Node, Path, Message, Opts), + case is_admissible(ErlStatus, Res, Admissible, Statuses, Opts) of + true -> + ?event(http, {admissible_status, {response, Res}}), + [ + {ErlStatus, Res} + | + serial_multirequest( + Nodes, + Remaining - 1, + Method, + Path, + Message, + Admissible, + Statuses, + Opts + ) + ]; + false -> + ?event(http, {inadmissible_status, {response, Res}}), + serial_multirequest( + Nodes, + Remaining, + Method, + Path, + Message, + Admissible, + Statuses, + Opts + ) + end. +``` + +### parallel_multirequest + +Dispatch the same HTTP request to many nodes in parallel. + +```erlang +parallel_multirequest(Nodes, Responses, StopAfter, Method, Path, Message, Admissible, Statuses, Opts) -> + Ref = make_ref(), + Parent = self(), + Procs = + lists:map( + fun(Node) -> + spawn( + fun() -> + Res = hb_http:request(Method, Node, Path, Message, Opts), + receive no_reply -> stopping + after 0 -> Parent ! {Ref, self(), Res} + end + end + ) + end, + Nodes + ), + parallel_responses([], Procs, Ref, Responses, StopAfter, Admissible, Statuses, Opts). +``` + +### admissible_status + +Check if a status is allowed, according to the configuration. Statuses + +```erlang +admissible_status(_, <<"All">>) -> true; +``` + +### admissible_status + +Check if a status is allowed, according to the configuration. Statuses + +```erlang +admissible_status(_ResponseMsg = #{ <<"status">> := Status }, Statuses) -> + admissible_status(Status, Statuses); +``` + +### admissible_status + +Check if a status is allowed, according to the configuration. 
Statuses + +```erlang +admissible_status(Status, Statuses) when is_integer(Statuses) -> + admissible_status(Status, [Statuses]); +``` + +### admissible_status + +Check if a status is allowed, according to the configuration. Statuses + +```erlang +admissible_status(Status, Statuses) when is_binary(Status) -> + admissible_status(binary_to_integer(Status), Statuses); +``` + +### admissible_status + +Check if a status is allowed, according to the configuration. Statuses + +```erlang +admissible_status(Status, Statuses) when is_binary(Statuses) -> + % Convert the statuses to a list of integers. +``` + +### admissible_status + +```erlang +admissible_status(Status, Statuses) when is_list(Statuses) -> + lists:member(Status, Statuses). +``` + +### admissible_response + +If an `admissable` message is set for the request, check if the response + +```erlang +admissible_response(_Response, undefined, _Opts) -> true; +``` + +### admissible_response + +If an `admissable` message is set for the request, check if the response + +```erlang +admissible_response(Response, Msg, Opts) -> + Path = hb_maps:get(<<"path">>, Msg, <<"is-admissible">>, Opts), + Req = Response#{ <<"path">> => Path }, + Base = hb_message:without_unless_signed([<<"path">>], Msg, Opts), + ?event(debug_multi, + {executing_admissible_message, {message, Base}, {req, Req}} + ), + case hb_ao:resolve(Base, Req, Opts) of + {ok, Res} when is_atom(Res) or is_binary(Res) -> + ?event(debug_multi, {admissible_result, {result, Res}}), + hb_util:atom(Res) == true; + {error, Reason} -> + ?event(debug_multi, {admissible_error, {reason, Reason}}), + false + end. +``` + +### parallel_responses + +Collect the necessary number of responses, and stop workers if + +```erlang +parallel_responses(Res, Procs, Ref, 0, false, _Admissible, _Statuses, _Opts) -> + lists:foreach(fun(P) -> P ! 
no_reply end, Procs), + empty_inbox(Ref), + {ok, Res}; +``` + +### parallel_responses + +Collect the necessary number of responses, and stop workers if + +```erlang +parallel_responses(Res, Procs, Ref, 0, true, _Admissible, _Statuses, _Opts) -> + lists:foreach(fun(P) -> exit(P, kill) end, Procs), + empty_inbox(Ref), + Res; +``` + +### parallel_responses + +Collect the necessary number of responses, and stop workers if + +```erlang +parallel_responses(Res, Procs, Ref, Awaiting, StopAfter, Admissible, Statuses, Opts) -> + receive + {Ref, Pid, {Status, NewRes}} -> + case is_admissible(Status, NewRes, Admissible, Statuses, Opts) of + true -> + parallel_responses( + [NewRes | Res], + lists:delete(Pid, Procs), + Ref, + Awaiting - 1, + StopAfter, + Admissible, + Statuses, + Opts + ); + false -> + parallel_responses( + Res, + lists:delete(Pid, Procs), + Ref, + Awaiting, + StopAfter, + Admissible, + Statuses, + Opts + ) + end +end. +``` + +### empty_inbox + +Empty the inbox of the current process for all messages with the given + +```erlang +empty_inbox(Ref) -> +``` + +--- + +*Generated from [hb_http_multi.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_multi.erl)* diff --git a/docs/book/src/hb_http_server.erl.md b/docs/book/src/hb_http_server.erl.md new file mode 100644 index 000000000..201f860e1 --- /dev/null +++ b/docs/book/src/hb_http_server.erl.md @@ -0,0 +1,607 @@ +# hb_http_server + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_server.erl) + +A router that attaches a HTTP server to the AO-Core resolver. +Because AO-Core is built to speak in HTTP semantics, this module +only has to marshal the HTTP request into a message, and then +pass it to the AO-Core resolver. +`hb_http:reply/4` is used to respond to the client, handling the +process of converting a message back into an HTTP response. 
+The router uses an `Opts` message as its Cowboy initial state, +such that changing it on start of the router server allows for +the execution parameters of all downstream requests to be controlled. + +--- + +## Exported Functions + +- `allowed_methods/2` +- `get_opts/0` +- `get_opts/1` +- `init/2` +- `set_default_opts/1` +- `set_opts/1` +- `set_opts/2` +- `set_proc_server_id/1` +- `start_node/0` +- `start_node/1` +- `start/0` +- `start/1` + +--- + +### start + +A router that attaches a HTTP server to the AO-Core resolver. +Starts the HTTP server. Optionally accepts an `Opts` message, which + +```erlang +start() -> + ?event(http, {start_store, <<"cache-mainnet">>}), + Loaded = + case hb_opts:load(Loc = hb_opts:get(hb_config_location, <<"config.flat">>)) of + {ok, Conf} -> + ?event(boot, {loaded_config, Loc, Conf}), + Conf; + {error, Reason} -> + ?event(boot, {failed_to_load_config, Loc, Reason}), + #{} + end, + MergedConfig = + hb_maps:merge( + hb_opts:default_message_with_env(), + Loaded + ), + %% Apply store defaults before starting store + StoreOpts = hb_opts:get(store, no_store, MergedConfig), + StoreDefaults = hb_opts:get(store_defaults, #{}, MergedConfig), + UpdatedStoreOpts = + case StoreOpts of + no_store -> no_store; + _ when is_list(StoreOpts) -> hb_store_opts:apply(StoreOpts, StoreDefaults); + _ -> StoreOpts + end, + hb_store:start(UpdatedStoreOpts), + PrivWallet = + hb:wallet( + hb_opts:get( + priv_key_location, + <<"hyperbeam-key.json">>, + Loaded + ) + ), + maybe_greeter(MergedConfig, PrivWallet), + start( + Loaded#{ + priv_wallet => PrivWallet, + store => UpdatedStoreOpts, + port => hb_opts:get(port, 8734, Loaded), + cache_writers => [hb_util:human_id(ar_wallet:to_address(PrivWallet))] + } + ). 
+``` + +### start + +```erlang +start(Opts) -> + application:ensure_all_started([ + kernel, + stdlib, + inets, + ssl, + ranch, + cowboy, + gun, + os_mon + ]), + hb:init(), + BaseOpts = set_default_opts(Opts), + {ok, Listener, _Port} = new_server(BaseOpts), + {ok, Listener}. +``` + +### maybe_greeter + +Print the greeter message to the console if we are not running tests. + +```erlang +maybe_greeter(MergedConfig, PrivWallet) -> + case hb_features:test() of + false -> + print_greeter(MergedConfig, PrivWallet); + true -> + ok + end. +``` + +### print_greeter + +Print the greeter message to the console. Includes the version, operator + +```erlang +print_greeter(Config, PrivWallet) -> + FormattedConfig = hb_format:term(Config, Config, 2), + io:format("~n" + "===========================================================~n" + "== ██╗ ██╗██╗ ██╗██████╗ ███████╗██████╗ ==~n" + "== ██║ ██║╚██╗ ██╔╝██╔══██╗██╔════╝██╔══██╗ ==~n" + "== ███████║ ╚████╔╝ ██████╔╝█████╗ ██████╔╝ ==~n" + "== ██╔══██║ ╚██╔╝ ██╔═══╝ ██╔══╝ ██╔══██╗ ==~n" + "== ██║ ██║ ██║ ██║ ███████╗██║ ██║ ==~n" + "== ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝╚═╝ ╚═╝ ==~n" + "== ==~n" + "== ██████╗ ███████╗ █████╗ ███╗ ███╗ VERSION: ==~n" + "== ██╔══██╗██╔════╝██╔══██╗████╗ ████║ v~p. ==~n" + "== ██████╔╝█████╗ ███████║██╔████╔██║ ==~n" + "== ██╔══██╗██╔══╝ ██╔══██║██║╚██╔╝██║ EAT GLASS, ==~n" + "== ██████╔╝███████╗██║ ██║██║ ╚═╝ ██║ BUILD THE ==~n" + "== ╚═════╝ ╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝ FUTURE. 
==~n" + "===========================================================~n" + "== Node activate at: ~s ==~n" + "== Operator: ~s ==~n" + "===========================================================~n" + "== Config: ==~n" + "===========================================================~n" + " ~s~n" + "===========================================================~n", + [ + ?HYPERBEAM_VERSION, + string:pad( + lists:flatten( + io_lib:format( + "http://~s:~p", + [ + hb_opts:get(host, <<"localhost">>, Config), + hb_opts:get(port, 8734, Config) + ] + ) + ), + 35, leading, $ + ), + hb_util:human_id(ar_wallet:to_address(PrivWallet)), + FormattedConfig + ] + ). +``` + +### new_server + +Trigger the creation of a new HTTP server node. Accepts a `NodeMsg` + +```erlang +new_server(RawNodeMsg) -> + RawNodeMsgWithDefaults = + hb_maps:merge( + hb_opts:default_message_with_env(), + RawNodeMsg#{ only => local } + ), + HookMsg = #{ <<"body">> => RawNodeMsgWithDefaults }, + NodeMsg = + case dev_hook:on(<<"start">>, HookMsg, RawNodeMsgWithDefaults) of + {ok, #{ <<"body">> := NodeMsgAfterHook }} -> NodeMsgAfterHook; + Unexpected -> + ?event(http, + {failed_to_start_server, + {unexpected_hook_result, Unexpected} + } + ), + throw( + {failed_to_start_server, + {unexpected_hook_result, Unexpected} + } + ) + end, + % Put server ID into node message so it's possible to update current server + hb_http:start(), + ServerID = + hb_util:human_id( + ar_wallet:to_address( + hb_opts:get( + priv_wallet, + no_wallet, + NodeMsg + ) + ) + ), + % Put server ID into node message so it's possible to update current server + % params + NodeMsgWithID = hb_maps:put(http_server, ServerID, NodeMsg), + Dispatcher = cowboy_router:compile([{'_', [{'_', ?MODULE, ServerID}]}]), + ProtoOpts = #{ + env => #{dispatch => Dispatcher, node_msg => NodeMsgWithID}, + stream_handlers => [cowboy_stream_h], + max_connections => infinity, + idle_timeout => hb_opts:get(idle_timeout, 300000, NodeMsg) + }, + PrometheusOpts = + case 
hb_opts:get(prometheus, not hb_features:test(), NodeMsg) of + true -> + ?event(prometheus, + {starting_prometheus, {test_mode, hb_features:test()}} + ), + % Attempt to start the prometheus application, if possible. +``` + +### start_http3 + +```erlang +start_http3(ServerID, ProtoOpts, NodeMsg) -> + ?event(http, {start_http3, ServerID}), + Parent = self(), + ServerPID = + spawn(fun() -> + application:ensure_all_started(quicer), + {ok, Listener} = cowboy:start_quic( + ServerID, + TransOpts = #{ + socket_opts => [ + {certfile, "test/test-tls.pem"}, + {keyfile, "test/test-tls.key"}, + {port, Port = hb_opts:get(port, 8734, NodeMsg)} + ] + }, + ProtoOpts + ), + ranch_server:set_new_listener_opts( + ServerID, + 1024, + ranch:normalize_opts( + hb_maps:to_list(TransOpts#{ port => Port }) + ), + ProtoOpts, + [] + ), + ranch_server:set_addr(ServerID, {<<"localhost">>, Port}), + % Bypass ranch's requirement to have a connection supervisor define + % to support updating protocol opts. +``` + +### http3_conn_sup_loop + +```erlang +http3_conn_sup_loop() -> + receive + _ -> + % Ignore any other messages + http3_conn_sup_loop() + end. +``` + +### start_http2 + +```erlang +start_http2(ServerID, ProtoOpts, NodeMsg) -> + ?event(http, {start_http2, ServerID}), + StartRes = cowboy:start_clear( + ServerID, + [ + {port, Port = hb_opts:get(port, 8734, NodeMsg)} + ], + ProtoOpts + ), + case StartRes of + {ok, Listener} -> + ?event(debug_router_info, {http2_started, {listener, Listener}, {port, Port}}), + {ok, Port, Listener}; + {error, {already_started, Listener}} -> + ?event(http, {http2_already_started, {listener, Listener}}), + ?event(debug_router_info, + {restarting, + {id, ServerID}, + {node_msg, NodeMsg} + } + ), + cowboy:set_env(ServerID, node_msg, #{}), + % {ok, Port, Listener} + cowboy:stop_listener(ServerID), + start_http2(ServerID, ProtoOpts, NodeMsg) + end. +``` + +### init + +Entrypoint for all HTTP requests. 
Receives the Cowboy request option and + +```erlang +init(Req, ServerID) -> + case cowboy_req:method(Req) of + <<"OPTIONS">> -> cors_reply(Req, ServerID); + _ -> + {ok, Body} = read_body(Req), + handle_request(Req, Body, ServerID) + end. +``` + +### read_body + +Helper to grab the full body of a HTTP request, even if it's chunked. + +```erlang +read_body(Req) -> read_body(Req, <<>>). +``` + +### read_body + +Helper to grab the full body of a HTTP request, even if it's chunked. + +```erlang +read_body(Req0, Acc) -> + case cowboy_req:read_body(Req0) of + {ok, Data, _Req} -> {ok, << Acc/binary, Data/binary >>}; + {more, Data, Req} -> read_body(Req, << Acc/binary, Data/binary >>) + end. +``` + +### cors_reply + +Reply to CORS preflight requests. + +```erlang +cors_reply(Req, _ServerID) -> + Req2 = cowboy_req:reply(204, #{ + <<"access-control-allow-origin">> => <<"*">>, + <<"access-control-allow-headers">> => <<"*">>, + <<"access-control-allow-methods">> => + <<"GET, POST, PUT, DELETE, OPTIONS, PATCH">> + }, Req), + ?event(http_debug, {cors_reply, {req, Req}, {req2, Req2}}), + {ok, Req2, no_state}. +``` + +### handle_request + +Handle all non-CORS preflight requests as AO-Core requests. Execution + +```erlang +handle_request(RawReq, Body, ServerID) -> + % Insert the start time into the request so that it can be used by the + % `hb_http' module to calculate the duration of the request. +``` + +### handle_error + +Return a 500 error response to the client. 
+ +```erlang +handle_error(Req, Singleton, Type, Details, Stacktrace, NodeMsg) -> + DetailsStr = hb_util:bin(hb_format:message(Details, NodeMsg, 1)), + StacktraceStr = hb_util:bin(hb_format:trace(Stacktrace)), + ErrorMsg = + #{ + <<"status">> => 500, + <<"type">> => hb_util:bin(hb_format:message(Type)), + <<"details">> => DetailsStr, + <<"stacktrace">> => StacktraceStr + }, + ErrorBin = hb_format:error(ErrorMsg, NodeMsg), + ?event( + http_error, + {returning_500_error, + {string, + hb_format:indent_lines( + <<"\n", ErrorBin/binary, "\n">>, + 1 + ) + } + } + ), + % Remove leading and trailing noise from the stacktrace and details. +``` + +### allowed_methods + +Return the list of allowed methods for the HTTP server. + +```erlang +allowed_methods(Req, State) -> + { + [<<"GET">>, <<"POST">>, <<"PUT">>, <<"DELETE">>, <<"OPTIONS">>, <<"PATCH">>], + Req, + State + }. +``` + +### set_opts + +Merges the provided `Opts` with uncommitted values from `Request`, + +```erlang +set_opts(Opts) -> + case hb_opts:get(http_server, no_server_ref, Opts) of + no_server_ref -> + ok; + ServerRef -> + ok = cowboy:set_env(ServerRef, node_msg, Opts) + end. +``` + +### set_opts + +```erlang +set_opts(Request, Opts) -> + PreparedOpts = + hb_opts:mimic_default_types( + Opts, + false, + Opts + ), + PreparedRequest = + hb_opts:mimic_default_types( + hb_message:uncommitted(Request), + false, + Opts + ), + MergedOpts = + maps:merge( + PreparedOpts, + PreparedRequest + ), + ?event(set_opts, {merged_opts, {explicit, MergedOpts}}), + History = + hb_opts:get(node_history, [], Opts) + ++ [ hb_private:reset(maps:without([node_history], PreparedRequest)) ], + FinalOpts = MergedOpts#{ + http_server => hb_opts:get(http_server, no_server, Opts), + node_history => History + }, + {set_opts(FinalOpts), FinalOpts}. +``` + +### get_opts + +Get the node message for the current process. + +```erlang +get_opts() -> + get_opts(#{ http_server => get(server_id) }). 
+``` + +### get_opts + +```erlang +get_opts(NodeMsg) -> + ServerRef = hb_opts:get(http_server, no_server_ref, NodeMsg), + cowboy:get_env(ServerRef, node_msg, no_node_msg). +``` + +### set_proc_server_id + +Initialize the server ID for the current process. + +```erlang +set_proc_server_id(ServerID) -> + put(server_id, ServerID). +``` + +### set_default_opts + +Apply the default node message to the given opts map. + +```erlang +set_default_opts(Opts) -> + % Create a temporary opts map that does not include the defaults. +``` + +### start_node + +Test that we can start the server, send a message, and get a response. + +```erlang +start_node() -> + start_node(#{}). +``` + +### start_node + +```erlang +start_node(Opts) -> + application:ensure_all_started([ + kernel, + stdlib, + inets, + ssl, + ranch, + cowboy, + gun, + os_mon + ]), + hb:init(), + hb_sup:start_link(Opts), + ServerOpts = set_default_opts(Opts), + {ok, _Listener, Port} = new_server(ServerOpts), + <<"http://localhost:", (integer_to_binary(Port))/binary, "/">>. +%%% Tests +%%% The following only covering the HTTP server initialization process. For tests +%%% of HTTP server requests/responses, see `hb_http.erl'. +``` + +### set_node_opts_test + +Ensure that the `start` hook can be used to modify the node options. We +Test the set_opts/2 function that merges request with options, + +```erlang +set_node_opts_test() -> + Node = + start_node(#{ + on => #{ + <<"start">> => #{ + <<"device">> => + #{ + <<"start">> => + fun(_, #{ <<"body">> := NodeMsg }, _) -> + {ok, #{ + <<"body">> => + NodeMsg#{ <<"test-success">> => true } + }} + end + } + } + } + }), + {ok, LiveOpts} = hb_http:get(Node, <<"/~meta@1.0/info">>, #{}), + ?assert(hb_ao:get(<<"test-success">>, LiveOpts, false, #{})). +``` + +### set_opts_test + +Ensure that the `start` hook can be used to modify the node options. 
We +Test the set_opts/2 function that merges request with options, + +```erlang +set_opts_test() -> + DefaultOpts = hb_opts:default_message_with_env(), + start_node(DefaultOpts#{ + priv_wallet => Wallet = ar_wallet:new(), + port => rand:uniform(10000) + 10000 + }), + Opts = get_opts(#{ + http_server => hb_util:human_id(ar_wallet:to_address(Wallet)) + }), + NodeHistory = hb_opts:get(node_history, [], Opts), + ?event(debug_node_history, {node_history_length, length(NodeHistory)}), + ?assert(length(NodeHistory) == 0), + % Test case 1: Empty node_history case + Request1 = #{ + <<"hello">> => <<"world">> + }, + {ok, UpdatedOpts1} = set_opts(Request1, Opts), + NodeHistory1 = hb_opts:get(node_history, not_found, UpdatedOpts1), + Key1 = hb_opts:get(<<"hello">>, not_found, UpdatedOpts1), + ?event(debug_node_history, {node_history_length, length(NodeHistory1)}), + ?assert(length(NodeHistory1) == 1), + ?assert(Key1 == <<"world">>), + % Test case 2: Non-empty node_history case + Request2 = #{ + <<"hello2">> => <<"world2">> + }, + {ok, UpdatedOpts2} = set_opts(Request2, UpdatedOpts1), + NodeHistory2 = hb_opts:get(node_history, not_found, UpdatedOpts2), + Key2 = hb_opts:get(<<"hello2">>, not_found, UpdatedOpts2), + ?event(debug_node_history, {node_history_length, length(NodeHistory2)}), + ?assert(length(NodeHistory2) == 2), + ?assert(Key2 == <<"world2">>), + % Test case 3: Non-empty node_history case + {ok, UpdatedOpts3} = set_opts(#{}, UpdatedOpts2#{ <<"hello3">> => <<"world3">> }), + NodeHistory3 = hb_opts:get(node_history, not_found, UpdatedOpts3), + Key3 = hb_opts:get(<<"hello3">>, not_found, UpdatedOpts3), + ?event(debug_node_history, {node_history_length, length(NodeHistory3)}), + ?assert(length(NodeHistory3) == 3), + ?assert(Key3 == <<"world3">>). +``` + +### restart_server_test + +Ensure that the `start` hook can be used to modify the node options. 
We +Test the set_opts/2 function that merges request with options, + +```erlang +restart_server_test() -> + % We force HTTP2, overriding the HTTP3 feature, because HTTP3 restarts don't work yet. +``` + +--- + +*Generated from [hb_http_server.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_server.erl)* diff --git a/docs/book/src/hb_json.erl.md b/docs/book/src/hb_json.erl.md new file mode 100644 index 000000000..e048b5d6b --- /dev/null +++ b/docs/book/src/hb_json.erl.md @@ -0,0 +1,39 @@ +# hb_json + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_json.erl) + +Wrapper for encoding and decoding JSON. Supports maps and Jiffy's old +`ejson` format. This module abstracts the underlying JSON library, allowing +us to switch between libraries as needed in the future. + +--- + +## Exported Functions + +- `decode/1` +- `decode/2` +- `encode/1` + +--- + +### encode + +Wrapper for encoding and decoding JSON. Supports maps and Jiffy's old +Takes a term in Erlang's native form and encodes it as a JSON string. + +```erlang +encode(Term) -> + iolist_to_binary(json:encode(Term)). +``` + +### decode + +Takes a JSON string and decodes it into an Erlang term. + +```erlang +decode(Bin) -> json:decode(Bin). +``` + +--- + +*Generated from [hb_json.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_json.erl)* diff --git a/docs/book/src/hb_keccak.erl.md b/docs/book/src/hb_keccak.erl.md new file mode 100644 index 000000000..81ca7b8e7 --- /dev/null +++ b/docs/book/src/hb_keccak.erl.md @@ -0,0 +1,123 @@ +# hb_keccak + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_keccak.erl) + +## Exported Functions + +- `keccak_256/1` +- `key_to_ethereum_address/1` +- `sha3_256/1` + +--- + +### init + +```erlang +init() -> + SoName = filename:join([code:priv_dir(hb), "hb_keccak"]), + erlang:load_nif(SoName, 0). +``` + +### sha3_256 + +```erlang +sha3_256(_Bin) -> + erlang:nif_error(not_loaded). 
+``` + +### keccak_256 + +```erlang +keccak_256(_Bin) -> + erlang:nif_error(not_loaded). +``` + +### to_hex + +```erlang +to_hex(Bin) when is_binary(Bin) -> + binary:encode_hex(Bin). +``` + +### key_to_ethereum_address + +```erlang +key_to_ethereum_address(Key) when is_binary(Key) -> + <<_Prefix: 1/binary, NoCompressionByte/binary>> = Key, + Prefix = hb_util:to_hex(hb_keccak:keccak_256(NoCompressionByte)), + Last40 = binary:part(Prefix, byte_size(Prefix) - 40, 40), + Hash = hb_keccak:keccak_256(Last40), + HashHex = hb_util:to_hex(Hash), + ChecksumAddress = hash_to_checksum_address(Last40, HashHex), + ChecksumAddress. +``` + +### hash_to_checksum_address + +```erlang +hash_to_checksum_address(Last40, Hash) when + is_binary(Last40), + is_binary(Hash), + byte_size(Last40) =:= 40 -> + Checksummed = lists:zip(binary:bin_to_list(Last40), binary:bin_to_list(binary:part(Hash, 0, 40))), + Formatted = lists:map(fun({Char, H}) -> + case H >= $8 of + true -> string:to_upper([Char]); + false -> [Char] + end + end, Checksummed), + <<"0x", (list_to_binary(lists:append(Formatted)))/binary>>. +%% Test functions +``` + +### keccak_256_test + +```erlang +keccak_256_test() -> + Input = <<"testing">>, + Expected = <<"5F16F4C7F149AC4F9510D9CF8CF384038AD348B3BCDC01915F95DE12DF9D1B02">>, + Actual = to_hex(hb_keccak:keccak_256(Input)), + ?assertEqual(Expected, Actual). 
+``` + +### keccak_256_key_test + +```erlang +keccak_256_key_test() -> + Input = <<"BAoixXds4JhW42pzlLb83B3-I21lX78j3Q7cPaoFiCjMgjYwYLDj-xL132J147ifZFwRBmzmEMC8eYAXzbRNWuA">>, + BinaryInput = hb_util:decode(Input), + <<_Prefix: 1/binary, NoCompressionByte/binary>> = BinaryInput, + Prefix = hb_keccak:keccak_256(NoCompressionByte), + PrefixHex = hb_util:to_hex(Prefix), + ?assertEqual(PrefixHex, <<"12f9afe6abd38444cab38e8cb7b4360f7f6298de2e7a11009270f35f189bd77e">>), + Last40 = binary:part(PrefixHex, byte_size(PrefixHex) - 40, 40), + ?assertEqual(Last40, <<"b7b4360f7f6298de2e7a11009270f35f189bd77e">>), + Hash = hb_keccak:keccak_256(Last40), + HashHex = hb_util:to_hex(Hash), + ChecksumAddress = hash_to_checksum_address(Last40, HashHex), + ?assertEqual(ChecksumAddress, <<"0xb7B4360F7F6298dE2e7a11009270F35F189Bd77E">>). +``` + +### keccak_256_key_to_address_test + +```erlang +keccak_256_key_to_address_test() -> + Input = <<"BAoixXds4JhW42pzlLb83B3-I21lX78j3Q7cPaoFiCjMgjYwYLDj-xL132J147ifZFwRBmzmEMC8eYAXzbRNWuA">>, + ChecksumAddress = key_to_ethereum_address(hb_util:decode(Input)), + ?assertEqual(ChecksumAddress, <<"0xb7B4360F7F6298dE2e7a11009270F35F189Bd77E">>). +``` + +### sha3_256_test + +```erlang +sha3_256_test() -> + %% "abc" => known SHA3-256 hash from NIST + Input = <<"testing">>, + Expected = <<"7F5979FB78F082E8B1C676635DB8795C4AC6FABA03525FB708CB5FD68FD40C5E">>, + Actual = to_hex(hb_keccak:sha3_256(Input)), + ?assertEqual(Expected, Actual). +``` + +--- + +*Generated from [hb_keccak.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_keccak.erl)* diff --git a/docs/book/src/hb_link.erl.md b/docs/book/src/hb_link.erl.md new file mode 100644 index 000000000..21de8caa6 --- /dev/null +++ b/docs/book/src/hb_link.erl.md @@ -0,0 +1,264 @@ +# hb_link + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_link.erl) + +Utility functions for working with links. 
+ +--- + +## Exported Functions + +- `decode_all_links/1` +- `format_unresolved/1` +- `format_unresolved/2` +- `format_unresolved/3` +- `format/1` +- `format/2` +- `format/3` +- `is_link_key/1` +- `normalize/2` +- `normalize/3` +- `remove_link_specifier/1` + +--- + +### normalize + +Utility functions for working with links. +Takes a message and ensures that it is normalized: + +```erlang +normalize(Msg, Opts) when is_map(Opts) -> + normalize(Msg, hb_opts:get(linkify_mode, offload, Opts), Opts). +``` + +### normalize + +```erlang +normalize(Msg, false, _Opts) -> + Msg; +``` + +### normalize + +```erlang +normalize(Msg, Mode, Opts) when is_map(Msg) -> + maps:merge( + maps:with([<<"commitments">>, <<"priv">>], Msg), + maps:from_list( + lists:map( + fun({Key, {link, ID, LinkOpts = #{ <<"type">> := <<"link">> }}}) -> + % The value is a link. Deconstruct it and ensure it is + % normalized (lazy links are made greedy, and both are + % returned in binary TABM form). +``` + +### normalize + +```erlang +normalize(OtherVal, Mode, Opts) when is_list(OtherVal) -> + lists:map(fun(X) -> normalize(X, Mode, Opts) end, OtherVal); +``` + +### normalize + +```erlang +normalize(OtherVal, _Mode, _Opts) -> + OtherVal. +``` + +### decode_all_links + +Decode links embedded in the headers of a message. + +```erlang +decode_all_links(Msg) when is_map(Msg) -> + maps:from_list( + lists:map( + fun({Key, MaybeID}) -> + case is_link_key(Key) of + true -> + NewKey = binary:part(Key, 0, byte_size(Key) - 5), + {NewKey, + { + link, + MaybeID, + #{ + <<"type">> => <<"link">>, + <<"lazy">> => false + } + } + }; + _ -> {Key, MaybeID} + end + end, + maps:to_list(Msg) + ) + ); +``` + +### decode_all_links + +Decode links embedded in the headers of a message. + +```erlang +decode_all_links(List) when is_list(List) -> + lists:map(fun(X) -> decode_all_links(X) end, List); +``` + +### decode_all_links + +Decode links embedded in the headers of a message. + +```erlang +decode_all_links(OtherVal) -> + OtherVal. 
+``` + +### is_link_key + +Determine if a key is an encoded link. + +```erlang +is_link_key(Key) when byte_size(Key) >= 5 -> + binary:part(Key, byte_size(Key) - 5, 5) =:= <<"+link">>; +``` + +### is_link_key + +Determine if a key is an encoded link. +Remove any `+link` suffixes from a key. + +```erlang +is_link_key(_) -> false. +``` + +### remove_link_specifier + +Determine if a key is an encoded link. +Remove any `+link` suffixes from a key. + +```erlang +remove_link_specifier(Key) -> + case is_link_key(Key) of + true -> binary:part(Key, 0, byte_size(Key) - 5); + false -> Key + end. +``` + +### format + +Format a link as a short string suitable for printing. Checks the node + +```erlang +format(Link) -> format(Link, #{}). +``` + +### format + +Format a link as a short string suitable for printing. Checks the node + +```erlang +format(Link, Opts) -> + format(Link, Opts, 0). +``` + +### format + +```erlang +format(Link, Opts, Indent) -> + case hb_opts:get(debug_resolve_links, false, Opts) of + true -> + try + hb_format:message( + hb_cache:ensure_all_loaded(Link, Opts), + Opts, + Indent + ) + catch + _:_ -> << "!UNRESOLVABLE! ", (format_unresolved(Link, Opts))/binary >> + end; + false -> format_unresolved(Link, Opts, Indent) + end. +``` + +### format_unresolved + +Format a link without resolving it. + +```erlang +format_unresolved(Link) -> + format_unresolved(Link, #{}). +``` + +### format_unresolved + +```erlang +format_unresolved({link, ID, Opts}, BaseOpts) -> + format_unresolved({link, ID, Opts}, BaseOpts, 0). +``` + +### format_unresolved + +```erlang +format_unresolved({link, ID, Opts}, BaseOpts, Indent) -> + hb_util:bin( + hb_format:indent( + "~s~s: ~s", + [ + case maps:get(<<"lazy">>, Opts, false) of + true -> <<"Lazy link">>; + false -> <<"Link">> + end, + case maps:get(<<"type">>, Opts, no_type) of + no_type -> <<>>; + Type -> <<" (to ", (hb_util:bin(Type))/binary, ")" >> + end, + ID + ], + BaseOpts, + Indent + ) + ). 
+``` + +### offload_linked_message_test + +```erlang +offload_linked_message_test() -> + Opts = #{}, + Msg = #{ + <<"immediate-key">> => <<"immediate-value">>, + <<"link-key">> => #{ + <<"immediate-key-2">> => <<"link-value">>, + <<"link-key-2">> => #{ + <<"immediate-key-3">> => <<"link-value-2">> + } + } + }, + Offloaded = normalize(Msg, offload, Opts), + Structured = hb_message:convert(Offloaded, <<"structured@1.0">>, tabm, Opts), + ?event(linkify, {test_recvd_linkified, {msg, Structured}}), + Loaded = hb_cache:ensure_all_loaded(Structured, Opts), + ?event(linkify, {test_recvd_loaded, {msg, Loaded}}), + ?assertEqual(Msg, Loaded). +``` + +### offload_list_test + +```erlang +offload_list_test() -> + Opts = #{}, + Msg = #{ + <<"list-key">> => [1.0, 2.0, 3.0] + }, + TABM = hb_message:convert(Msg, tabm, <<"structured@1.0">>, Opts), + Linkified = normalize(TABM, offload, Opts), + Msg2 = hb_message:convert(Linkified, <<"structured@1.0">>, tabm, Opts), + Res = hb_cache:ensure_all_loaded(Msg2, Opts), + ?assertEqual(Msg, Res). +``` + +--- + +*Generated from [hb_link.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_link.erl)* diff --git a/docs/book/src/hb_logger.erl.md b/docs/book/src/hb_logger.erl.md new file mode 100644 index 000000000..8e17454ab --- /dev/null +++ b/docs/book/src/hb_logger.erl.md @@ -0,0 +1,130 @@ +# hb_logger + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_logger.erl) + +## Exported Functions + +- `log/2` +- `register/1` +- `report/1` +- `start/0` +- `start/1` + +--- + +### start + +```erlang +start() -> start(undefined). +``` + +### start + +```erlang +start(Client) -> + spawn(fun() -> + loop(#state{client = Client}) + end). +``` + +### log + +```erlang +log(Monitor, Data) -> + Monitor ! {log, Data}. +``` + +### register + +```erlang +register(Monitor) -> + ?event({self(), registering}), + Monitor ! {register, self()}. +``` + +### report + +```erlang +report(Monitor) -> + Monitor ! 
{report, self()}, + receive + {report, Activity} -> + Activity + end. +``` + +### loop + +```erlang +loop(#state { processes = [], client = undefined }) -> done; +``` + +### loop + +```erlang +loop(#state { processes = [], client = C, activity = A }) -> + C ! {?MODULE, self(), done, A}; +``` + +### loop + +```erlang +loop(State) -> + receive + {log, Activity} -> + console(State, Activity), + loop(State#state{ activity = [Activity | State#state.activity] }); + {register, PID} -> + ?event(registered), + %erlang:monitor(process, PID), + console(State, Act = {ok, registered, PID}), + ?event({registered, PID}), + loop(State#state{ + processes = + [PID | case State#state.processes of waiting -> []; L -> L end], + activity = [Act | State#state.activity] + }); + {'DOWN', _MonitorRef, process, PID, Reason} -> + console(State, Act = {terminated, Reason, PID}), + ?event({dead, PID}), + loop(State#state{ + processes = State#state.processes -- [PID], + activity = [Act | State#state.activity] + }); + {report, PID} -> + PID ! {report, State#state.activity}, + loop(State) + end. 
+``` + +### console + +```erlang +console(#state { console = false }, _) -> + not_printing; +``` + +### console + +```erlang +console(S, {Status, Type, Details}) when is_record(Details, tx) -> + console(S, {Status, Type, hb_util:id(Details)}); +``` + +### console + +```erlang +console(_S, {Status, Type, Details}) -> + io:format("### MU PUSH REPORT ~p ###~n~p: ~p~n~p~n~n", + [self(), Status, Type, Details]); +``` + +### console + +```erlang +console(_S, Act) -> +``` + +--- + +*Generated from [hb_logger.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_logger.erl)* diff --git a/docs/book/src/hb_maps.erl.md b/docs/book/src/hb_maps.erl.md new file mode 100644 index 000000000..ff082504f --- /dev/null +++ b/docs/book/src/hb_maps.erl.md @@ -0,0 +1,398 @@ +# hb_maps + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_maps.erl) + +An abstraction for working with maps in HyperBEAM, matching the +generic `maps` module, but additionally supporting the resolution of +links as they are encountered. These functions must be used extremely +carefully. In virtually all circumstances, the `hb_ao:resolve/3` or +`hb_ao:get/3` functions should be used instead, as they will execute the +full AO-Core protocol upon requests (normalizing keys, applying the +appropriate device's functions, as well as resolving links). By using this +module's functions, you are implicitly making the assumption that the message +in question is of the `~message@1.0` form, ignoring any other keys that its +actual device may present. This module is intended for the extremely rare +circumstances in which the additional overhead of the full AO-Core +execution cycle is not acceptable, and the data in question is known to +conform to the `~message@1.0` form. +If you do not understand any/all of the above, you are in the wrong place! 
+Utilise the `hb_ao` module and read the documentation therein, saving +yourself from the inevitable issues that will arise from using this +module without understanding the full implications. You have been warned. + +--- + +## Exported Functions + +- `filter/2` +- `filter/3` +- `filtermap/2` +- `filtermap/3` +- `find/2` +- `find/3` +- `fold/3` +- `fold/4` +- `from_list/1` +- `get/2` +- `get/3` +- `get/4` +- `is_key/2` +- `is_key/3` +- `keys/1` +- `keys/2` +- `map/2` +- `map/3` +- `merge/2` +- `merge/3` +- `put/3` +- `put/4` +- `remove/2` +- `remove/3` +- `size/1` +- `size/2` +- `take/2` +- `take/3` +- `to_list/1` +- `to_list/2` +- `update_with/3` +- `update_with/4` +- `values/1` +- `values/2` +- `with/2` +- `with/3` +- `without/2` +- `without/3` + +--- + +### get + +Get a value from a map, resolving links as they are encountered in both + +```erlang +-spec get( + Key :: term(), + Map :: map(), + Default :: term(), + Opts :: map() +) -> term(). +``` + +```erlang +get(Key, Map, Default, Opts) -> + hb_cache:ensure_loaded( + maps:get( + Key, + hb_cache:ensure_loaded(Map, Opts), + Default + ), + Opts + ). +``` + +### put + +```erlang +-spec put( + Key :: term(), + Value :: term(), + Map :: map(), + Opts :: map() +) -> map(). +``` + +```erlang +put(Key, Value, Map, Opts) -> + maps:put(Key, Value, hb_cache:ensure_loaded(Map, Opts)). +``` + +### map + +```erlang +-spec map( + Fun :: fun((Key :: term(), Value :: term()) -> term()), + Map :: map() +) -> map(). +``` + +```erlang +map(Fun, Map) -> + map(Fun, Map, #{}). +``` + +### map + +```erlang +-spec map( + Fun :: fun((Key :: term(), Value :: term()) -> term()), + Map :: map(), + Opts :: map() +) -> map(). +``` + +```erlang +map(Fun, Map, Opts) -> + maps:map( + fun(K, V) -> Fun(K, hb_cache:ensure_loaded(V, Opts)) end, + hb_cache:ensure_loaded(Map, Opts) + ). +``` + +### filter + +```erlang +-spec filter( + Fun :: fun((Key :: term(), Value :: term()) -> boolean()), + Map :: map() +) -> map(). 
+``` + +```erlang +filter(Fun, Map) -> + filter(Fun, Map, #{}). +``` + +### filter + +```erlang +-spec filter( + Fun :: fun((Key :: term(), Value :: term()) -> boolean()), + Map :: map(), + Opts :: map() +) -> map(). +``` + +```erlang +filter(Fun, Map, Opts) -> + maps:filtermap( + fun(K, V) -> + case Fun(K, Loaded = hb_cache:ensure_loaded(V, Opts)) of + true -> {true, Loaded}; + false -> false + end + end, + hb_cache:ensure_loaded(Map, Opts) + ). +``` + +### filtermap + +```erlang +-spec filtermap( + Fun :: fun((Key :: term(), Value :: term()) -> {boolean(), term()}), + Map :: map() +) -> map(). +``` + +```erlang +filtermap(Fun, Map) -> + filtermap(Fun, Map, #{}). +``` + +### filtermap + +```erlang +-spec filtermap( + Fun :: fun((Key :: term(), Value :: term()) -> {boolean(), term()}), + Map :: map(), + Opts :: map() +) -> map(). +``` + +```erlang +filtermap(Fun, Map, Opts) -> + maps:filtermap( + fun(K, V) -> Fun(K, hb_cache:ensure_loaded(V, Opts)) end, + hb_cache:ensure_loaded(Map, Opts) + ). +``` + +### fold + +```erlang +-spec fold( + Fun :: fun((Key :: term(), Value :: term(), Acc :: term()) -> term()), + Acc :: term(), + Map :: map() +) -> term(). +``` + +```erlang +fold(Fun, Acc, Map) -> + fold(Fun, Acc, Map, #{}). +``` + +### fold + +```erlang +-spec fold( + Fun :: fun((Key :: term(), Value :: term(), Acc :: term()) -> term()), + Acc :: term(), + Map :: map(), + Opts :: map() +) -> term(). +``` + +```erlang +fold(Fun, Acc, Map, Opts) -> + maps:fold( + fun(K, V, CurrAcc) -> Fun(K, hb_cache:ensure_loaded(V, Opts), CurrAcc) end, + Acc, + hb_cache:ensure_loaded(Map, Opts) + ). +``` + +### update_with + +```erlang +-spec update_with( + Key :: term(), + Fun :: fun((Value :: term()) -> term()), + Map :: map() +) -> map(). +``` + +```erlang +update_with(Key, Fun, Map) -> + update_with(Key, Fun, Map, #{}). +``` + +### update_with + +```erlang +-spec update_with( + Key :: term(), + Fun :: fun((Value :: term()) -> term()), + Map :: map(), + Opts :: map() +) -> map(). 
+``` + +```erlang +update_with(Key, Fun, Map, Opts) -> + maps:update_with(Key, Fun, hb_cache:ensure_loaded(Map, Opts), Opts). +``` + +### get_with_link_test + +```erlang +-spec to_list(Map :: map(), Opts :: map()) -> [{Key :: term(), Value :: term()}]. +to_list(Map, Opts) -> + maps:to_list(hb_cache:ensure_loaded(Map, Opts)). +``` + +```erlang +get_with_link_test() -> + Bin = <<"TEST DATA">>, + Opts = #{}, + {ok, Location} = hb_cache:write(Bin, Opts), + Map = #{ 1 => 1, 2 => {link, Location, #{}}, 3 => 3 }, + ?assertEqual(Bin, get(2, Map)). +``` + +### map_with_link_test + +```erlang +map_with_link_test() -> + Bin = <<"TEST DATA">>, + Opts = #{}, + {ok, Location} = hb_cache:write(Bin, Opts), + Map = #{ 1 => 1, 2 => {link, Location, #{}}, 3 => 3 }, + ?assertEqual(#{1 => 1, 2 => Bin, 3 => 3}, map(fun(_K, V) -> V end, Map, #{})). +``` + +### get_with_typed_link_test + +```erlang +get_with_typed_link_test() -> + Bin = <<"123">>, + Opts = #{}, + {ok, Location} = hb_cache:write(Bin, Opts), + Map = #{ 1 => 1, 2 => {link, Location, #{ <<"type">> => integer }}, 3 => 3 }, + ?assertEqual(123, get(2, Map, undefined)). +``` + +### resolve_on_link_test + +```erlang +resolve_on_link_test() -> + Msg = #{ <<"test-key">> => <<"test-value">> }, + Opts = #{}, + {ok, ID} = hb_cache:write(Msg, Opts), + ?assertEqual( + {ok, <<"test-value">>}, + hb_ao:resolve({link, ID, #{}}, <<"test-key">>, #{}) + ). +``` + +### filter_with_link_test + +```erlang +filter_with_link_test() -> + Bin = <<"TEST DATA">>, + Opts = #{}, + {ok, Location} = hb_cache:write(Bin, Opts), + Map = #{ 1 => 1, 2 => {link, Location, #{}}, 3 => 3 }, + ?assertEqual(#{1 => 1, 3 => 3}, filter(fun(_, V) -> V =/= Bin end, Map)). 
+``` + +### filtermap_with_link_test + +```erlang +filtermap_with_link_test() -> + Bin = <<"TEST DATA">>, + Opts = #{}, + {ok, Location} = hb_cache:write(Bin, Opts), + Map = #{ 1 => 1, 2 => {link, Location, #{}}, 3 => 3 }, + ?assertEqual( + #{2 => <<"FOUND">>}, + filtermap( + fun(_, <<"TEST DATA">>) -> {true, <<"FOUND">>}; + (_K, _V) -> false + end, + Map + ) + ). +``` + +### fold_with_typed_link_test + +```erlang +fold_with_typed_link_test() -> + Bin = <<"123">>, + Opts = #{}, + {ok, Location} = hb_cache:write(Bin, Opts), + Map = #{ 1 => 1, 2 => {link, Location, #{ <<"type">> => integer }}, 3 => 3 }, + ?assertEqual(127, fold(fun(_, V, Acc) -> V + Acc end, 0, Map)). +``` + +### filter_passively_loads_test + +```erlang +filter_passively_loads_test() -> + Bin = <<"TEST DATA">>, + Opts = #{}, + {ok, Location} = hb_cache:write(Bin, Opts), + Map = #{ 1 => 1, 2 => {link, Location, #{}}, 3 => 3 }, + ?assertEqual( + #{1 => 1, 2 => <<"TEST DATA">>, 3 => 3}, + filter(fun(_, _) -> true end, Map) + ). +``` + +### filtermap_passively_loads_test + +```erlang +filtermap_passively_loads_test() -> + Bin = <<"TEST DATA">>, + Opts = #{}, + {ok, Location} = hb_cache:write(Bin, Opts), + Map = #{ 1 => 1, 2 => {link, Location, #{}}, 3 => 3 }, + ?assertEqual( + #{ 1 => 1, 2 => <<"TEST DATA">>, 3 => 3 }, + filtermap(fun(_, V) -> {true, V} end, Map) +``` + +--- + +*Generated from [hb_maps.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_maps.erl)* diff --git a/docs/book/src/hb_message.erl.md b/docs/book/src/hb_message.erl.md new file mode 100644 index 000000000..e890e1dee --- /dev/null +++ b/docs/book/src/hb_message.erl.md @@ -0,0 +1,1150 @@ +# hb_message + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_message.erl) + +This module acts an adapter between messages, as modeled in the +AO-Core protocol, and their uderlying binary representations and formats. 
+Unless you are implementing a new message serialization codec, you should
+not need to interact with this module directly. Instead, use the
+`hb_ao` interfaces to interact with all messages. The `dev_message`
+module implements a device interface for abstracting over the different
+message formats.
+`hb_message` and the HyperBEAM caches can interact with multiple different
+types of message formats:
+ - Richly typed AO-Core structured messages.
+ - Arweave transactions.
+ - ANS-104 data items.
+ - HTTP Signed Messages.
+ - Flat Maps.
+This module is responsible for converting between these formats. It does so
+by normalizing messages to a common format: `Type Annotated Binary Messages`
+(TABM). TABMs are deep Erlang maps with keys that only contain either other
+TABMs or binary values. By marshalling all messages into this format, they
+can easily be coerced into other output formats. For example, generating a
+`HTTP Signed Message` format output from an Arweave transaction. TABM is
+also a simple format from a computational perspective (only binary literals
+and O(1) access maps), such that operations upon them are efficient.
+The structure of the conversions is as follows:
+<pre>
+    Arweave TX/ANS-104 ==> dev_codec_ans104:from/1 ==> TABM
+    HTTP Signed Message ==> dev_codec_httpsig_conv:from/1 ==> TABM
+    Flat Maps ==> dev_codec_flat:from/1 ==> TABM
+    TABM ==> dev_codec_structured:to/1 ==> AO-Core Message
+    AO-Core Message ==> dev_codec_structured:from/1 ==> TABM
+    TABM ==> dev_codec_ans104:to/1 ==> Arweave TX/ANS-104
+    TABM ==> dev_codec_httpsig_conv:to/1 ==> HTTP Signed Message
+    TABM ==> dev_codec_flat:to/1 ==> Flat Maps
+    ...
+
+Additionally, this module provides a number of utility functions for +manipulating messages. For example, `hb_message:sign/2` to sign a message of +arbitrary type, or `hb_formatter:format_msg/1` to print an AO-Core/TABM message in +a human-readable format. +The `hb_cache` module is responsible for storing and retrieving messages in +the HyperBEAM stores configured on the node. Each store has its own storage +backend, but each works with simple key-value pairs. Subsequently, the +`hb_cache` module uses TABMs as the internal format for storing and +retrieving messages. +Test vectors to ensure the functioning of this module and the codecs that +interact with it are found in `hb_message_test_vectors.erl`. + +--- + +## Exported Functions + +- `commit/2` +- `commit/3` +- `commitment_devices/2` +- `commitment/2` +- `commitment/3` +- `commitments/3` +- `committed/3` +- `convert/3` +- `convert/4` +- `default_tx_list/0` +- `diff/3` +- `filter_default_keys/1` +- `find_target/3` +- `id/1` +- `id/2` +- `id/3` +- `is_signed_key/3` +- `match/2` +- `match/3` +- `match/4` +- `minimize/1` +- `normalize_commitments/2` +- `print/1` +- `signers/2` +- `type/1` +- `uncommitted/1` +- `uncommitted/2` +- `verify/1` +- `verify/2` +- `verify/3` +- `with_commitments/3` +- `with_only_committed/2` +- `with_only_committers/2` +- `with_only_committers/3` +- `without_commitments/3` +- `without_unless_signed/3` + +--- + +### convert + +This module acts an adapter between messages, as modeled in the +Convert a message from one format to another. Taking a message in the + +```erlang +convert(Msg, TargetFormat, Opts) -> + convert(Msg, TargetFormat, <<"structured@1.0">>, Opts). +``` + +### convert + +This module acts an adapter between messages, as modeled in the +Convert a message from one format to another. 
Taking a message in the + +```erlang +convert(Msg, TargetFormat, tabm, Opts) -> + OldPriv = + if is_map(Msg) -> maps:get(<<"priv">>, Msg, #{}); + true -> #{} + end, + from_tabm(Msg, TargetFormat, OldPriv, Opts); +``` + +### convert + +This module acts an adapter between messages, as modeled in the +Convert a message from one format to another. Taking a message in the + +```erlang +convert(Msg, TargetFormat, SourceFormat, Opts) -> + OldPriv = + if is_map(Msg) -> maps:get(<<"priv">>, Msg, #{}); + true -> #{} + end, + TABM = + to_tabm( + case is_map(Msg) of + true -> hb_maps:without([<<"priv">>], Msg, Opts); + false -> Msg + end, + SourceFormat, + Opts + ), + case TargetFormat of + tabm -> restore_priv(TABM, OldPriv, Opts); + _ -> from_tabm(TABM, TargetFormat, OldPriv, Opts) + end. +``` + +### to_tabm + +```erlang +to_tabm(Msg, SourceFormat, Opts) -> + {SourceCodecMod, Params} = conversion_spec_to_req(SourceFormat, Opts), + % We use _from_ here because the codecs are labelled from the perspective + % of their own format. `dev_codec_ans104:from/1' will convert _from_ + % an ANS-104 message _into_ a TABM. +``` + +### from_tabm + +```erlang +from_tabm(Msg, TargetFormat, OldPriv, Opts) -> + {TargetCodecMod, Params} = conversion_spec_to_req(TargetFormat, Opts), + % We use the _to_ function here because each of the codecs we may call in + % this step are labelled from the perspective of the target format. For + % example, `dev_codec_httpsig:to/1' will convert _from_ a TABM to an + % HTTPSig message. +``` + +### restore_priv + +Add the existing `priv` sub-map back to a converted message, honoring + +```erlang +restore_priv(Msg, EmptyPriv, _Opts) when map_size(EmptyPriv) == 0 -> Msg; +``` + +### restore_priv + +Add the existing `priv` sub-map back to a converted message, honoring +Get a codec device and request params from the given conversion request. 
+ +```erlang +restore_priv(Msg, OldPriv, Opts) -> + MsgPriv = hb_maps:get(<<"priv">>, Msg, #{}, Opts), + ?event({restoring_priv, {msg_priv, MsgPriv}, {old_priv, OldPriv}}), + NewPriv = hb_util:deep_merge(MsgPriv, OldPriv, Opts), + ?event({new_priv, NewPriv}), + Msg#{ <<"priv">> => NewPriv }. +``` + +### conversion_spec_to_req + +Add the existing `priv` sub-map back to a converted message, honoring +Get a codec device and request params from the given conversion request. + +```erlang +conversion_spec_to_req(Spec, Opts) when is_binary(Spec) or (Spec == tabm) -> + conversion_spec_to_req(#{ <<"device">> => Spec }, Opts); +``` + +### conversion_spec_to_req + +Add the existing `priv` sub-map back to a converted message, honoring +Get a codec device and request params from the given conversion request. + +```erlang +conversion_spec_to_req(Spec, Opts) -> + try + Device = + hb_maps:get( + <<"device">>, + Spec, + no_codec_device_in_conversion_spec, + Opts + ), + { + case Device of + tabm -> tabm; + _ -> + hb_ao:message_to_device( + #{ + <<"device">> => Device + }, + Opts + ) + end, + hb_maps:without([<<"device">>], Spec, Opts) + } + catch _:_ -> + throw({message_codec_not_extractable, Spec}) + end. +``` + +### id + +Return the ID of a message. + +```erlang +id(Msg) -> id(Msg, uncommitted). +``` + +### id + +Return the ID of a message. + +```erlang +id(Msg, Opts) when is_map(Opts) -> id(Msg, uncommitted, Opts); +``` + +### id + +Return the ID of a message. + +```erlang +id(Msg, Committers) -> id(Msg, Committers, #{}). +``` + +### id + +Return the ID of a message. 
+ +```erlang +id(Msg, RawCommitters, Opts) -> + CommSpec = + case RawCommitters of + none -> #{ <<"committers">> => <<"none">> }; + uncommitted -> #{ <<"committers">> => <<"none">> }; + unsigned -> #{ <<"committers">> => <<"none">> }; + all -> #{ <<"committers">> => <<"all">> }; + signed -> #{ <<"committers">> => <<"all">> }; + List when is_list(List) -> #{ <<"committers">> => List } + end, + ?event({getting_id, {msg, Msg}, {spec, CommSpec}}), + {ok, ID} = + dev_message:id( + Msg, + CommSpec#{ <<"path">> => <<"id">> }, + Opts + ), + hb_util:human_id(ID). +``` + +### normalize_commitments + +Normalize the IDs in a message, ensuring that there is at least one + +```erlang +normalize_commitments(Msg, Opts) when is_map(Msg) -> + NormMsg = + maps:map( + fun(Key, Val) when Key == <<"commitments">> orelse Key == <<"priv">> -> + Val; + (_Key, Val) -> normalize_commitments(Val, Opts) + end, + Msg + ), + case hb_maps:get(<<"commitments">>, NormMsg, not_found, Opts) of + not_found -> + {ok, #{ <<"commitments">> := Commitments }} = + dev_message:commit( + NormMsg, + #{ <<"type">> => <<"unsigned">> }, + Opts + ), + NormMsg#{ <<"commitments">> => Commitments }; + _ -> NormMsg + end; +``` + +### normalize_commitments + +Normalize the IDs in a message, ensuring that there is at least one + +```erlang +normalize_commitments(Msg, Opts) when is_list(Msg) -> + lists:map(fun(X) -> normalize_commitments(X, Opts) end, Msg); +``` + +### normalize_commitments + +Normalize the IDs in a message, ensuring that there is at least one + +```erlang +normalize_commitments(Msg, _Opts) -> + Msg. +``` + +### with_only_committed + +Return a message with only the committed keys. 
If no commitments are + +```erlang +with_only_committed(Msg, Opts) when is_map(Msg) -> + ?event({with_only_committed, {msg, Msg}, {opts, Opts}}), + Comms = hb_maps:get(<<"commitments">>, Msg, not_found, Opts), + case is_map(Msg) andalso Comms /= not_found of + true -> + try + CommittedKeys = + hb_message:committed( + Msg, + #{ <<"commitments">> => <<"all">> }, + Opts + ), + % Add the ao-body-key to the committed list if it is not + % already present. +``` + +### with_only_committed + +```erlang +with_only_committed(Msg, _) -> + % If the message is not a map, it cannot be signed. +``` + +### with_links + +Filter keys from a map that do not match either the list of keys or + +```erlang +with_links(Keys, Map, Opts) -> + hb_maps:with( + Keys ++ + lists:map( + fun(Key) -> + <<(hb_link:remove_link_specifier(Key))/binary, "+link">> + end, + Keys + ), + Map, + Opts + ). +``` + +### with_only_committers + +Return the message with only the specified committers attached. + +```erlang +with_only_committers(Msg, Committers) -> + with_only_committers(Msg, Committers, #{}). +``` + +### with_only_committers + +```erlang +with_only_committers(Msg, Committers, Opts) when is_map(Msg) -> + NewCommitments = + hb_maps:filter( + fun(_, #{ <<"committer">> := Committer }) -> + lists:member(Committer, Committers); + (_, _) -> false + end, + hb_maps:get(<<"commitments">>, Msg, #{}, Opts), + Opts + ), + Msg#{ <<"commitments">> => NewCommitments }; +``` + +### with_only_committers + +```erlang +with_only_committers(Msg, _Committers, _Opts) -> + throw({unsupported_message_type, Msg}). +``` + +### is_signed_key + +Determine whether a specific key is part of a message's commitments. + +```erlang +is_signed_key(Key, Msg, Opts) -> + lists:member(Key, hb_message:committed(Msg, all, Opts)). +``` + +### without_unless_signed + +Remove any of the given keys that are not signed from a message.
+ +```erlang +without_unless_signed(Key, Msg, Opts) when not is_list(Key) -> + without_unless_signed([Key], Msg, Opts); +``` + +### without_unless_signed + +Remove any of the given keys that are not signed from a message. + +```erlang +without_unless_signed(Keys, Msg, Opts) -> + SignedKeys = hb_message:committed(Msg, all, Opts), + maps:without( + lists:filter(fun(K) -> not lists:member(K, SignedKeys) end, Keys), + Msg + ). +``` + +### commit + +Sign a message with the given wallet. + +```erlang +commit(Msg, WalletOrOpts) -> + commit( + Msg, + WalletOrOpts, + hb_opts:get( + commitment_device, + no_viable_commitment_device, + case is_map(WalletOrOpts) of + true -> WalletOrOpts; + false -> #{ priv_wallet => WalletOrOpts } + end + ) + ). +``` + +### commit + +```erlang +commit(Msg, Wallet, Format) when not is_map(Wallet) -> + commit(Msg, #{ priv_wallet => Wallet }, Format); +``` + +### commit + +```erlang +commit(Msg, Opts, CodecName) when is_binary(CodecName) -> + commit(Msg, Opts, #{ <<"commitment-device">> => CodecName }); +``` + +### commit + +```erlang +commit(Msg, Opts, Spec) -> + {ok, Signed} = + dev_message:commit( + Msg, + Spec#{ + <<"commitment-device">> => + case hb_maps:get(<<"commitment-device">>, Spec, none, Opts) of + none -> + case hb_maps:get(<<"device">>, Spec, none, Opts) of + none -> + throw( + { + no_commitment_device_in_codec_spec, + Spec + } + ); + Device -> Device + end; + CommitmentDevice -> CommitmentDevice + end + }, + Opts + ), + Signed. +``` + +### committed + +Return the list of committed keys from a message. + +```erlang +committed(Msg, all, Opts) -> + committed(Msg, #{ <<"committers">> => <<"all">> }, Opts); +``` + +### committed + +Return the list of committed keys from a message. + +```erlang +committed(Msg, none, Opts) -> + committed(Msg, #{ <<"committers">> => <<"none">> }, Opts); +``` + +### committed + +Return the list of committed keys from a message.
+ +```erlang +committed(Msg, List, Opts) when is_list(List) -> + committed(Msg, #{ <<"commitments">> => List }, Opts); +``` + +### committed + +Return the list of committed keys from a message. + +```erlang +committed(Msg, CommittersMsg, Opts) -> + ?event( + {committed, + {msg, {explicit, Msg}}, + {committers_msg, {explicit, CommittersMsg}}, + {opts, Opts} + } + ), + {ok, CommittedKeys} = dev_message:committed(Msg, CommittersMsg, Opts), + CommittedKeys. +``` + +### verify + +Wrapper function to verify a message. + +```erlang +verify(Msg) -> verify(Msg, all). +``` + +### verify + +Wrapper function to verify a message. + +```erlang +verify(Msg, Committers) -> + verify(Msg, Committers, #{}). +``` + +### verify + +```erlang +verify(Msg, all, Opts) -> + verify(Msg, <<"all">>, Opts); +``` + +### verify + +```erlang +verify(Msg, signers, Opts) -> + verify(Msg, hb_message:signers(Msg, Opts), Opts); +``` + +### verify + +```erlang +verify(Msg, Committers, Opts) when not is_map(Committers) -> + verify( + Msg, + #{ + <<"committers">> => + case ?IS_ID(Committers) of + true -> [Committers]; + false -> Committers + end + }, + Opts + ); +``` + +### verify + +```erlang +verify(Msg, Spec, Opts) -> + ?event(verify, {verify, {spec, Spec}}), + {ok, Res} = + dev_message:verify( + Msg, + Spec, + Opts + ), + Res. +``` + +### uncommitted + +Return the unsigned version of a message in AO-Core format. + +```erlang +uncommitted(Msg) -> uncommitted(Msg, #{}). +``` + +### uncommitted + +Return the unsigned version of a message in AO-Core format. + +```erlang +uncommitted(Bin, _Opts) when is_binary(Bin) -> Bin; +``` + +### uncommitted + +Return the unsigned version of a message in AO-Core format. +This removes the `commitments` key from the message. + +```erlang +uncommitted(Msg, Opts) -> + hb_maps:remove(<<"commitments">>, Msg, Opts). +``` + +### signers + +Return all of the committers on a message that have 'normal', 256 bit,
+addresses. + +```erlang +signers(Msg, Opts) -> + hb_util:ok(dev_message:committers(Msg, #{}, Opts)). +``` + +### print + +Pretty-print a message. + +```erlang +print(Msg) -> print(Msg, 0). +``` + +### print + +Pretty-print a message. +The output is written to standard error. + +```erlang +print(Msg, Indent) -> + io:format(standard_error, "~s", [lists:flatten(hb_format:message(Msg, #{}, Indent))]). +``` + +### type + +Return the type of an encoded message. +One of `tx`, `binary`, `deep`, or `shallow`. + +```erlang +type(TX) when is_record(TX, tx) -> tx; +``` + +### type + +Return the type of an encoded message. +One of `tx`, `binary`, `deep`, or `shallow`. + +```erlang +type(Binary) when is_binary(Binary) -> binary; +``` + +### type + +Return the type of an encoded message. +One of `tx`, `binary`, `deep`, or `shallow`. + +```erlang +type(Msg) when is_map(Msg) -> + IsDeep = lists:any( + fun({_, Value}) -> is_map(Value) end, + lists:filter( + fun({Key, _}) -> not hb_private:is_private(Key) end, + hb_maps:to_list(Msg) + ) + ), + case IsDeep of + true -> deep; + false -> shallow + end. +``` + +### match + +Check if two maps match, including recursively checking nested maps. + +```erlang +match(Map1, Map2) -> + match(Map1, Map2, strict). +``` + +### match + +```erlang +match(Map1, Map2, Mode) -> + match(Map1, Map2, Mode, #{}). +``` + +### match + +```erlang +match(Map1, Map2, Mode, Opts) -> + try unsafe_match(Map1, Map2, Mode, [], Opts) + catch _:Details -> Details + end.
+``` + +### unsafe_match + +Match two maps, returning `true` if they match, or throwing an error + +```erlang +unsafe_match(Map1, Map2, Mode, Path, Opts) -> + Keys1 = + hb_maps:keys( + NormMap1 = hb_util:lower_case_key_map(minimize( + normalize(hb_ao:normalize_keys(Map1, Opts), Opts), + [<<"content-type">>, <<"ao-body-key">>] + ), Opts) + ), + Keys2 = + hb_maps:keys( + NormMap2 = hb_util:lower_case_key_map(minimize( + normalize(hb_ao:normalize_keys(Map2, Opts), Opts), + [<<"content-type">>, <<"ao-body-key">>] + ), Opts) + ), + PrimaryKeysPresent = + (Mode == primary) andalso + lists:all( + fun(Key) -> lists:member(Key, Keys1) end, + Keys1 + ), + ?event(match, + {match, + {keys1, Keys1}, + {keys2, Keys2}, + {mode, Mode}, + {primary_keys_present, PrimaryKeysPresent}, + {msg1, Map1}, + {msg2, Map2} + } + ), + case (Keys1 == Keys2) or (Mode == only_present) or PrimaryKeysPresent of + true -> + lists:all( + fun(Key) -> + ?event(match, {matching_key, Key}), + Val1 = + hb_ao:normalize_keys( + hb_maps:get(Key, NormMap1, not_found, Opts), + Opts + ), + Val2 = + hb_ao:normalize_keys( + hb_maps:get(Key, NormMap2, not_found, Opts), + Opts + ), + BothPresent = (Val1 =/= not_found) and (Val2 =/= not_found), + case (not BothPresent) and (Mode == only_present) of + true -> true; + false -> + case is_map(Val1) andalso is_map(Val2) of + true -> + unsafe_match(Val1, Val2, Mode, Path ++ [Key], Opts); + false -> + case {Val1, Val2} of + {V, V} -> true; + {V, '_'} when V =/= not_found -> true; + {'_', V} when V =/= not_found -> true; + {'_', '_'} -> true; + _ -> + throw( + {value_mismatch, + hb_format:short_id( + hb_path:to_binary( + Path ++ [Key] + ) + ), + {val1, Val1}, + {val2, Val2} + } + ) + end + end + end + end, + Keys1 + ); + false -> + throw( + {keys_mismatch, + {path, hb_format:short_id(hb_path:to_binary(Path))}, + {keys1, Keys1}, + {keys2, Keys2} + } + ) + end. 
+``` + +### matchable_keys + +```erlang +matchable_keys(Map) -> + lists:sort(lists:map(fun hb_ao:normalize_key/1, hb_maps:keys(Map))). +``` + +### diff + +Return the numeric differences between two messages, matching deeply + +```erlang +diff(Msg1, Msg2, Opts) when is_map(Msg1) andalso is_map(Msg2) -> + maps:filtermap( + fun(Key, Val2) -> + case hb_maps:get(Key, Msg1, not_found, Opts) of + Val2 -> + % The key is present in both maps, and the values match. +``` + +### diff + +```erlang +diff(_Val1, _Val2, _Opts) -> + not_found. +``` + +### with_commitments + +Filter messages that do not match the 'spec' given. The underlying match + +```erlang +with_commitments(ID, Msg, Opts) when ?IS_ID(ID) -> + with_commitments([ID], Msg, Opts); +``` + +### with_commitments + +Filter messages that do not match the 'spec' given. The underlying match + +```erlang +with_commitments(Spec, Msg = #{ <<"commitments">> := Commitments }, Opts) -> + ?event({with_commitments, {spec, Spec}, {commitments, Commitments}}), + FilteredCommitments = + hb_maps:filter( + fun(ID, CommMsg) -> + if is_list(Spec) -> + lists:member(ID, Spec); + is_map(Spec) -> + match(Spec, CommMsg, primary, Opts) == true + end + end, + Commitments, + Opts + ), + ?event({with_commitments, {filtered_commitments, FilteredCommitments}}), + Msg#{ <<"commitments">> => FilteredCommitments }; +``` + +### with_commitments + +Filter messages that do not match the 'spec' given. The underlying match + +```erlang +with_commitments(_Spec, Msg, _Opts) -> + Msg. +``` + +### without_commitments + +Filter messages that match the 'spec' given. 
Inverts the `with_commitments/2` + +```erlang +without_commitments(Spec, Msg = #{ <<"commitments">> := Commitments }, Opts) -> + ?event({without_commitments, {spec, Spec}, {msg, Msg}, {commitments, Commitments}}), + FilteredCommitments = + hb_maps:without( + hb_maps:keys( + hb_maps:get( + <<"commitments">>, + with_commitments(Spec, Msg, Opts), + #{}, + Opts + ) + ), + Commitments + ), + ?event({without_commitments, {filtered_commitments, FilteredCommitments}}), + Msg#{ <<"commitments">> => FilteredCommitments }; +``` + +### without_commitments + +Filter messages that match the 'spec' given. Inverts the `with_commitments/2` + +```erlang +without_commitments(_Spec, Msg, _Opts) -> + Msg. +``` + +### commitment + +Extract a commitment from a message given a `committer` or `commitment` + +```erlang +commitment(ID, Msg) -> + commitment(ID, Msg, #{}). +``` + +### commitment + +```erlang +commitment(ID, Link, Opts) when ?IS_LINK(Link) -> + commitment(ID, hb_cache:ensure_loaded(Link, Opts), Opts); +``` + +### commitment + +```erlang +commitment(ID, #{ <<"commitments">> := Commitments }, Opts) + when is_binary(ID), is_map_key(ID, Commitments) -> + hb_maps:get( + ID, + Commitments, + not_found, + Opts + ); +``` + +### commitment + +```erlang +commitment(Spec, Msg, Opts) -> + Matches = commitments(Spec, Msg, Opts), + ?event(debug_commitment, {commitment, {spec, Spec}, {matches, Matches}}), + if + map_size(Matches) == 0 -> not_found; + map_size(Matches) == 1 -> + CommID = hd(hb_maps:keys(Matches)), + {ok, CommID, hb_util:ok(hb_maps:find(CommID, Matches, Opts))}; + true -> + ?event(commitment, {multiple_matches, {matches, Matches}}), + multiple_matches + end; +``` + +### commitment + +```erlang +commitment(_Spec, _Msg, _Opts) -> + % The message has no commitments, so the spec can never match. +``` + +### commitments + +Return a list of all commitments that match the spec. 
+ +```erlang +commitments(ID, Link, Opts) when ?IS_LINK(Link) -> + commitments(ID, hb_cache:ensure_loaded(Link, Opts), Opts); +``` + +### commitments + +Return a list of all commitments that match the spec. + +```erlang +commitments(CommitterID, Msg, Opts) when is_binary(CommitterID) -> + commitments(#{ <<"committer">> => CommitterID }, Msg, Opts); +``` + +### commitments + +Return a list of all commitments that match the spec. + +```erlang +commitments(Spec, #{ <<"commitments">> := Commitments }, Opts) -> + hb_maps:filtermap( + fun(_ID, CommMsg) -> + case match(Spec, CommMsg, primary, Opts) of + true -> {true, CommMsg}; + _ -> false + end + end, + Commitments, + Opts + ); +``` + +### commitments + +Return a list of all commitments that match the spec. + +```erlang +commitments(_Spec, _Msg, _Opts) -> + #{}. +``` + +### commitment_devices + +Return the devices for which there are commitments on a message. + +```erlang +commitment_devices(#{ <<"commitments">> := Commitments }, Opts) -> + lists:map( + fun(CommMsg) -> + hb_ao:get(<<"commitment-device">>, CommMsg, Opts) + end, + maps:values(Commitments) + ); +``` + +### commitment_devices + +Return the devices for which there are commitments on a message. + +```erlang +commitment_devices(_Msg, _Opts) -> + []. +``` + +### find_target + +Implements a standard pattern in which the target for an operation is + +```erlang +find_target(Self, Req, Opts) -> + GetOpts = Opts#{ + hashpath => ignore, + cache_control => [<<"no-cache">>, <<"no-store">>] + }, + {ok, + case hb_maps:get(<<"target">>, Req, <<"self">>, GetOpts) of + <<"self">> -> Self; + Key -> + hb_maps:get( + Key, + Req, + hb_maps:get(<<"body">>, Req, GetOpts), + GetOpts + ) + end + }. +``` + +### minimize + +Remove keys from the map that can be regenerated. Optionally takes an + +```erlang +minimize(Msg) -> minimize(Msg, []). +``` + +### minimize + +Remove keys from the map that can be regenerated. 
Optionally takes an + +```erlang +minimize(RawVal, _) when not is_map(RawVal) -> RawVal; +``` + +### minimize + +Remove keys from the map that can be regenerated. Optionally takes an + +```erlang +minimize(Map, ExtraKeys) -> + NormKeys = + lists:map(fun hb_ao:normalize_key/1, ?REGEN_KEYS) + ++ lists:map(fun hb_ao:normalize_key/1, ExtraKeys), + maps:filter( + fun(Key, _) -> + (not lists:member(hb_ao:normalize_key(Key), NormKeys)) + andalso (not hb_private:is_private(Key)) + end, + maps:map(fun(_K, V) -> minimize(V) end, Map) + ). +``` + +### normalize + +Return a map with only the keys that necessary, without those that can + +```erlang +normalize(Map, Opts) when is_map(Map) orelse is_list(Map) -> + NormalizedMap = hb_ao:normalize_keys(Map, Opts), + FilteredMap = filter_default_keys(NormalizedMap), + hb_maps:with(matchable_keys(FilteredMap), FilteredMap); +``` + +### normalize + +Return a map with only the keys that necessary, without those that can + +```erlang +normalize(Other, _Opts) -> + Other. +``` + +### filter_default_keys + +Remove keys from a map that have the default values found in the tx + +```erlang +filter_default_keys(Map) -> + DefaultsMap = default_tx_message(), + maps:filter( + fun(Key, Value) -> + case hb_maps:find(hb_ao:normalize_key(Key), DefaultsMap) of + {ok, Value} -> false; + _ -> true + end + end, + Map + ). +``` + +### default_tx_message + +Get the normalized fields and default values of the tx record. + +```erlang +default_tx_message() -> + hb_maps:from_list(default_tx_list()). 
+``` + +### default_tx_list + +Get the ordered list of fields as AO-Core keys and default values of + +```erlang +default_tx_list() -> + Keys = lists:map(fun hb_ao:normalize_key/1, record_info(fields, tx)), +``` + +--- + +*Generated from [hb_message.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_message.erl)* diff --git a/docs/book/src/hb_message_test_vectors.erl.md b/docs/book/src/hb_message_test_vectors.erl.md new file mode 100644 index 000000000..36e40e41e --- /dev/null +++ b/docs/book/src/hb_message_test_vectors.erl.md @@ -0,0 +1,1736 @@ +# hb_message_test_vectors + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_message_test_vectors.erl) + +A battery of test vectors for message codecs, implementing the +`message@1.0` encoding and commitment APIs. Additionally, this module +houses tests that ensure the general functioning of the `hb_message` API. + +--- + +### run_test + +A battery of test vectors for message codecs, implementing the +Test invocation function, making it easier to run a specific test. + +```erlang +run_test() -> + hb:init(), + nested_structured_fields_test( + #{ <<"device">> => <<"json@1.0">>, <<"bundle">> => true }, + test_opts(normal) + ). +``` + +### test_codecs + +Return a list of codecs to test. Disable these as necessary if you need + +```erlang +test_codecs() -> + [ + <<"structured@1.0">>, + <<"httpsig@1.0">>, + #{ <<"device">> => <<"httpsig@1.0">>, <<"bundle">> => true }, + <<"flat@1.0">>, + <<"ans104@1.0">>, + #{ <<"device">> => <<"ans104@1.0">>, <<"bundle">> => true }, + <<"json@1.0">>, + #{ <<"device">> => <<"json@1.0">>, <<"bundle">> => true } + ]. +``` + +### suite_test_opts + +Return a set of options for testing, taking the codec name as an + +```erlang +suite_test_opts() -> + [ + #{ + name => normal, + desc => <<"Default opts">>, + opts => test_opts(normal) + } + ]. 
+``` + +### suite_test_opts + +```erlang +suite_test_opts(OptsName) -> + [ O || O = #{ name := OName } <- suite_test_opts(), OName == OptsName ]. +``` + +### test_opts + +```erlang +test_opts(normal) -> + #{ + store => hb_test_utils:test_store(), + priv_wallet => hb:wallet() + }. +``` + +### test_suite + +```erlang +test_suite() -> + [ + % Basic operations + {<<"Binary to binary">>, + fun binary_to_binary_test/2}, + {<<"Match">>, + fun match_test/2}, + {<<"Basic message encoding and decoding">>, + fun basic_message_codec_test/2}, + {<<"Priv survives conversion">>, + fun priv_survives_conversion_test/2}, + {<<"Message with body">>, + fun set_body_codec_test/2}, + {<<"Message with large keys">>, + fun message_with_large_keys_test/2}, + {<<"Structured field atom parsing">>, + fun structured_field_atom_parsing_test/2}, + {<<"Structured field decimal parsing">>, + fun structured_field_decimal_parsing_test/2}, + {<<"Unsigned id">>, + fun unsigned_id_test/2}, + % Nested structures + {<<"Simple nested message">>, + fun simple_nested_message_test/2}, + {<<"Message with simple embedded list">>, + fun message_with_simple_embedded_list_test/2}, + {<<"Nested empty map">>, + fun nested_empty_map_test/2}, + {<<"Empty body">>, + fun empty_body_test/2}, + {<<"Nested structured fields">>, + fun nested_structured_fields_test/2}, + {<<"Single layer message to encoding">>, + fun single_layer_message_to_encoding_test/2}, + {<<"Nested body list">>, + fun nested_body_list_test/2}, + {<<"Empty string in nested tag">>, + fun empty_string_in_nested_tag_test/2}, + {<<"Deep typed message ID">>, + fun deep_typed_message_id_test/2}, + {<<"Encode small balance table">>, + fun encode_small_balance_table_test/2}, + {<<"Encode large balance table">>, + fun encode_large_balance_table_test/2}, + {<<"Normalize commitments">>, + fun normalize_commitments_test/2}, + % Signed messages + {<<"Signed message to message and back">>, + fun signed_message_encode_decode_verify_test/2}, + {<<"Specific order 
signed message">>, + fun specific_order_signed_message_test/2}, + {<<"Specific order deeply nested signed message">>, + fun specific_order_deeply_nested_signed_message_test/2}, + {<<"Signed only committed data field">>, + fun signed_only_committed_data_field_test/2}, + {<<"Signed simple nested message">>, + fun simple_signed_nested_message_test/2}, + {<<"Signed nested message">>, + fun signed_nested_message_with_child_test/2}, + {<<"Committed keys">>, + fun committed_keys_test/2}, + {<<"Committed empty keys">>, + fun committed_empty_keys_test/2}, + {<<"Signed list HTTP response">>, + fun signed_list_test/2}, + {<<"Sign node message">>, + fun sign_node_message_test/2}, + {<<"Complex signed message">>, + fun complex_signed_message_test/2}, + {<<"Nested message with large keys">>, + fun nested_message_with_large_keys_test/2}, + {<<"Signed nested complex signed message">>, + fun verify_nested_complex_signed_test/2}, + % Complex structures + {<<"Nested message with large keys and content">>, + fun nested_message_with_large_keys_and_content_test/2}, + {<<"Nested message with large content">>, + fun nested_message_with_large_content_test/2}, + {<<"Deeply nested message with content">>, + fun deeply_nested_message_with_content_test/2}, + {<<"Deeply nested message with only content">>, + fun deeply_nested_message_with_only_content/2}, + {<<"Signed deep serialize and deserialize">>, + fun signed_deep_message_test/2}, + {<<"Signed nested data key">>, + fun signed_nested_data_key_test/2}, + {<<"Signed message with hashpath">>, + fun hashpath_sign_verify_test/2}, + {<<"Message with derived components">>, + fun signed_message_with_derived_components_test/2}, + {<<"Large body committed keys">>, + fun large_body_committed_keys_test/2}, + {<<"Signed with inner signed">>, + fun signed_with_inner_signed_message_test/2}, + {<<"Recursive nested list">>, + fun recursive_nested_list_test/2}, + {<<"Sign links">>, + fun sign_links_test/2}, + {<<"ID of linked message">>, + fun 
id_of_linked_message_test/2}, + {<<"Sign deep message from lazy cache read">>, + fun sign_deep_message_from_lazy_cache_read_test/2}, + {<<"ID of deep message and link message match">>, + fun id_of_deep_message_and_link_message_match_test/2}, + {<<"Signed non-bundle is bundlable">>, + fun signed_non_bundle_is_bundlable_test/2}, + {<<"Bundled ordering">>, + fun bundled_ordering_test/2}, + {<<"Codec round-trip conversion is idempotent">>, + fun codec_roundtrip_conversion_is_idempotent_test/2}, + {<<"Bundled and unbundled IDs differ">>, + fun bundled_and_unbundled_ids_differ_test/2}, + {<<"Tabm conversion is idempotent">>, + fun tabm_conversion_is_idempotent_test/2} + ]. +``` + +### suite_test_ + +Organizes a test battery for the `hb_message` module and its codecs. + +```erlang +suite_test_() -> + hb_test_utils:suite_with_opts( + codec_test_suite( + test_codecs(), + normal + ), + suite_test_opts(normal) + ). +``` + +### codec_test_suite + +Run the test suite for a set of codecs, using the given options type. + +```erlang +codec_test_suite(Codecs, OptsType) -> + lists:flatmap( + fun(CodecName) -> + lists:map(fun({Desc, Test}) -> + TestName = + binary_to_list( + << (suite_name(CodecName))/binary, ": ", Desc/binary >> + ), + TestSpecificOpts = test_opts(OptsType), + { + Desc, + TestName, + fun(_SuiteOpts) -> Test(CodecName, TestSpecificOpts) end + } + end, test_suite()) + end, + Codecs + ). +``` + +### suite_name + +Create a name for a suite from a codec spec. + +```erlang +suite_name(CodecSpec) when is_binary(CodecSpec) -> CodecSpec; +``` + +### suite_name + +Create a name for a suite from a codec spec. + +```erlang +suite_name(CodecSpec) when is_map(CodecSpec) -> + CodecName = maps:get(<<"device">>, CodecSpec, <<"[! NO CODEC !]">>), + case maps:get(<<"bundle">>, CodecSpec, false) of + false -> CodecName; + true -> << CodecName/binary, " (bundle)">> + end. +``` + +### is_idempotent + +Tests a message transforming function to ensure that it is idempotent. 
+ +```erlang +is_idempotent(Func, Msg, Opts) -> + Run = fun(M) -> case Func(M) of {ok, Res} -> Res; Res -> Res end end, + After1 = Run(Msg), + After2 = Run(After1), + After3 = Run(After2), + MatchRes1 = hb_message:match(After1, After2, strict, Opts), + MatchRes2 = hb_message:match(After2, After3, strict, Opts), + ?event({is_idempotent, {match_res1, MatchRes1}, {match_res2, MatchRes2}}), + MatchRes1 andalso MatchRes2. +``` + +### tabm_conversion_is_idempotent_test + +Ensure that converting a message to/from TABM multiple times repeatedly + +```erlang +tabm_conversion_is_idempotent_test(_Codec, Opts) -> + From = fun(M) -> hb_message:convert(M, <<"structured@1.0">>, tabm, Opts) end, + To = fun(M) -> hb_message:convert(M, tabm, <<"structured@1.0">>, Opts) end, + SimpleMsg = #{ <<"a">> => <<"x">>, <<"b">> => <<"y">>, <<"c">> => <<"z">> }, + ComplexMsg = + #{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + Signed = hb_message:commit( + #{ + <<"type">> => <<"Message">>, + <<"function">> => <<"fac">>, + <<"parameters">> => #{ + <<"a">> => 1 + }, + <<"content-type">> => <<"application/html">>, + <<"body">> => + << + """ + +

Hello, multiline message

+ + """ + >> + }, + Opts, + <<"structured@1.0">> + ) + }, + ?assert(is_idempotent(From, SimpleMsg, Opts)), + ?assert(is_idempotent(From, Signed, Opts)), + ?assert(is_idempotent(From, ComplexMsg, Opts)), + ?assert(is_idempotent(To, SimpleMsg, Opts)), + ?assert(is_idempotent(To, Signed, Opts)), + ?assert(is_idempotent(To, ComplexMsg, Opts)). +``` + +### codec_roundtrip_conversion_is_idempotent_test + +Ensure that converting a message to a codec, then back to TABM multiple + +```erlang +codec_roundtrip_conversion_is_idempotent_test(Codec, Opts) -> + Roundtrip = + fun(M) -> + hb_message:convert( + hb_message:convert(M, Codec, <<"structured@1.0">>, Opts), + <<"structured@1.0">>, + Codec, + Opts + ) + end, + SimpleMsg = #{ <<"a">> => <<"x">>, <<"b">> => <<"y">>, <<"c">> => <<"z">> }, + ComplexMsg = + #{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + Signed = hb_message:commit( + #{ + <<"type">> => <<"Message">>, + <<"function">> => <<"fac">>, + <<"parameters">> => #{ + <<"a">> => 1 + }, + <<"content-type">> => <<"application/html">>, + <<"body">> => + << + """ + +

Hello, multiline message

+ + """ + >> + }, + Opts, + Codec + ) + }, + ?assert(is_idempotent(Roundtrip, SimpleMsg, Opts)), + ?assert(is_idempotent(Roundtrip, Signed, Opts)), + ?assert(is_idempotent(Roundtrip, ComplexMsg, Opts)). +``` + +### default_keys_removed_test + +Test that the filter_default_keys/1 function removes TX fields + +```erlang +default_keys_removed_test() -> + TX = #tx { unsigned_id = << 1:256 >>, anchor = << 2:256 >> }, + TXMap = #{ + <<"unsigned_id">> => TX#tx.unsigned_id, + <<"anchor">> => TX#tx.anchor, + <<"owner">> => TX#tx.owner, + <<"target">> => TX#tx.target, + <<"data">> => TX#tx.data + }, + FilteredMap = hb_message:filter_default_keys(TXMap), + ?assertEqual(<< 1:256 >>, hb_maps:get(<<"unsigned_id">>, FilteredMap)), + ?assertEqual(<< 2:256 >>, hb_maps:get(<<"anchor">>, FilteredMap, not_found)), + ?assertEqual(not_found, hb_maps:get(<<"owner">>, FilteredMap, not_found)), + ?assertEqual(not_found, hb_maps:get(<<"target">>, FilteredMap, not_found)). +``` + +### minimization_test + +Test that the filter_default_keys/1 function removes TX fields + +```erlang +minimization_test() -> + Msg = #{ + <<"unsigned_id">> => << 1:256 >>, + <<"id">> => << 2:256 >> + }, + MinimizedMsg = hb_message:minimize(Msg), + ?event({minimized, MinimizedMsg}), + ?assertEqual(1, hb_maps:size(MinimizedMsg)). +``` + +### match_modes_test + +```erlang +match_modes_test() -> + Msg1 = #{ <<"a">> => 1, <<"b">> => 2 }, + Msg2 = #{ <<"a">> => 1 }, + Msg3 = #{ <<"a">> => 1, <<"b">> => 2, <<"c">> => 3 }, + ?assert(hb_message:match(Msg1, Msg2, only_present)), + ?assert(hb_message:match(Msg2, Msg1, strict) =/= true), + ?assert(hb_message:match(Msg1, Msg3, primary)), + ?assert(hb_message:match(Msg3, Msg1, primary) =/= true). 
+``` + +### basic_message_codec_test + +```erlang +basic_message_codec_test(Codec, Opts) -> + Msg = #{ <<"normal_key">> => <<"NORMAL_VALUE">> }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({decoded, Decoded}), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). +``` + +### set_body_codec_test + +```erlang +set_body_codec_test(Codec, Opts) -> + Msg = #{ <<"body">> => <<"NORMAL_VALUE">>, <<"test-key">> => <<"Test-Value">> }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). +``` + +### single_layer_message_to_encoding_test + +Test that we can convert a message into a tx record and back. + +```erlang +single_layer_message_to_encoding_test(Codec, Opts) -> + Msg = #{ + <<"anchor">> => << 2:256 >>, + <<"target">> => << 4:256 >>, + <<"data">> => <<"DATA">>, + <<"special-key">> => <<"SPECIAL_VALUE">> + }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({decoded, Decoded}), + ?event({matching, {input, Msg}, {output, Decoded}}), + MatchRes = hb_message:match(Msg, Decoded, strict, Opts), + ?event({match_result, MatchRes}), + ?assert(MatchRes). 
+``` + +### signed_only_committed_data_field_test + +```erlang +signed_only_committed_data_field_test(Codec, Opts) -> + Msg = hb_message:commit(#{ <<"data">> => <<"DATA">> }, Opts, Codec), + ?event({signed_msg, Msg}), + {ok, OnlyCommitted} = hb_message:with_only_committed(Msg, Opts), + ?event({only_committed, OnlyCommitted}), + Encoded = hb_message:convert(OnlyCommitted, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({decoded, Decoded}), + MatchRes = hb_message:match(Msg, OnlyCommitted, strict, Opts), + ?event({match_result, MatchRes}), + ?assert(MatchRes), + ?assert(hb_message:verify(OnlyCommitted, all, Opts)). +``` + +### signed_nested_data_key_test + +```erlang +signed_nested_data_key_test(Codec, Opts) -> + Msg = + #{ + <<"outer-data">> => <<"outer">>, + <<"body">> => + #{ + <<"inner-data">> => <<"inner">>, + <<"data">> => <<"DATA">> + } + }, + Signed = hb_message:commit(Msg, Opts, Codec), + ?event({signed, Signed}), + Encoded = hb_message:convert(Signed, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({decoded, Decoded}), + LoadedMsg = hb_cache:ensure_all_loaded(Decoded, Opts), + ?event({matching, {input, Msg}, {output, LoadedMsg}}), + ?assert(hb_message:match(Msg, LoadedMsg, primary, Opts)). +``` + +### match_test + +Test that the message matching function works. + +```erlang +match_test(Codec, Opts) -> + Msg = #{ <<"a">> => 1, <<"b">> => 2 }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({decoded, Decoded}), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). 
+``` + +### binary_to_binary_test + +```erlang +binary_to_binary_test(Codec, Opts) -> + % Serialization must be able to turn a raw binary into a TX, then turn + % that TX back into a binary and have the result match the original. +``` + +### structured_field_atom_parsing_test + +Structured field parsing tests. + +```erlang +structured_field_atom_parsing_test(Codec, Opts) -> + Msg = #{ highly_unusual_http_header => highly_unusual_value }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). +``` + +### structured_field_decimal_parsing_test + +```erlang +structured_field_decimal_parsing_test(Codec, Opts) -> + Msg = #{ integer_field => 1234567890 }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). +``` + +### message_with_large_keys_test + +Test that the data field is correctly managed when we have multiple + +```erlang +message_with_large_keys_test(Codec, Opts) -> + Msg = #{ + <<"normal_key">> => <<"normal_value">>, + <<"large_key">> => << 0:((1 + 1024) * 8) >>, + <<"another_large_key">> => << 0:((1 + 1024) * 8) >>, + <<"another_normal_key">> => <<"another_normal_value">> + }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). 
+``` + +### verify_nested_complex_signed_test + +Check that a nested signed message with an embedded typed list can + +```erlang +verify_nested_complex_signed_test(Codec, Opts) -> + Msg = + hb_message:commit(#{ + <<"path">> => <<"schedule">>, + <<"method">> => <<"POST">>, + <<"body">> => + Inner = hb_message:commit( + #{ + <<"type">> => <<"Message">>, + <<"function">> => <<"fac">>, + <<"parameters">> => #{ + <<"a">> => 1 + }, + <<"content-type">> => <<"application/html">>, + <<"body">> => + << + """ + +

Hello, multiline message

+ + """ + >> + }, + Opts, + Codec + ) + }, + Opts, + Codec + ), + ?event({signed, Msg}), + ?event({inner, Inner}), + % Ensure that the messages verify prior to conversion. +``` + +### nested_message_with_large_keys_and_content_test + +Check that large keys and data fields are correctly handled together. + +```erlang +nested_message_with_large_keys_and_content_test(Codec, Opts) -> + MainBodyKey = + case Codec of + <<"ans104@1.0">> -> <<"data">>; + _ -> <<"body">> + end, + Msg = #{ + <<"normal_key">> => <<"normal_value">>, + <<"large_key">> => << 0:(1024 * 16) >>, + <<"another_large_key">> => << 0:(1024 * 16) >>, + <<"another_normal_key">> => <<"another_normal_value">>, + MainBodyKey => <<"Hey from the data field!">> + }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({matching, {input, Msg}, {output, Decoded}}), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). +``` + +### simple_nested_message_test + +```erlang +simple_nested_message_test(Codec, Opts) -> + Msg = #{ + <<"a">> => <<"1">>, + <<"nested">> => #{ <<"b">> => <<"1">> }, + <<"c">> => <<"3">> + }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({matching, {input, Msg}, {output, Decoded}}), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). 
+``` + +### simple_signed_nested_message_test + +```erlang +simple_signed_nested_message_test(Codec, Opts) -> + Msg = + hb_message:commit( + #{ + <<"a">> => <<"1">>, + <<"nested">> => #{ <<"b">> => <<"1">> }, + <<"c">> => <<"3">> + }, + Opts, + Codec + ), + ?assert(hb_message:verify(Msg, all, Opts)), + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({matching, {input, Msg}, {output, Decoded}}), + MatchRes = hb_message:match(Msg, Decoded, primary, Opts), + ?event({match_result, MatchRes}), + ?assert(MatchRes), + ?assert(hb_message:verify(Decoded, all, Opts)). +``` + +### signed_nested_message_with_child_test + +```erlang +signed_nested_message_with_child_test(Codec, Opts) -> + Msg = #{ + <<"outer-a">> => <<"1">>, + <<"nested">> => + hb_message:commit( + #{ <<"inner-b">> => <<"1">>, <<"inner-list">> => [1, 2, 3] }, + Opts, + Codec + ), + <<"outer-c">> => <<"3">> + }, + hb_cache:write(Msg, Opts), + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({matching, {input, Msg}, {output, Decoded}}), + MatchRes = hb_message:match(Msg, Decoded, primary, Opts), + ?event({match_result, MatchRes}), + ?assert(MatchRes), + ?assert(hb_message:verify(Decoded, all, Opts)). +``` + +### nested_empty_map_test + +```erlang +nested_empty_map_test(Codec, Opts) -> + Msg = #{ <<"body">> => #{ <<"empty-map-test">> => #{}}}, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({matching, {input, Msg}, {output, Decoded}}), + MatchRes = hb_message:match(Msg, Decoded, strict, Opts), + ?event({match_result, MatchRes}), + ?assert(MatchRes). 
+``` + +### empty_body_test + +```erlang +empty_body_test(Codec, Opts) -> + Msg = #{ <<"body">> => <<>> }, + Signed = hb_message:commit(Msg, Opts, Codec), + Encoded = hb_message:convert(Signed, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({matching, {input, Msg}, {output, Decoded}}), + MatchRes = hb_message:match(Signed, Decoded, strict, Opts), + ?event({match_result, MatchRes}), + ?assert(MatchRes). +``` + +### nested_message_with_large_content_test + +Test that the data field is correctly managed when we have multiple + +```erlang +nested_message_with_large_content_test(Codec, Opts) -> + MainBodyKey = + case Codec of + <<"ans104@1.0">> -> <<"data">>; + _ -> <<"body">> + end, + Msg = #{ + <<"depth">> => <<"outer">>, + MainBodyKey => #{ + <<"map_item">> => + #{ + <<"depth">> => <<"inner">>, + <<"large_data_inner">> => << 0:((1 + 1024) * 8) >> + }, + <<"large_data_outer">> => << 0:((1 + 1024) * 8) >> + } + }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({matching, {input, Msg}, {output, Decoded}}), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). +``` + +### deeply_nested_message_with_content_test + +Test that we can convert a 3 layer nested message into a tx record and back. 
+ +```erlang +deeply_nested_message_with_content_test(Codec, Opts) -> + MainBodyKey = + case Codec of + <<"ans104@1.0">> -> <<"data">>; + _ -> <<"body">> + end, + Msg = #{ + <<"depth">> => <<"outer">>, + MainBodyKey => #{ + <<"map_item">> => + #{ + <<"depth">> => <<"inner">>, + MainBodyKey => #{ + <<"depth">> => <<"innermost">>, + MainBodyKey => <<"DATA">> + } + } + } + }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({matching, {input, Msg}, {output, Decoded}}), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). +``` + +### deeply_nested_message_with_only_content + +```erlang +deeply_nested_message_with_only_content(Codec, Opts) -> + MainBodyKey = + case Codec of + <<"ans104@1.0">> -> <<"data">>; + _ -> <<"body">> + end, + Msg = #{ + <<"depth1">> => <<"outer">>, + MainBodyKey => #{ + MainBodyKey => #{ + MainBodyKey => <<"depth2-body">> + } + } + }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({matching, {input, Msg}, {output, Decoded}}), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). +``` + +### nested_structured_fields_test + +```erlang +nested_structured_fields_test(Codec, Opts) -> + NestedMsg = #{ <<"a">> => #{ <<"b">> => 1 } }, + Encoded = hb_message:convert(NestedMsg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({matching, {input, NestedMsg}, {output, Decoded}}), + ?assert(hb_message:match(NestedMsg, Decoded, strict, Opts)). 
+``` + +### nested_message_with_large_keys_test + +```erlang +nested_message_with_large_keys_test(Codec, Opts) -> + Msg = #{ + <<"a">> => <<"1">>, + <<"long_data">> => << 0:((1 + 1024) * 8) >>, + <<"nested">> => #{ <<"b">> => <<"1">> }, + <<"c">> => <<"3">> + }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({matching, {input, Msg}, {output, Decoded}}), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). +``` + +### signed_message_encode_decode_verify_test + +```erlang +signed_message_encode_decode_verify_test(Codec, Opts) -> + Msg = #{ + <<"test-1">> => <<"TEST VALUE 1">>, + <<"test-2">> => <<"TEST VALUE 2">>, + <<"test-3">> => <<"TEST VALUE 3">>, + <<"test-4">> => <<"TEST VALUE 4">>, + <<"test-5">> => <<"TEST VALUE 5">> + }, + SignedMsg = + hb_message:commit( + Msg, + Opts, + Codec + ), + ?event({signed_msg, SignedMsg}), + ?assertEqual(true, hb_message:verify(SignedMsg, all, Opts)), + Encoded = hb_message:convert(SignedMsg, Codec, <<"structured@1.0">>, Opts), + ?event({msg_encoded_as_codec, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({decoded, Decoded}), + ?assertEqual(true, hb_message:verify(Decoded, all, Opts)), + ?event({matching, {input, SignedMsg}, {encoded, Encoded}, {decoded, Decoded}}), + ?event({http, {string, dev_codec_httpsig_conv:encode_http_msg(SignedMsg, Opts)}}), + MatchRes = hb_message:match(SignedMsg, Decoded, strict, Opts), + ?event({match_result, MatchRes}), + ?assert(MatchRes). 
+``` + +### specific_order_signed_message_test + +```erlang +specific_order_signed_message_test(RawCodec, Opts) -> + Msg = #{ + <<"key-1">> => <<"DATA-1">>, + <<"key-2">> => <<"DATA-2">>, + <<"key-3">> => <<"DATA-3">> + }, + Codec = + if is_map(RawCodec) -> RawCodec; + true -> #{ <<"device">> => RawCodec } + end, + SignedMsg = + hb_message:commit( + Msg, + Opts, + Codec#{ <<"committed">> => [<<"key-3">>, <<"key-1">>, <<"key-2">>] } + ), + ?event({signed_msg, SignedMsg}), + ?event({http, {string, dev_codec_httpsig_conv:encode_http_msg(SignedMsg, Opts)}}), + ?assert(hb_message:verify(SignedMsg, all, Opts)). +``` + +### specific_order_deeply_nested_signed_message_test + +```erlang +specific_order_deeply_nested_signed_message_test(RawCodec, Opts) -> + Msg = #{ + <<"key-1">> => <<"DATA-1">>, + <<"key-2">> => #{ <<"body">> => [1,2] }, + <<"key-3">> => <<"DATA-3">>, + <<"key-4">> => #{ <<"body">> => [1,2,3,4] }, + <<"key-5">> => <<"DATA-5">> + }, + Codec = + if is_map(RawCodec) -> RawCodec; + true -> #{ <<"device">> => RawCodec } + end, + SignedMsg = + hb_message:commit( + Msg, + Opts, + Codec#{ + <<"committed">> => + [ + <<"key-3">>, + <<"key-5">>, + <<"key-1">>, + <<"key-2">>, + <<"key-4">> + ] + } + ), + ?event({signed_msg, SignedMsg}), + ?assert(hb_message:verify(SignedMsg, all, Opts)). 
+``` + +### complex_signed_message_test + +```erlang +complex_signed_message_test(Codec, Opts) -> + Msg = #{ + <<"data">> => <<"TEST DATA">>, + <<"deep-data">> => #{ + <<"data">> => <<"DEEP DATA">>, + <<"complex-key">> => 1337, + <<"list">> => [1,2,3] + } + }, + SignedMsg = + hb_message:commit( + Msg, + Opts, + Codec + ), + Encoded = hb_message:convert(SignedMsg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({decoded, Decoded}), + ?assertEqual(true, hb_message:verify(Decoded, all, Opts)), + ?event({matching, {input, SignedMsg}, {output, Decoded}}), + MatchRes = hb_message:match(SignedMsg, Decoded, strict, Opts), + ?event({match_result, MatchRes}), + ?assert(MatchRes). +``` + +### deep_multisignature_test + +```erlang +deep_multisignature_test() -> + % Only the `httpsig@1.0' codec supports multisignatures. +``` + +### deep_typed_message_id_test + +```erlang +deep_typed_message_id_test(Codec, Opts) -> + Msg = #{ + <<"data">> => <<"TEST DATA">>, + <<"deep-data">> => #{ + <<"data">> => <<"DEEP DATA">>, + <<"complex-key">> => 1337, + <<"list">> => [1,2,3] + } + }, + InitID = hb_message:id(Msg, none, Opts), + ?event({init_id, InitID}), + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + DecodedID = hb_message:id(Decoded, none, Opts), + ?event({decoded_id, DecodedID}), + ?event({stages, {init, Msg}, {encoded, Encoded}, {decoded, Decoded}}), + ?assertEqual( + InitID, + DecodedID + ). 
+``` + +### signed_deep_message_test + +```erlang +signed_deep_message_test(Codec, Opts) -> + Msg = #{ + <<"test-key">> => <<"TEST_VALUE">>, + <<"body">> => #{ + <<"nested-1">> => + #{ + <<"body">> => <<"NESTED BODY">>, + <<"nested-2">> => <<"NESTED-2">> + }, + <<"nested-3">> => <<"NESTED-3">> + } + }, + EncDec = + hb_message:convert( + hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + <<"structured@1.0">>, + Codec, + Opts + ), + ?event({enc_dec, EncDec}), + SignedMsg = + hb_message:commit( + EncDec, + Opts, + Codec + ), + ?event({signed_msg, SignedMsg}), + {ok, Res} = dev_message:verify(SignedMsg, #{ <<"committers">> => <<"all">>}, Opts), + ?event({verify_res, Res}), + ?assertEqual(true, hb_message:verify(SignedMsg, all, Opts)), + ?event({verified, SignedMsg}), + Encoded = hb_message:convert(SignedMsg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({decoded, Decoded}), + {ok, DecodedRes} = + dev_message:verify( + Decoded, + #{ <<"committers">> => <<"all">>}, + Opts + ), + ?event({verify_decoded_res, DecodedRes}), + MatchRes = hb_message:match(SignedMsg, Decoded, strict, Opts), + ?event({match_result, MatchRes}), + ?assert(MatchRes). +``` + +### signed_list_test + +```erlang +signed_list_test(Codec, Opts) -> + Msg = #{ <<"key-with-list">> => [1.0, 2.0, 3.0] }, + Signed = hb_message:commit(Msg, Opts, Codec), + ?assert(hb_message:verify(Signed, all, Opts)), + Encoded = hb_message:convert(Signed, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({decoded, Decoded}), + ?assert(hb_message:verify(Decoded, all, Opts)), + ?assert(hb_message:match(Signed, Decoded, strict, Opts)). 
+``` + +### unsigned_id_test + +```erlang +unsigned_id_test(Codec, Opts) -> + Msg = #{ <<"data">> => <<"TEST_DATA">> }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?assertEqual( + dev_message:id(Decoded, #{ <<"committers">> => <<"none">>}, Opts), + dev_message:id(Msg, #{ <<"committers">> => <<"none">>}, Opts) + ). +``` + +### message_with_simple_embedded_list_test + +```erlang +message_with_simple_embedded_list_test(Codec, Opts) -> + Msg = #{ <<"list">> => [<<"value-1">>, <<"value-2">>, <<"value-3">>] }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). +``` + +### empty_string_in_nested_tag_test + +```erlang +empty_string_in_nested_tag_test(Codec, Opts) -> + Msg = + #{ + <<"dev">> => + #{ + <<"stderr">> => <<"aa">>, + <<"stdin">> => <<"b">>, + <<"stdout">> => <<"c">> + } + }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). 
+``` + +### hashpath_sign_verify_test + +```erlang +hashpath_sign_verify_test(Codec, Opts) -> + Msg = + #{ + <<"test_key">> => <<"TEST_VALUE">>, + <<"body">> => #{ + <<"nested_key">> => + #{ + <<"body">> => <<"NESTED_DATA">>, + <<"nested_key">> => <<"NESTED_VALUE">> + }, + <<"nested_key2">> => <<"NESTED_VALUE2">> + }, + <<"priv">> => #{ + <<"hashpath">> => + hb_path:hashpath( + hb_util:human_id(crypto:strong_rand_bytes(32)), + hb_util:human_id(crypto:strong_rand_bytes(32)), + fun hb_crypto:sha256_chain/2, + #{} + ) + } + }, + ?event({msg, {explicit, Msg}}), + SignedMsg = hb_message:commit(Msg, Opts, Codec), + ?event({signed_msg, {explicit, SignedMsg}}), + {ok, Res} = dev_message:verify(SignedMsg, #{ <<"committers">> => <<"all">>}, Opts), + ?event({verify_res, {explicit, Res}}), + ?assert(hb_message:verify(SignedMsg, all, Opts)), + ?event({verified, {explicit, SignedMsg}}), + Encoded = hb_message:convert(SignedMsg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({decoded, Decoded}), + ?assert(hb_message:verify(Decoded, all, Opts)), + ?assert( + hb_message:match( + SignedMsg, + Decoded, + strict, + Opts + ) + ). +``` + +### normalize_commitments_test + +```erlang +normalize_commitments_test(Codec, Opts) -> + Msg = #{ + <<"a">> => #{ + <<"b">> => #{ + <<"c">> => 1, + <<"d">> => #{ + <<"e">> => 2 + }, + <<"f">> => 3 + }, + <<"g">> => 4 + }, + <<"h">> => 5 + }, + NormMsg = hb_message:normalize_commitments(Msg, Opts), + ?event({norm_msg, NormMsg}), + ?assert(hb_message:verify(NormMsg, all, Opts)), + ?assert(maps:is_key(<<"commitments">>, NormMsg)), + ?assert(maps:is_key(<<"commitments">>, maps:get(<<"a">>, NormMsg))), + ?assert( + maps:is_key( + <<"commitments">>, + maps:get(<<"b">>, maps:get(<<"a">>, NormMsg)) + ) + ). 
+``` + +### signed_message_with_derived_components_test + +```erlang +signed_message_with_derived_components_test(Codec, Opts) -> + Msg = #{ + <<"path">> => <<"/test">>, + <<"authority">> => <<"example.com">>, + <<"scheme">> => <<"https">>, + <<"method">> => <<"GET">>, + <<"target-uri">> => <<"/test">>, + <<"request-target">> => <<"/test">>, + <<"status">> => <<"200">>, + <<"reason-phrase">> => <<"OK">>, + <<"body">> => <<"TEST_DATA">>, + <<"content-digest">> => <<"TEST_DIGEST">>, + <<"normal">> => <<"hello">> + }, + SignedMsg = + hb_message:commit( + Msg, + Opts, + Codec + ), + ?event({signed_msg, SignedMsg}), + ?assert(hb_message:verify(SignedMsg, all, Opts)), + Encoded = hb_message:convert(SignedMsg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({decoded, Decoded}), + ?assert(hb_message:verify(Decoded, all, Opts)), + ?assert( + hb_message:match( + SignedMsg, + Decoded, + strict, + Opts + ) + ). +``` + +### committed_keys_test + +```erlang +committed_keys_test(Codec, Opts) -> + Msg = #{ <<"a">> => 1, <<"b">> => 2, <<"c">> => 3 }, + Signed = hb_message:commit(Msg, Opts, Codec), + CommittedKeys = hb_message:committed(Signed, all, Opts), + ?event({committed_keys, CommittedKeys}), + ?assert(hb_message:verify(Signed, all, Opts)), + ?assert(lists:member(<<"a">>, CommittedKeys)), + ?assert(lists:member(<<"b">>, CommittedKeys)), + ?assert(lists:member(<<"c">>, CommittedKeys)), + MsgToFilter = Signed#{ <<"bad-key">> => <<"BAD VALUE">> }, + ?assert( + not lists:member( + <<"bad-key">>, + hb_message:committed(MsgToFilter, all, Opts) + ) + ). 
+``` + +### committed_empty_keys_test + +```erlang +committed_empty_keys_test(Codec, Opts) -> + Msg = #{ + <<"very">> => <<>>, + <<"exciting">> => #{}, + <<"values">> => [], + <<"non-empty">> => <<"TEST">> + }, + Signed = hb_message:commit(Msg, Opts, Codec), + ?assert(hb_message:verify(Signed, all, Opts)), + CommittedKeys = hb_message:committed(Signed, all, Opts), + ?event({committed_keys, CommittedKeys}), + ?event({signed, Signed}), + ?assert(lists:member(<<"very">>, CommittedKeys)), + ?assert(lists:member(<<"exciting">>, CommittedKeys)), + ?assert(lists:member(<<"values">>, CommittedKeys)), + ?assert(lists:member(<<"non-empty">>, CommittedKeys)). +``` + +### deeply_nested_committed_keys_test + +```erlang +deeply_nested_committed_keys_test() -> + Opts = (test_opts(normal))#{ + store => [ + #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST">> + } + ] + }, + Msg = #{ + <<"a">> => 1, + <<"b">> => #{ <<"c">> => #{ <<"d">> => <<0:((1 + 1024) * 1024)>> } }, + <<"e">> => <<0:((1 + 1024) * 1024)>> + }, + Signed = hb_message:commit(Msg, Opts, <<"httpsig@1.0">>), + {ok, WithOnlyCommitted} = hb_message:with_only_committed(Signed, Opts), + Committed = hb_message:committed(Signed, all, Opts), + ToCompare = hb_maps:without([<<"commitments">>], WithOnlyCommitted), + ?event( + {msgs, + {base, Msg}, + {signed, Signed}, + {committed, Committed}, + {with_only_committed, WithOnlyCommitted}, + {to_compare, ToCompare} + } + ), + ?assert( + hb_message:match( + Msg, + ToCompare, + strict, + Opts + ) + ). 
+``` + +### signed_with_inner_signed_message_test + +```erlang +signed_with_inner_signed_message_test(Codec, Opts) -> + Msg = + hb_message:commit( + #{ + <<"a">> => 1, + <<"inner">> => + hb_maps:merge( + InnerSigned = + hb_message:commit( + #{ + <<"c">> => <<"abc">>, + <<"e">> => 5 + %<<"body">> => <<"inner-body">> + % <<"inner-2">> => #{ + % <<"body">> => <<"inner-2-body">> + % } + }, + Opts, + Codec + ), + % Uncommitted keys that should be ripped out of the inner + % message by `with_only_committed'. These should still be + % present in the `with_only_committed' outer message. +``` + +### large_body_committed_keys_test + +```erlang +large_body_committed_keys_test(Codec, Opts) -> + case Codec of + <<"httpsig@1.0">> -> + Msg = #{ + <<"a">> => 1, + <<"b">> => 2, + <<"c">> => #{ <<"d">> => << 1:((1 + 1024) * 1024) >> } + }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({decoded, Decoded}), + Signed = hb_message:commit(Decoded, Opts, Codec), + ?event({signed, Signed}), + CommittedKeys = hb_message:committed(Signed, all, Opts), + ?assert(lists:member(<<"a">>, CommittedKeys)), + ?assert(lists:member(<<"b">>, CommittedKeys)), + ?assert(lists:member(<<"c">>, CommittedKeys)), + MsgToFilter = Signed#{ <<"bad-key">> => <<"BAD VALUE">> }, + ?assert( + not lists:member( + <<"bad-key">>, + hb_message:committed(MsgToFilter, all, Opts) + ) + ); + _ -> + skip + end. 
+``` + +### sign_node_message_test + +```erlang +sign_node_message_test(Codec, Opts) -> + Msg = hb_message:commit(hb_opts:default_message_with_env(), Opts, Codec), + ?event({committed, Msg}), + ?assert(hb_message:verify(Msg, all, Opts)), + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({final, Decoded}), + MatchRes = hb_message:match(Msg, Decoded, strict, Opts), + ?event({match_result, MatchRes}), + ?assert(MatchRes), + ?assert(hb_message:verify(Decoded, all, Opts)). +``` + +### nested_body_list_test + +```erlang +nested_body_list_test(Codec, Opts) -> + Msg = #{ + <<"body">> => + [ + #{ + <<"test-key">> => + <<"TEST VALUE #", (integer_to_binary(X))/binary>> + } + || + X <- lists:seq(1, 3) + ] + }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + ?event(encoded, {encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({decoded, Decoded}), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)). +``` + +### recursive_nested_list_test + +```erlang +recursive_nested_list_test(Codec, Opts) -> + % This test is to ensure that the codec can handle arbitrarily deep nested + % lists. 
+``` + +### priv_survives_conversion_test + +```erlang +priv_survives_conversion_test(<<"ans104@1.0">>, _Opts) -> skip; +``` + +### priv_survives_conversion_test + +```erlang +priv_survives_conversion_test(<<"json@1.0">>, _Opts) -> skip; +``` + +### priv_survives_conversion_test + +```erlang +priv_survives_conversion_test(#{ <<"device">> := <<"ans104@1.0">> }, _Opts) -> + skip; +``` + +### priv_survives_conversion_test + +```erlang +priv_survives_conversion_test(#{ <<"device">> := <<"json@1.0">> }, _Opts) -> + skip; +``` + +### priv_survives_conversion_test + +```erlang +priv_survives_conversion_test(Codec, Opts) -> + Msg = #{ + <<"data">> => <<"TEST_DATA">>, + <<"priv">> => #{ <<"test_key">> => <<"TEST_VALUE">> } + }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({decoded, Decoded}), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)), + ?assertMatch( + #{ <<"test_key">> := <<"TEST_VALUE">> }, + maps:get(<<"priv">>, Decoded) + ). +``` + +### encode_balance_table + +```erlang +encode_balance_table(Size, Codec, Opts) -> + Msg = + #{ + hb_util:encode(crypto:strong_rand_bytes(32)) => + rand:uniform(1_000_000_000_000_000) + || + _ <- lists:seq(1, Size) + }, + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, {explicit, Encoded}}), + Decoded = + hb_message:uncommitted( + hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + Opts + ), + ?event({decoded, {explicit, Decoded}}), + ?assert(hb_message:match(Msg, Decoded, if_present, Opts)). +``` + +### encode_small_balance_table_test + +```erlang +encode_small_balance_table_test(Codec, Opts) -> + encode_balance_table(5, Codec, Opts). 
+``` + +### encode_large_balance_table_test + +```erlang +encode_large_balance_table_test(<<"ans104@1.0">>, _Opts) -> + skip; +``` + +### encode_large_balance_table_test + +```erlang +encode_large_balance_table_test(#{ <<"device">> := <<"ans104@1.0">> }, _Opts) -> + skip; +``` + +### encode_large_balance_table_test + +```erlang +encode_large_balance_table_test(Codec, Opts) -> + encode_balance_table(1000, Codec, Opts). +``` + +### sign_links_test + +```erlang +sign_links_test(#{ <<"bundle">> := true }, _Opts) -> + skip; +``` + +### sign_links_test + +```erlang +sign_links_test(Codec, Opts) -> + % Make a message with definitively non-accessible lazy-loadable links. Sign + % it, ensuring that we can produce signatures and IDs without having the + % data directly in memory. +``` + +### bundled_and_unbundled_ids_differ_test + +```erlang +bundled_and_unbundled_ids_differ_test(Codec = #{ <<"bundle">> := true }, Opts) -> + SignatureType = + case maps:get(<<"device">>, Codec, undefined) of + <<"ans104@1.0">> -> <<"rsa-pss-sha256">>; + _ -> <<"hmac-sha256">> + end, + Msg = #{ + <<"immediate-key">> => <<"immediate-value">>, + <<"nested">> => #{ + <<"immediate-key-2">> => <<"immediate-value-2">> + } + }, + SignedNoBundle = + hb_message:commit( + Msg, + Opts, + maps:without([<<"bundle">>], Codec) + ), + SignedBundled = hb_message:commit(Msg, Opts, Codec), + ?event({signed_no_bundle, SignedNoBundle}), + ?event({signed_bundled, SignedBundled}), + {ok, UnbundledID, _} = + hb_message:commitment( + #{ <<"type">> => SignatureType }, + SignedNoBundle, + Opts + ), + {ok, BundledID, _} = + hb_message:commitment( + #{ <<"type">> => SignatureType }, + SignedBundled, + Opts + ), + ?event({unbundled_id, UnbundledID}), + ?event({bundled_id, BundledID}), + ?assertNotEqual(UnbundledID, BundledID); +``` + +### bundled_and_unbundled_ids_differ_test + +```erlang +bundled_and_unbundled_ids_differ_test(_Codec, _Opts) -> + skip. 
+``` + +### id_of_linked_message_test + +```erlang +id_of_linked_message_test(#{ <<"bundle">> := true }, _Opts) -> + skip; +``` + +### id_of_linked_message_test + +```erlang +id_of_linked_message_test(Codec, Opts) -> + Msg = #{ + <<"immediate-key">> => <<"immediate-value">>, + <<"link-key">> => + {link, hb_util:human_id(crypto:strong_rand_bytes(32)), #{ + <<"type">> => <<"link">>, + <<"lazy">> => false + }} + }, + UnsignedID = hb_message:id(Msg, Opts), + ?event({id, UnsignedID}), + EncMsg = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + DecMsg = hb_message:convert(EncMsg, <<"structured@1.0">>, Codec, Opts), + UnsignedID2 = hb_message:id(DecMsg, Opts), + ?assertEqual(UnsignedID, UnsignedID2). +``` + +### sign_deep_message_from_lazy_cache_read_test + +```erlang +sign_deep_message_from_lazy_cache_read_test(#{ <<"bundle">> := true }, _Opts) -> + skip; +``` + +### sign_deep_message_from_lazy_cache_read_test + +```erlang +sign_deep_message_from_lazy_cache_read_test(Codec, Opts) -> + Msg = #{ + <<"immediate-key">> => <<"immediate-value">>, + <<"link-key">> => #{ + <<"immediate-key-2">> => <<"link-value">>, + <<"link-key-2">> => #{ + <<"immediate-key-3">> => <<"link-value-2">> + } + } + }, + % Write the message to the store to ensure that we get lazy-loadable links. +``` + +### id_of_deep_message_and_link_message_match_test + +```erlang +id_of_deep_message_and_link_message_match_test(_Codec, Opts) -> + Msg = #{ + <<"immediate-key">> => <<"immediate-value">>, + <<"link-key">> => #{ + <<"immediate-key-2">> => <<"immediate-value-2">>, + <<"link-key-2">> => #{ + <<"immediate-key-3">> => <<"immediate-value-3">> + } + } + }, + Linkified = hb_link:normalize(Msg, offload, Opts), + ?event(linkify, {test_recvd_linkified, {msg, Linkified}}), + BaseID = hb_message:id(Msg, Opts), + ?event(linkify, {test_recvd_nonlink_id, {id, BaseID}}), + LinkID = hb_message:id(Linkified, Opts), + ?event(linkify, {test_recvd_link_id, {id, LinkID}}), + ?assertEqual(BaseID, LinkID). 
+``` + +### signed_non_bundle_is_bundlable_test + +```erlang +signed_non_bundle_is_bundlable_test( + Codec = #{ <<"device">> := <<"httpsig@1.0">>, <<"bundle">> := true }, + Opts) -> + Msg = + hb_message:commit( + #{ + <<"target">> => hb_util:human_id(crypto:strong_rand_bytes(32)), + <<"type">> => <<"Message">>, + <<"function">> => <<"fac">>, + <<"parameters">> => [5.0] + }, + Opts, + maps:get(<<"device">>, Codec) + ), + Encoded = + hb_message:convert( + Msg, + Codec, + <<"structured@1.0">>, + Opts + ), + Decoded = + hb_message:convert( + Encoded, + <<"structured@1.0">>, + maps:get(<<"device">>, Codec), + Opts + ), + ?assert(hb_message:match(Msg, Decoded, strict, Opts)), + ?assert(hb_message:verify(Decoded, all, Opts)); +``` + +### signed_non_bundle_is_bundlable_test + +```erlang +signed_non_bundle_is_bundlable_test(_Codec, _Opts) -> + skip. +``` + +### find_multiple_commitments_test_disabled + +```erlang +find_multiple_commitments_test_disabled() -> + Opts = test_opts(normal), + Store = hb_opts:get(store, no_store, Opts), + hb_store:reset(Store), + Msg = #{ + <<"a">> => 1, + <<"b">> => 2, + <<"c">> => 3 + }, + Sig1 = hb_message:commit(Msg, Opts#{ priv_wallet => ar_wallet:new() }), + {ok, _} = hb_cache:write(Sig1, Opts), + Sig2 = hb_message:commit(Msg, Opts#{ priv_wallet => ar_wallet:new() }), + {ok, _} = hb_cache:write(Sig2, Opts), + {ok, ReadMsg} = hb_cache:read(hb_message:id(Msg, none, Opts), Opts), + LoadedCommitments = hb_cache:ensure_all_loaded(ReadMsg, Opts), + ?event(debug_commitments, {read, LoadedCommitments}), + ok. 
+``` + +### bundled_ordering_test + +Ensure that a httpsig@1.0 message which is bundled and requests an + +```erlang +bundled_ordering_test(Codec = #{ <<"bundle">> := true }, Opts) -> + % Opts = (test_opts(normal))#{ + % store => [ + % #{ <<"store-module">> => hb_store_fs, <<"name">> => <<"cache-TEST">> } + % ] + % }, + Msg = + hb_message:commit( + #{ + <<"a">> => <<"1">>, + <<"b">> => <<"2">>, + <<"b-2">> => #{ <<"nested">> => #{ <<"n">> => <<"2">> } }, + <<"c">> => <<"3">>, + <<"c-2">> => #{ <<"nested">> => #{ <<"n">> => <<"3">> } }, + <<"d">> => <<"4">> + }, + Opts, + Codec#{ + <<"committed">> => [ + <<"a">>, + <<"b">>, + <<"b-2">>, + <<"c">>, + <<"c-2">>, + <<"d">> + ] + } + ), + ?event({committed, Msg}), + Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), + ?event({encoded, Encoded}), + ?event({http, {string, dev_codec_httpsig_conv:encode_http_msg(Msg, Opts)}}), + Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), + ?event({matching, {input, Msg}, {output, Decoded}}), + MatchRes = hb_message:match(Msg, Decoded, primary, Opts), + ?event({match_result, MatchRes}), + ?assert(MatchRes), + ?assert(hb_message:verify(Decoded, all, Opts)); +``` + +### bundled_ordering_test + +Ensure that a httpsig@1.0 message which is bundled and requests an + +```erlang +bundled_ordering_test(_Codec, _Opts) -> +``` + +--- + +*Generated from [hb_message_test_vectors.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_message_test_vectors.erl)* diff --git a/docs/book/src/hb_metrics_collector.erl.md b/docs/book/src/hb_metrics_collector.erl.md new file mode 100644 index 000000000..3ca4073fc --- /dev/null +++ b/docs/book/src/hb_metrics_collector.erl.md @@ -0,0 +1,76 @@ +# hb_metrics_collector + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_metrics_collector.erl) + +## Exported Functions + + +--- + +### deregister_cleanup + +```erlang +deregister_cleanup(_) -> ok. 
+``` + +### collect_mf + +```erlang +collect_mf(_Registry, Callback) -> + {Uptime, _} = erlang:statistics(wall_clock), + Callback( + create_gauge( + process_uptime_seconds, + "The number of seconds the Erlang process has been up.", + Uptime + ) + ), + SystemLoad = cpu_sup:avg5(), + Callback( + create_gauge( + system_load, + "The load values are proportional to how long" + " time a runnable Unix process has to spend in the run queue" + " before it is scheduled. Accordingly, higher values mean" + " more system load", + SystemLoad + ) + ), + ok. +``` + +### collect_metrics + +```erlang +collect_metrics(system_load, SystemLoad) -> + %% Return the gauge metric with no labels + prometheus_model_helpers:gauge_metrics( + [ + {[], SystemLoad} + ] + ); +``` + +### collect_metrics + +```erlang +collect_metrics(process_uptime_seconds, Uptime) -> + %% Convert the uptime from milliseconds to seconds + UptimeSeconds = Uptime / 1000, + %% Return the gauge metric with no labels + prometheus_model_helpers:gauge_metrics( + [ + {[], UptimeSeconds} + ] + ). +``` + +### create_gauge + +```erlang +create_gauge(Name, Help, Data) -> +``` + +--- + +*Generated from [hb_metrics_collector.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_metrics_collector.erl)* diff --git a/docs/book/src/hb_name.erl.md b/docs/book/src/hb_name.erl.md new file mode 100644 index 000000000..11cc02bff --- /dev/null +++ b/docs/book/src/hb_name.erl.md @@ -0,0 +1,292 @@ +# hb_name + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_name.erl) + +An abstraction for name registration/deregistration in HyperBEAM. +Its motivation is to provide a way to register names that are not necessarily +atoms, but can be any term (for example: hashpaths or `process@1.0` IDs). +An important characteristic of these functions is that they are atomic: +There can only ever be one registrant for a given name at a time. 
+ +--- + +## Exported Functions + +- `all/0` +- `lookup/1` +- `register/1` +- `register/2` +- `start/0` +- `unregister/1` + +--- + +### start + +An abstraction for name registration/deregistration in HyperBEAM. + +```erlang +start() -> + try ets:info(?NAME_TABLE) of + undefined -> start_ets(); + _ -> ok + catch + error:badarg -> start_ets() + end. +``` + +### start_ets + +```erlang +start_ets() -> + ets:new(?NAME_TABLE, [ + named_table, + public, + {keypos, 1}, + {write_concurrency, true}, % Safe as key-writes are atomic. +``` + +### register + +Register a name. If the name is already registered, the registration + +```erlang +register(Name) -> + start(), + ?MODULE:register(Name, self()). +``` + +### register + +```erlang +register(Name, Pid) when is_atom(Name) -> + try erlang:register(Name, Pid) of + true -> ok + catch + error:badarg -> error % Name already registered + end; +``` + +### register + +```erlang +register(Name, Pid) -> + start(), + case ets:insert_new(?NAME_TABLE, {Name, Pid}) of + true -> ok; + false -> error + end. +``` + +### unregister + +Unregister a name. + +```erlang +unregister(Name) when is_atom(Name) -> + catch erlang:unregister(Name), + ets:delete(?NAME_TABLE, Name), % Cleanup if atom was in ETS + ok; +``` + +### unregister + +Unregister a name. + +```erlang +unregister(Name) -> + start(), + ets:delete(?NAME_TABLE, Name), + ok. +``` + +### lookup + +Lookup a name -> PID. + +```erlang +lookup(Name) when is_atom(Name) -> + case whereis(Name) of + undefined -> + % Check ETS for atom-based names + start(), + ets_lookup(Name); + Pid -> Pid + end; +``` + +### lookup + +Lookup a name -> PID. + +```erlang +lookup(Name) -> + start(), + ets_lookup(Name). +``` + +### ets_lookup + +```erlang +ets_lookup(Name) -> + case ets:lookup(?NAME_TABLE, Name) of + [{Name, Pid}] -> + case is_process_alive(Pid) of + true -> Pid; + false -> + ets:delete(?NAME_TABLE, Name), + undefined + end; + [] -> undefined + end. +``` + +### all + +List the names in the registry. 
+ +```erlang +all() -> + Registered = + ets:tab2list(?NAME_TABLE) ++ + lists:filtermap( + fun(Name) -> + case whereis(Name) of + undefined -> false; + Pid -> {true, {Name, Pid}} + end + end, + erlang:registered() + ), + lists:filter( + fun({_, Pid}) -> is_process_alive(Pid) end, + Registered + ). +``` + +### basic_test + +```erlang +basic_test(Term) -> + ?assertEqual(ok, hb_name:register(Term)), + ?assertEqual(self(), hb_name:lookup(Term)), + ?assertEqual(error, hb_name:register(Term)), + hb_name:unregister(Term), + ?assertEqual(undefined, hb_name:lookup(Term)). +``` + +### atom_test + +```erlang +atom_test() -> + basic_test(atom). +``` + +### term_test + +```erlang +term_test() -> + basic_test({term, os:timestamp()}). +``` + +### concurrency_test + +```erlang +concurrency_test() -> + Name = {concurrent_test, os:timestamp()}, + SuccessCount = length([R || R <- spawn_test_workers(Name), R =:= ok]), + ?assertEqual(1, SuccessCount), + ?assert(is_pid(hb_name:lookup(Name))), + hb_name:unregister(Name). +``` + +### spawn_test_workers + +```erlang +spawn_test_workers(Name) -> + Self = self(), + Names = + [ + case Name of + random -> {random_name, rand:uniform(1000000)}; + _ -> Name + end + || + _ <- lists:seq(1, ?CONCURRENT_REGISTRATIONS) + ], + Pids = + [ + spawn( + fun() -> + Result = hb_name:register(ProcName), + Self ! {result, self(), Result}, + % Stay alive to prevent cleanup for a period. +``` + +### dead_process_test + +```erlang +dead_process_test() -> + Name = {dead_process_test, os:timestamp()}, + {Pid, Ref} = spawn_monitor(fun() -> hb_name:register(Name), ok end), + receive {'DOWN', Ref, process, Pid, _} -> ok end, + ?assertEqual(undefined, hb_name:lookup(Name)). 
+``` + +### cleanup_test + +```erlang +cleanup_test() -> + {setup, + fun() -> + Name = {cleanup_test, os:timestamp()}, + {Pid, Ref} = spawn_monitor(fun() -> timer:sleep(1000) end), + ?assertEqual(ok, hb_name:register(Name, Pid)), + {Name, Pid, Ref} + end, + fun({Name, _, _}) -> + hb_name:unregister(Name) + end, + fun({Name, Pid, Ref}) -> + {"Auto-cleanup on process death", + fun() -> + exit(Pid, kill), + receive {'DOWN', Ref, process, Pid, _} -> ok end, + ?assertEqual(undefined, wait_for_cleanup(Name, 10)) + end} + end + }. +``` + +### wait_for_cleanup + +```erlang +wait_for_cleanup(Name, Retries) -> + case Retries > 0 of + true -> + case hb_name:lookup(Name) of + undefined -> undefined; + _ -> + timer:sleep(100), + wait_for_cleanup(Name, Retries - 1) + end; + false -> undefined + end. +``` + +### all_test + +```erlang +all_test() -> + hb_name:register(test_name, self()), + ?assert(lists:member({test_name, self()}, hb_name:all())), + BaseRegistered = length(hb_name:all()), + spawn_test_workers(random), + ?assertEqual(BaseRegistered + ?CONCURRENT_REGISTRATIONS, length(hb_name:all())), + timer:sleep(1000), + ?assertEqual(BaseRegistered, length(hb_name:all())). +``` + +--- + +*Generated from [hb_name.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_name.erl)* diff --git a/docs/book/src/hb_opts.erl.md b/docs/book/src/hb_opts.erl.md new file mode 100644 index 000000000..499686c9b --- /dev/null +++ b/docs/book/src/hb_opts.erl.md @@ -0,0 +1,683 @@ +# hb_opts + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_opts.erl) + +A module for interacting with local and global options inside +HyperBEAM. Options are set globally, but can also be overridden using an +an optional local `Opts` map argument. Many functions across the HyperBEAM +environment accept an `Opts` argument, which can be used to customize +behavior. +Options set in an `Opts` map must _never_ change the behavior of a function +that should otherwise be deterministic. 
Doing so may lead to loss of funds +by the HyperBEAM node operator, as the results of their executions will be +different than those of other node operators. If they are economically +staked on the correctness of these results, they may experience punishments +for non-verifiable behavior. Instead, if a local node setting makes +deterministic behavior impossible, the caller should fail the execution +with a refusal to execute. + +--- + +## Exported Functions + +- `as/2` +- `check_required_opts/2` +- `default_message_with_env/0` +- `default_message/0` +- `ensure_node_history/2` +- `get/1` +- `get/2` +- `get/3` +- `identities/1` +- `load_bin/2` +- `load/1` +- `load/2` +- `mimic_default_types/3` + +--- + +### default_message_with_env + +A module for interacting with local and global options inside +Return the default message with all environment variables set. + +```erlang +default_message_with_env() -> + maps:fold( + fun(Key, _Spec, NodeMsg) -> + case global_get(Key, undefined, #{}) of + undefined -> NodeMsg; + Value -> NodeMsg#{ Key => Value } + end + end, + default_message(), + ?ENV_KEYS + ). +``` + +### default_message + +The default configuration options of the hyperbeam node. + +```erlang +default_message() -> + #{ + %%%%%%%% Functional options %%%%%%%% + hb_config_location => <<"config.flat">>, + initialized => true, + %% What HTTP client should the node use? + %% Options: gun, httpc + http_client => gun, + %% Scheduling mode: Determines when the SU should inform the recipient + %% that an assignment has been scheduled for a message. +``` + +### get + +Get an option from the global options, optionally overriding with a + +```erlang +get(Key) -> ?MODULE:get(Key, undefined). +``` + +### get + +Get an option from the global options, optionally overriding with a + +```erlang +get(Key, Default) -> ?MODULE:get(Key, Default, #{}). 
+``` + +### get + +Get an option from the global options, optionally overriding with a + +```erlang +get(Key, Default, Opts) when is_binary(Key) -> + try binary_to_existing_atom(Key, utf8) of + AtomKey -> do_get(AtomKey, Default, Opts) + catch + error:badarg -> do_get(Key, Default, Opts) + end; +``` + +### get + +Get an option from the global options, optionally overriding with a + +```erlang +get(Key, Default, Opts) -> + do_get(Key, Default, Opts). +``` + +### do_get + +```erlang +do_get(Key, Default, Opts = #{ <<"only">> := Only }) -> + do_get(Key, Default, maps:remove(<<"only">>, Opts#{ only => Only })); +``` + +### do_get + +```erlang +do_get(Key, Default, Opts = #{ <<"prefer">> := Prefer }) -> + do_get(Key, Default, maps:remove(<<"prefer">>, Opts#{ prefer => Prefer })); +``` + +### do_get + +```erlang +do_get(Key, Default, Opts = #{ only := local }) -> + case maps:find(Key, Opts) of + {ok, Value} -> Value; + error -> + Default + end; +``` + +### do_get + +```erlang +do_get(Key, Default, Opts = #{ only := global }) -> + case global_get(Key, hb_opts_not_found, Opts) of + hb_opts_not_found -> Default; + Value -> Value + end; +``` + +### do_get + +```erlang +do_get(Key, Default, Opts = #{ prefer := global }) -> + case do_get(Key, hb_opts_not_found, #{ only => global }) of + hb_opts_not_found -> do_get(Key, Default, Opts#{ only => local }); + Value -> Value + end; +``` + +### do_get + +```erlang +do_get(Key, Default, Opts = #{ prefer := local }) -> + case do_get(Key, hb_opts_not_found, Opts#{ only => local }) of + hb_opts_not_found -> + do_get(Key, Default, Opts#{ only => global }); + Value -> Value + end; +``` + +### do_get + +```erlang +do_get(Key, Default, Opts) -> + % No preference was set in Opts, so we default to local. +``` + +### global_get + +Get an environment variable or configuration key. 
Depending on whether + +```erlang +global_get(Key, Default, Opts) -> + case erlang:get({processed_env, Key}) of + {cached, Value} -> Value; + undefined -> + % Thee value is not cached, so we need to process it. +``` + +### cached_os_env + +Cache the result of os:getenv/1 in the process dictionary, as it never + +```erlang +cached_os_env(Key, DefaultValue) -> + case erlang:get({os_env, Key}) of + {cached, false} -> DefaultValue; + {cached, Value} -> Value; + undefined -> + % The process dictionary returns `undefined' for a key that is not + % set, so we need to check the environment and store the result. +``` + +### normalize_default + +Get an option from environment variables, optionally consulting the + +```erlang +normalize_default({conditional, Feature, IfTest, Else}) -> + case hb_features:enabled(Feature) of + true -> IfTest; + false -> Else + end; +``` + +### normalize_default + +Get an option from environment variables, optionally consulting the +An abstraction for looking up configuration variables. In the future, + +```erlang +normalize_default(Default) -> Default. +``` + +### config_lookup + +Get an option from environment variables, optionally consulting the +An abstraction for looking up configuration variables. In the future, +Parse a `flat@1.0` encoded file into a map, matching the types of the + +```erlang +config_lookup(Key, Default, _Opts) -> maps:get(Key, default_message(), Default). +``` + +### load + +Get an option from environment variables, optionally consulting the +An abstraction for looking up configuration variables. In the future, +Parse a `flat@1.0` encoded file into a map, matching the types of the + +```erlang +load(Path) -> load(Path, #{}). +``` + +### load + +Get an option from environment variables, optionally consulting the +An abstraction for looking up configuration variables. 
In the future, +Parse a `flat@1.0` encoded file into a map, matching the types of the + +```erlang +load(Path, Opts) -> + {ok, Device} = path_to_device(Path), + case file:read_file(Path) of + {ok, Bin} -> + load_bin(Device, Bin, Opts); + _ -> {error, not_found} + end. +``` + +### path_to_device + +Convert a path to a device from its file extension. If no extension is + +```erlang +path_to_device(Path) -> + case binary:split(hb_util:bin(Path), <<".">>, []) of + [_, Extension] -> + ?event(debug_node_msg, + {path_to_device, + {path, Path}, + {extension, Extension} + } + ), + extension_to_device(Extension); + _ -> {ok, <<"flat@1.0">>} + end. +``` + +### extension_to_device + +Convert a file extension to a device name. + +```erlang +extension_to_device(Ext) -> + extension_to_device(Ext, maps:get(preloaded_devices, default_message())). +``` + +### extension_to_device + +```erlang +extension_to_device(_, []) -> {error, not_found}; +``` + +### extension_to_device + +```erlang +extension_to_device(Ext, [#{ <<"name">> := Name }|Rest]) -> + case binary:match(Name, Ext) of + nomatch -> extension_to_device(Ext, Rest); + {0, _} -> {ok, Name} + end. +``` + +### load_bin + +Parse a given binary with a device (defaulting to `flat@1.0`) into a + +```erlang +load_bin(Bin, Opts) -> + load_bin(<<"flat@1.0">>, Bin, Opts). +``` + +### load_bin + +Parse a given binary with a device (defaulting to `flat@1.0`) into a + +```erlang +load_bin(<<"flat@1.0">>, Bin, Opts) -> + % Trim trailing whitespace from each line in the file. +``` + +### load_bin + +```erlang +load_bin(Device, Bin, Opts) -> + try + { + ok, + mimic_default_types( + hb_cache:ensure_all_loaded( + hb_message:convert(Bin, <<"structured@1.0">>, Device, Opts), + Opts + ), + new_atoms, + Opts + ) + } + catch error:B -> {error, B} + end. +``` + +### mimic_default_types + +Mimic the types of the default message for a given map. 
+ +```erlang +mimic_default_types(Map, Mode, Opts) -> + Default = default_message_with_env(), + hb_maps:from_list(lists:map( + fun({Key, Value}) -> + NewKey = try hb_util:key_to_atom(Key, Mode) catch _:_ -> Key end, + NewValue = + case hb_maps:get(NewKey, Default, not_found, Opts) of + not_found -> Value; + DefaultValue when is_atom(DefaultValue) -> + hb_util:atom(Value); + DefaultValue when is_integer(DefaultValue) -> + hb_util:int(Value); + DefaultValue when is_float(DefaultValue) -> + hb_util:float(Value); + DefaultValue when is_binary(DefaultValue) -> + Value; + _ -> Value + end, + {NewKey, NewValue} + end, + hb_maps:to_list(Map, Opts) + )). +``` + +### as + +Find a given identity from the `identities` map, and return the options + +```erlang +as(Identity, Opts) -> + case identities(Opts) of + #{ Identity := SubOpts } -> + ?event({found_identity_sub_opts_are, SubOpts}), + {ok, maps:merge(Opts, mimic_default_types(SubOpts, new_atoms, Opts))}; + _ -> + {error, not_found} + end. +``` + +### identities + +Find all known IDs and their sub-options from the `priv_ids` map. Allows + +```erlang +identities(Opts) -> + identities(hb:wallet(), Opts). +``` + +### identities + +```erlang +identities(Default, Opts) -> + Named = ?MODULE:get(identities, #{}, Opts), + % Generate an address-based map of identities. +``` + +### check_required_opts + +Utility function to check for required options in a list. + +```erlang +-spec check_required_opts(list({binary(), term()}), map()) -> + {ok, map()} | {error, binary()}. +``` + +```erlang +check_required_opts(KeyValuePairs, Opts) -> + MissingOpts = lists:filtermap( + fun({Name, Value}) -> + case Value of + not_found -> {true, Name}; + _ -> false + end + end, + KeyValuePairs + ), + case MissingOpts of + [] -> + {ok, Opts}; + _ -> + MissingOptsStr = binary:list_to_bin( + lists:join(<<", ">>, MissingOpts) + ), + ErrorMsg = <<"Missing required opts: ", MissingOptsStr/binary>>, + {error, ErrorMsg} + end. 
+``` + +### ensure_node_history + +Ensures all items in a node history meet required configuration options. + +```erlang +-spec ensure_node_history(NodeHistory :: list() | term(), RequiredOpts :: map()) -> + {ok, binary()} | {error, binary()}. +``` + +```erlang +ensure_node_history(Opts, RequiredOpts) -> + ?event(validate_history_items, {required_opts, RequiredOpts}), + maybe + % Get the node history from the options + NodeHistory = hb_opts:get(node_history, [], Opts), + % Add the Opts to the node history to validate all items + NodeHistoryWithOpts = [ Opts | NodeHistory ], + % Normalize required options + NormalizedRequiredOpts ?= hb_ao:normalize_keys(RequiredOpts), + % Normalize all node history items once + NormalizedNodeHistory ?= lists:map( + fun(Item) -> + hb_ao:normalize_keys(Item) + end, + NodeHistoryWithOpts + ), + % Get the first item (complete opts) and remaining items (differences) + [FirstItem | RemainingItems] = NormalizedNodeHistory, + % Step 2: Validate first item values match requirements + FirstItemValuesMatch = hb_message:match(NormalizedRequiredOpts, FirstItem, primary), + true ?= (FirstItemValuesMatch == true) orelse {error, values_invalid}, + % Step 3: Check that remaining items don't modify required keys + NoRequiredKeysModified = lists:all( + fun(HistoryItem) -> + % For each required key, if it exists in this history item, + % it must match the value from the first item + hb_message:match(RequiredOpts, HistoryItem, only_present) + end, + RemainingItems + ), + true ?= NoRequiredKeysModified orelse {error, required_key_modified}, + % If we've made it this far, everything is valid + ?event({validate_node_history_items, all_items_valid}), + {ok, valid} + else + {error, values_invalid} -> + ?event({validate_node_history_items, validation_failed, invalid_values}), + {error, invalid_values}; + {error, required_key_modified} -> + ?event({validate_node_history_items, validation_failed, required_key_modified}), + {error, modified_required_key}; + _ -> 
+ ?event({validate_node_history_items, validation_failed, unknown}), + {error, validation_failed} + end. +``` + +### global_get_test + +```erlang +global_get_test() -> + ?assertEqual(debug, ?MODULE:get(mode)), + ?assertEqual(debug, ?MODULE:get(mode, production)), + ?assertEqual(undefined, ?MODULE:get(unset_global_key)), + ?assertEqual(1234, ?MODULE:get(unset_global_key, 1234)). +``` + +### local_get_test + +```erlang +local_get_test() -> + Local = #{ only => local }, + ?assertEqual(undefined, + ?MODULE:get(test_key, undefined, Local)), + ?assertEqual(correct, + ?MODULE:get(test_key, undefined, Local#{ test_key => correct })). +``` + +### local_preference_test + +```erlang +local_preference_test() -> + Local = #{ prefer => local }, + ?assertEqual(correct, + ?MODULE:get(test_key, undefined, Local#{ test_key => correct })), + ?assertEqual(correct, + ?MODULE:get(mode, undefined, Local#{ mode => correct })), + ?assertNotEqual(undefined, + ?MODULE:get(mode, undefined, Local)). +``` + +### global_preference_test + +```erlang +global_preference_test() -> + Global = #{ prefer => global }, + ?assertEqual(undefined, ?MODULE:get(test_key, undefined, Global)), + ?assertNotEqual(incorrect, + ?MODULE:get(mode, undefined, Global#{ mode => incorrect })), + ?assertNotEqual(undefined, ?MODULE:get(mode, undefined, Global)). +``` + +### load_flat_test + +```erlang +load_flat_test() -> + % File contents: + % port: 1234 + % host: https://ao.computer + % await-inprogress: false + {ok, Conf} = load("test/config.flat", #{}), + ?event({loaded, {explicit, Conf}}), + % Ensure we convert types as expected. 
+``` + +### load_json_test + +```erlang +load_json_test() -> + {ok, Conf} = load("test/config.json", #{}), + ?event(debug_node_msg, {loaded, Conf}), + ?assertEqual(1234, hb_maps:get(port, Conf)), + ?assertEqual(9001, hb_maps:get(example, Conf)), + % A binary + ?assertEqual(<<"https://ao.computer">>, hb_maps:get(host, Conf)), + % An atom, where the key contained a header-key `-' rather than a `_'. +``` + +### as_identity_test + +```erlang +as_identity_test() -> + DefaultWallet = ar_wallet:new(), + TestWallet1 = ar_wallet:new(), + TestWallet2 = ar_wallet:new(), + TestID2 = hb_util:human_id(TestWallet2), + Opts = #{ + test_key => 0, + priv_wallet => DefaultWallet, + identities => #{ + <<"testname-1">> => #{ + priv_wallet => TestWallet1, + test_key => 1 + }, + TestID2 => #{ + priv_wallet => TestWallet2, + test_key => 2 + } + } + }, + ?event({base_opts, Opts}), + Identities = identities(Opts), + ?event({identities, Identities}), + % The number of identities should be 5: `default`, its ID, `testname-1`, + % and its ID, and just the ID of `TestWallet2`. 
+``` + +### ensure_node_history_test + +```erlang +ensure_node_history_test() -> + % Define some test data + RequiredOpts = #{ + key1 => + #{ + <<"type">> => <<"string">>, + <<"value">> => <<"value1">> + }, + key2 => <<"value2">> + }, + % Test case: All items have required options + ValidOpts = + #{ + <<"key1">> => + #{ + <<"type">> => <<"string">>, + <<"value">> => <<"value1">> + }, + <<"key2">> => <<"value2">>, + <<"extra">> => <<"value">>, + node_history => [ + #{ + <<"key1">> => + #{ + <<"type">> => <<"string">>, + <<"value">> => <<"value1">> + }, + <<"key2">> => <<"value2">>, + <<"extra">> => <<"value">> + }, + #{ + <<"key1">> => + #{ + <<"type">> => <<"string">>, + <<"value">> => <<"value1">> + }, + <<"key2">> => <<"value2">> + } + ] + }, + ?assertEqual({ok, valid}, ensure_node_history(ValidOpts, RequiredOpts)), + ?event({valid_items, ValidOpts}), + % Test Missing items + MissingItems = + #{ + <<"key1">> => + #{ + <<"type">> => <<"string">>, + <<"value">> => <<"value1">> + }, + node_history => [ + #{ + <<"key1">> => + #{ + <<"type">> => <<"string">>, + <<"value">> => <<"value1">> + } + % missing key2 + } + ] + }, + ?assertEqual({error, invalid_values}, ensure_node_history(MissingItems, RequiredOpts)), + ?event({missing_items, MissingItems}), + % Test Invalid items + InvalidItems = + #{ + <<"key1">> => + #{ + <<"type">> => <<"string">>, + <<"value">> => <<"value">> + }, + <<"key2">> => <<"value2">>, + node_history => + [ + #{ + <<"key1">> => + #{ + <<"type">> => <<"string">>, + <<"value">> => <<"value2">> + }, + <<"key2">> => <<"value3">> + } + ] + }, + ?assertEqual({error, invalid_values}, ensure_node_history(InvalidItems, RequiredOpts)). 
+``` + +--- + +*Generated from [hb_opts.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_opts.erl)* diff --git a/docs/book/src/hb_path.erl.md b/docs/book/src/hb_path.erl.md new file mode 100644 index 000000000..dfbb24428 --- /dev/null +++ b/docs/book/src/hb_path.erl.md @@ -0,0 +1,695 @@ +# hb_path + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_path.erl) + +This module provides utilities for manipulating the paths of a +message: Its request path (referred to in messages as just the `Path`), and +its HashPath. +A HashPath is a rolling Merkle list of the messages that have been applied +in order to generate a given message. Because applied messages can +themselves be the result of message applications with the AO-Core protocol, +the HashPath can be thought of as the tree of messages that represent the +history of a given message. The initial message on a HashPath is referred to +by its ID and serves as its user-generated 'root'. +Specifically, the HashPath can be generated by hashing the previous HashPath +and the current message. This means that each message in the HashPath is +dependent on all previous messages. +
+    Msg1.HashPath = Msg1.ID
+    Msg3.HashPath = Msg1.Hash(Msg1.HashPath, Msg2.ID)
+    Msg3.{...} = AO-Core.apply(Msg1, Msg2)
+    ...
+
+A message's ID itself includes its HashPath, leading to the mixing of +a Msg2's merkle list into the resulting Msg3's HashPath. This allows a single +message to represent a history _tree_ of all of the messages that were +applied to generate it -- rather than just a linear history. +A message may also specify its own algorithm for generating its HashPath, +which allows for custom logic to be used for representing the history of a +message. When Msg2's are applied to a Msg1, the resulting Msg3's HashPath +will be generated according to Msg1's algorithm choice. + +--- + +## Exported Functions + +- `from_message/3` +- `hashpath_alg/2` +- `hashpath/2` +- `hashpath/3` +- `hashpath/4` +- `hd/2` +- `matches/2` +- `normalize/1` +- `pop_request/2` +- `priv_remaining/2` +- `priv_store_remaining/2` +- `priv_store_remaining/3` +- `push_request/2` +- `push_request/3` +- `queue_request/2` +- `queue_request/3` +- `regex_matches/2` +- `term_to_path_parts/1` +- `term_to_path_parts/2` +- `tl/2` +- `to_binary/1` +- `verify_hashpath/2` + +--- + +### hd + +This module provides utilities for manipulating the paths of a +Extract the first key from a `Message2`'s `Path` field. + +```erlang +hd(Msg2, Opts) -> + %?event({key_from_path, Msg2, Opts}), + case pop_request(Msg2, Opts) of + undefined -> undefined; + {Head, _} -> + % `term_to_path' returns the full path, so we need to take the + % `hd' of our `Head'. +``` + +### tl + +Return the message without its first path element. Note that this + +```erlang +tl(Msg2, Opts) when is_map(Msg2) -> + case pop_request(Msg2, Opts) of + undefined -> undefined; + {_, Rest} -> Rest + end; +``` + +### tl + +Return the message without its first path element. Note that this + +```erlang +tl(Path, Opts) when is_list(Path) -> + case tl(#{ <<"path">> => Path }, Opts) of + [] -> undefined; + undefined -> undefined; + #{ <<"path">> := Rest } -> Rest + end. 
+``` + +### priv_remaining + +Return the `Remaining-Path` of a message, from its hidden `AO-Core` +Store the remaining path of a message in its hidden `AO-Core` key. + +```erlang +priv_remaining(Msg, Opts) -> + Priv = hb_private:from_message(Msg), + AOCore = hb_maps:get(<<"ao-core">>, Priv, #{}, Opts), + hb_maps:get(<<"remaining">>, AOCore, undefined, Opts). +``` + +### priv_store_remaining + +Return the `Remaining-Path` of a message, from its hidden `AO-Core` +Store the remaining path of a message in its hidden `AO-Core` key. + +```erlang +priv_store_remaining(Msg, RemainingPath) -> + priv_store_remaining(Msg, RemainingPath, #{}). +``` + +### priv_store_remaining + +```erlang +priv_store_remaining(Msg, RemainingPath, Opts) -> + Priv = hb_private:from_message(Msg), + AOCore = hb_maps:get(<<"ao-core">>, Priv, #{}, Opts), + Msg#{ + <<"priv">> => + Priv#{ + <<"ao-core">> => + AOCore#{ + <<"remaining">> => RemainingPath + } + } + }. +``` + +### hashpath + +Add an ID of a Msg2 to the HashPath of another message. + +```erlang +hashpath(Bin, _Opts) when is_binary(Bin) -> + % Default hashpath for a binary message is its SHA2-256 hash. +``` + +### hashpath + +```erlang +hashpath(RawMsg1, Opts) -> + Msg1 = hb_ao:normalize_keys(RawMsg1, Opts), + case hb_private:from_message(Msg1) of + #{ <<"hashpath">> := HP } -> HP; + _ -> + % Note: We do not use `hb_message:id' here because it will call + % hb_ao:resolve, which will call `hashpath' recursively. +``` + +### hashpath + +```erlang +hashpath(Msg1, Msg2, Opts) when is_map(Msg1) -> + Msg1Hashpath = hashpath(Msg1, Opts), + HashpathAlg = hashpath_alg(Msg1, Opts), + hashpath(Msg1Hashpath, Msg2, HashpathAlg, Opts); +``` + +### hashpath + +```erlang +hashpath(Msg1, Msg2, Opts) -> + throw({hashpath_not_viable, Msg1, Msg2, Opts}). 
+``` + +### hashpath + +```erlang +hashpath(Msg1, Msg2, HashpathAlg, Opts) when is_map(Msg2) -> + Msg2WithoutMeta = hb_maps:without(?AO_CORE_KEYS, Msg2, Opts), + ReqPath = from_message(request, Msg2, Opts), + case {map_size(Msg2WithoutMeta), ReqPath} of + {0, _} when ReqPath =/= undefined -> + hashpath(Msg1, to_binary(hd(ReqPath)), HashpathAlg, Opts); + _ -> + {ok, Msg2ID} = + dev_message:id( + Msg2, + #{ <<"commitments">> => <<"all">> }, + Opts + ), + hashpath(Msg1, hb_util:human_id(Msg2ID), HashpathAlg, Opts) + end; +``` + +### hashpath + +```erlang +hashpath(Msg1Hashpath, HumanMsg2ID, HashpathAlg, Opts) -> + ?event({hashpath, {msg1hp, {explicit, Msg1Hashpath}}, {msg2id, {explicit, HumanMsg2ID}}}), + HP = + case term_to_path_parts(Msg1Hashpath, Opts) of + [_] -> + << Msg1Hashpath/binary, "/", HumanMsg2ID/binary >>; + [Prev1, Prev2] -> + % Calculate the new base of the hashpath. We check whether the key is + % a human-readable binary ID, or a path part, and convert or pass + % through accordingly. +``` + +### hashpath_alg + +Get the hashpath function for a message from its HashPath-Alg. + +```erlang +hashpath_alg(Msg, Opts) -> + case dev_message:get(<<"hashpath-alg">>, Msg, Opts) of + {ok, <<"sha-256-chain">>} -> + fun hb_crypto:sha256_chain/2; + {ok, <<"accumulate-256">>} -> + fun hb_crypto:accumulate/2; + {error, not_found} -> + fun hb_crypto:sha256_chain/2 + end. +``` + +### push_request + +Add a message to the head (next to execute) of a request path. + +```erlang +push_request(Msg, Path) -> + push_request(Msg, Path, #{}). +``` + +### push_request + +Pop the next element from a request path or path list. + +```erlang +push_request(Msg, Path, Opts) -> + hb_maps:put(<<"path">>, term_to_path_parts(Path, Opts) ++ from_message(request, Msg, Opts), Msg, Opts). +``` + +### pop_request + +Pop the next element from a request path or path list. 
+ +```erlang +pop_request(undefined, _Opts) -> undefined; +``` + +### pop_request + +Pop the next element from a request path or path list. + +```erlang +pop_request(Msg, Opts) when is_map(Msg) -> + %?event({popping_request, {msg, Msg}, {opts, Opts}}), + case pop_request(from_message(request, Msg, Opts), Opts) of + undefined -> undefined; + {undefined, _} -> undefined; + {Head, []} -> {Head, undefined}; + {Head, Rest} -> + ?event({popped_request, Head, Rest}), + {Head, hb_maps:put(<<"path">>, Rest, Msg, Opts)} + end; +``` + +### pop_request + +Pop the next element from a request path or path list. + +```erlang +pop_request([], _Opts) -> undefined; +``` + +### pop_request + +Pop the next element from a request path or path list. + +```erlang +pop_request([Head|Rest], _Opts) -> + {Head, Rest}. +``` + +### queue_request + +Queue a message at the back of a request path. `path` is the only + +```erlang +queue_request(Msg, Path) -> + queue_request(Msg, Path, #{}). +``` + +### queue_request + +Verify the HashPath of a message, given a list of messages that + +```erlang +queue_request(Msg, Path, Opts) -> + hb_maps:put(<<"path">>, from_message(request, Msg, Opts) ++ term_to_path_parts(Path), Msg, Opts). +``` + +### verify_hashpath + +Verify the HashPath of a message, given a list of messages that + +```erlang +verify_hashpath([Msg1, Msg2, Msg3|Rest], Opts) -> + CorrectHashpath = hashpath(Msg1, Msg2, Opts), + FromMsg3 = from_message(hashpath, Msg3, Opts), + CorrectHashpath == FromMsg3 andalso + case Rest of + [] -> true; + _ -> verify_hashpath([Msg2, Msg3|Rest], Opts) + end. +``` + +### from_message + +Extract the request path or hashpath from a message. We do not use + +```erlang +from_message(Type, Link, Opts) when ?IS_LINK(Link) -> + from_message(Type, hb_cache:ensure_loaded(Link, Opts), Opts); +``` + +### from_message + +Extract the request path or hashpath from a message. 
We do not use + +```erlang +from_message(hashpath, Msg, Opts) -> hashpath(Msg, Opts); +``` + +### from_message + +Extract the request path or hashpath from a message. We do not use + +```erlang +from_message(request, #{ path := Path }, Opts) -> term_to_path_parts(Path, Opts); +``` + +### from_message + +Extract the request path or hashpath from a message. We do not use + +```erlang +from_message(request, #{ <<"path">> := Path }, Opts) -> term_to_path_parts(Path, Opts); +``` + +### from_message + +Extract the request path or hashpath from a message. We do not use + +```erlang +from_message(request, #{ <<"Path">> := Path }, Opts) -> term_to_path_parts(Path, Opts); +``` + +### from_message + +Extract the request path or hashpath from a message. We do not use +Convert a term into an executable path. Supports binaries, lists, and + +```erlang +from_message(request, _, _Opts) -> undefined. +``` + +### term_to_path_parts + +Extract the request path or hashpath from a message. We do not use +Convert a term into an executable path. Supports binaries, lists, and + +```erlang +term_to_path_parts(Path) -> + term_to_path_parts(Path, #{ error_strategy => throw }). 
+``` + +### term_to_path_parts + +```erlang +term_to_path_parts(Link, Opts) when ?IS_LINK(Link) -> + term_to_path_parts(hb_cache:ensure_loaded(Link, Opts), Opts); +``` + +### term_to_path_parts + +```erlang +term_to_path_parts([], _Opts) -> undefined; +``` + +### term_to_path_parts + +```erlang +term_to_path_parts(<<>>, _Opts) -> undefined; +``` + +### term_to_path_parts + +```erlang +term_to_path_parts(<<"/">>, _Opts) -> []; +``` + +### term_to_path_parts + +```erlang +term_to_path_parts(Binary, Opts) when is_binary(Binary) -> + case binary:match(Binary, <<"/">>) of + nomatch -> [Binary]; + _ -> + term_to_path_parts( + binary:split(Binary, <<"/">>, [global, trim_all]), + Opts + ) + end; +``` + +### term_to_path_parts + +```erlang +term_to_path_parts(Path = [ASCII | _], _Opts) when is_integer(ASCII) -> + [hb_ao:normalize_key(Path)]; +``` + +### term_to_path_parts + +```erlang +term_to_path_parts(List, Opts) when is_list(List) -> + lists:flatten(lists:map( + fun(Part) -> + term_to_path_parts(Part, Opts) + end, + List + )); +``` + +### term_to_path_parts + +```erlang +term_to_path_parts(Atom, _Opts) when is_atom(Atom) -> [Atom]; +``` + +### term_to_path_parts + +```erlang +term_to_path_parts(Integer, _Opts) when is_integer(Integer) -> + [hb_ao:normalize_key(Integer)]; +``` + +### term_to_path_parts + +```erlang +term_to_path_parts({as, DevName, Msgs}, _Opts) -> + [{as, hb_ao:normalize_key(DevName), Msgs}]. +``` + +### to_binary + +Convert a path of any form to a binary. + +```erlang +to_binary(Path) -> + Parts = binary:split(do_to_binary(Path), <<"/">>, [global, trim_all]), + iolist_to_binary(lists:join(<<"/">>, Parts)). +``` + +### do_to_binary + +Convert a path of any form to a binary. 
+ +```erlang +do_to_binary(Path) when is_list(Path) -> + case hb_util:is_string_list(Path) of + false -> + iolist_to_binary( + lists:join( + "/", + lists:filtermap( + fun(Part) -> + case do_to_binary(Part) of + <<>> -> false; + BinPart -> {true, BinPart} + end + end, + Path + ) + ) + ); + true -> + to_binary(list_to_binary(Path)) + end; +``` + +### do_to_binary + +Convert a path of any form to a binary. + +```erlang +do_to_binary(Path) when is_binary(Path) -> + Path; +``` + +### do_to_binary + +Convert a path of any form to a binary. + +```erlang +do_to_binary(Other) -> + hb_ao:normalize_key(Other). +``` + +### matches + +Check if two keys match. + +```erlang +matches(Key1, Key2) -> + hb_util:to_lower(hb_ao:normalize_key(Key1)) == + hb_util:to_lower(hb_ao:normalize_key(Key2)). +``` + +### regex_matches + +Check if two keys match using regex. + +```erlang +regex_matches(Path1, Path2) -> + NormP1 = normalize(hb_ao:normalize_key(Path1)), + NormP2 = + case hb_ao:normalize_key(Path2) of + Normalized = <<"^", _/binary>> -> Normalized; + Normalized -> normalize(Normalized) + end, + try re:run(NormP1, NormP2) =/= nomatch + catch _A:_B:_C -> false + end. +``` + +### normalize + +Normalize a path to a binary, removing the leading slash if present. + +```erlang +normalize(Path) -> + case iolist_to_binary([Path]) of + BinPath = <<"/", _/binary>> -> BinPath; + Binary -> <<"/", Binary/binary>> + end. +``` + +### hashpath_test + +```erlang +hashpath_test() -> + Msg1 = #{ priv => #{<<"empty">> => <<"message">>} }, + Msg2 = #{ priv => #{<<"exciting">> => <<"message2">>} }, + Hashpath = hashpath(Msg1, Msg2, #{}), + ?assert(is_binary(Hashpath) andalso byte_size(Hashpath) == 87). +``` + +### hashpath_direct_msg2_test + +```erlang +hashpath_direct_msg2_test() -> + Msg1 = #{ <<"base">> => <<"message">> }, + Msg2 = #{ <<"path">> => <<"base">> }, + Hashpath = hashpath(Msg1, Msg2, #{}), + [_, KeyName] = term_to_path_parts(Hashpath), + ?assert(matches(KeyName, <<"base">>)). 
+``` + +### multiple_hashpaths_test + +```erlang +multiple_hashpaths_test() -> + Msg1 = #{ <<"empty">> => <<"message">> }, + Msg2 = #{ <<"exciting">> => <<"message2">> }, + Msg3 = #{ priv => #{<<"hashpath">> => hashpath(Msg1, Msg2, #{}) } }, + Msg4 = #{ <<"exciting">> => <<"message4">> }, + Msg5 = hashpath(Msg3, Msg4, #{}), + ?assert(is_binary(Msg5)). +``` + +### verify_hashpath_test + +```erlang +verify_hashpath_test() -> + Msg1 = #{ <<"test">> => <<"initial">> }, + Msg2 = #{ <<"firstapplied">> => <<"msg2">> }, + Msg3 = #{ priv => #{<<"hashpath">> => hashpath(Msg1, Msg2, #{})} }, + Msg4 = #{ priv => #{<<"hashpath">> => hashpath(Msg2, Msg3, #{})} }, + Msg3Fake = #{ priv => #{<<"hashpath">> => hashpath(Msg4, Msg2, #{})} }, + ?assert(verify_hashpath([Msg1, Msg2, Msg3, Msg4], #{})), + ?assertNot(verify_hashpath([Msg1, Msg2, Msg3Fake, Msg4], #{})). +``` + +### validate_path_transitions + +```erlang +validate_path_transitions(X, Opts) -> + {Head, X2} = pop_request(X, Opts), + ?assertEqual(<<"a">>, Head), + {H2, X3} = pop_request(X2, Opts), + ?assertEqual(<<"b">>, H2), + {H3, X4} = pop_request(X3, Opts), + ?assertEqual(<<"c">>, H3), + ?assertEqual(undefined, pop_request(X4, Opts)). +``` + +### pop_from_message_test + +```erlang +pop_from_message_test() -> + validate_path_transitions(#{ <<"path">> => [<<"a">>, <<"b">>, <<"c">>] }, #{}). +``` + +### pop_from_path_list_test + +```erlang +pop_from_path_list_test() -> + validate_path_transitions([<<"a">>, <<"b">>, <<"c">>], #{}). +``` + +### hd_test + +```erlang +hd_test() -> + ?assertEqual(<<"a">>, hd(#{ <<"path">> => [<<"a">>, <<"b">>, <<"c">>] }, #{})), + ?assertEqual(undefined, hd(#{ <<"path">> => undefined }, #{})). 
+``` + +### tl_test + +```erlang +tl_test() -> + ?assertMatch([<<"b">>, <<"c">>], hb_maps:get(<<"path">>, tl(#{ <<"path">> => [<<"a">>, <<"b">>, <<"c">>] }, #{}))), + ?assertEqual(undefined, tl(#{ <<"path">> => [] }, #{})), + ?assertEqual(undefined, tl(#{ <<"path">> => <<"a">> }, #{})), + ?assertEqual(undefined, tl(#{ <<"path">> => undefined }, #{})), + ?assertEqual([<<"b">>, <<"c">>], tl([<<"a">>, <<"b">>, <<"c">>], #{ })), + ?assertEqual(undefined, tl([<<"c">>], #{ })). +``` + +### to_binary_test + +```erlang +to_binary_test() -> + ?assertEqual(<<"a/b/c">>, to_binary([<<"a">>, <<"b">>, <<"c">>])), + ?assertEqual(<<"a/b/c">>, to_binary(<<"a/b/c">>)), + ?assertEqual(<<"a/b/c">>, to_binary([<<"a">>, <<"b">>, <<"c">>])), + ?assertEqual(<<"a/b/c">>, to_binary(["a", <<"b">>, <<"c">>])), + ?assertEqual(<<"a/b/b/c">>, to_binary([<<"a">>, [<<"b">>, <<"//b">>], <<"c">>])). +``` + +### term_to_path_parts_test + +```erlang +term_to_path_parts_test() -> + ?assert(matches([<<"a">>, <<"b">>, <<"c">>], + term_to_path_parts(<<"a/b/c">>))), + ?assert(matches([<<"a">>, <<"b">>, <<"c">>], + term_to_path_parts([<<"a">>, <<"b">>, <<"c">>]))), + ?assert(matches([<<"a">>, <<"b">>, <<"c">>], + term_to_path_parts(["a", <<"b">>, <<"c">>]))), + ?assert(matches([<<"a">>, <<"b">>, <<"b">>, <<"c">>], + term_to_path_parts([[<<"/a">>, [<<"b">>, <<"//b">>], <<"c">>]]))), + ?assertEqual([], term_to_path_parts(<<"/">>)). +% calculate_multistage_hashpath_test() -> +% Msg1 = #{ <<"base">> => <<"message">> }, +% Msg2 = #{ <<"path">> => <<"2">> }, +% Msg3 = #{ <<"path">> => <<"3">> }, +% Msg4 = #{ <<"path">> => <<"4">> }, +% Msg5 = hashpath(Msg1, [Msg2, Msg3, Msg4], #{}), +% ?assert(is_binary(Msg5)), +% Msg3Path = <<"3">>, +% Msg5b = hashpath(Msg1, [Msg2, Msg3Path, Msg4]), +% ?assertEqual(Msg5, Msg5b). 
+``` + +### regex_matches_test + +```erlang +regex_matches_test() -> + ?assert(regex_matches(<<"a/b/c">>, <<"a/.*/c">>)), + ?assertNot(regex_matches(<<"a/b/c">>, <<"a/.*/d">>)), + ?assert(regex_matches(<<"a/abcd/c">>, <<"a/abc.*/c">>)), + ?assertNot(regex_matches(<<"a/bcd/c">>, <<"a/abc.*/c">>)), + ?assert(regex_matches(<<"a/bcd/ignored/c">>, <<"a/.*/c">>)), +``` + +--- + +*Generated from [hb_path.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_path.erl)* diff --git a/docs/book/src/hb_persistent.erl.md b/docs/book/src/hb_persistent.erl.md new file mode 100644 index 000000000..16e4bcf83 --- /dev/null +++ b/docs/book/src/hb_persistent.erl.md @@ -0,0 +1,585 @@ +# hb_persistent + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_persistent.erl) + +Creates and manages long-lived AO-Core resolution processes. +These can be useful for situations where a message is large and expensive +to serialize and deserialize, or when executions should be deliberately +serialized to avoid parallel executions of the same computation. This +module is called during the core `hb_ao` execution process, so care +must be taken to avoid recursive spawns/loops. +Built using the `pg` module, which is a distributed Erlang process group +manager. + +--- + +## Exported Functions + +- `await/4` +- `default_await/5` +- `default_grouper/3` +- `default_worker/3` +- `find_or_register/3` +- `forward_work/2` +- `group/3` +- `notify/4` +- `start_monitor/0` +- `start_monitor/1` +- `start_worker/2` +- `start_worker/3` +- `stop_monitor/1` +- `unregister_notify/4` + +--- + +### start + +Creates and manages long-lived AO-Core resolution processes. +Ensure that the `pg` module is started. +Start a monitor that prints the current members of the group every + +```erlang +start() -> hb_name:start(). +``` + +### start_monitor + +Creates and manages long-lived AO-Core resolution processes. +Ensure that the `pg` module is started. 
+Start a monitor that prints the current members of the group every + +```erlang +start_monitor() -> + start_monitor(global). +``` + +### start_monitor + +```erlang +start_monitor(Group) -> + start_monitor(Group, #{}). +``` + +### start_monitor + +```erlang +start_monitor(Group, Opts) -> + start(), + ?event({worker_monitor, {start_monitor, Group, hb_name:all()}}), + spawn(fun() -> do_monitor(Group, #{}, Opts) end). +``` + +### stop_monitor + +```erlang +stop_monitor(PID) -> + PID ! stop. +``` + +### do_monitor + +```erlang +do_monitor(Group, Last, Opts) -> + Groups = lists:map(fun({Name, _}) -> Name end, hb_name:all()), + New = + hb_maps:from_list( + lists:map( + fun(G) -> + Pid = hb_name:lookup(G), + { + G, + #{ + pid => Pid, + messages => + case Pid of + undefined -> 0; + _ -> + length( + element(2, + erlang:process_info(Pid, messages) + ) + ) + end + } + } + end, + case Group of + global -> Groups; + TargetGroup -> + case lists:member(TargetGroup, Groups) of + true -> [TargetGroup]; + false -> [] + end + end + ) + ), + Delta = + hb_maps:filter( + fun(G, NewState) -> + case hb_maps:get(G, Last, []) of + NewState -> false; + _ -> true + end + end, + New, + Opts + ), + case hb_maps:size(Delta, Opts) of + 0 -> ok; + Deltas -> + io:format(standard_error, "== Sitrep ==> ~p named processes. ~p changes. ~n", + [hb_maps:size(New, Opts), Deltas]), + hb_maps:map( + fun(G, #{pid := P, messages := Msgs}) -> + io:format(standard_error, "[~p: ~p] #M: ~p~n", [G, P, Msgs]) + end, + Delta, + Opts + ), + io:format(standard_error, "~n", []) + end, + timer:sleep(1000), + receive stop -> stopped + after 0 -> do_monitor(Group, New, Opts) + end. +``` + +### find_or_register + +Register the process to lead an execution if none is found, otherwise + +```erlang +find_or_register(Msg1, Msg2, Opts) -> + GroupName = group(Msg1, Msg2, Opts), + find_or_register(GroupName, Msg1, Msg2, Opts). 
+``` + +### find_or_register + +```erlang +find_or_register(ungrouped_exec, _Msg1, _Msg2, _Opts) -> + {leader, ungrouped_exec}; +``` + +### find_or_register + +```erlang +find_or_register(GroupName, _Msg1, _Msg2, Opts) -> + case hb_opts:get(await_inprogress, false, Opts) of + false -> {leader, GroupName}; + _ -> + Self = self(), + case find_execution(GroupName, Opts) of + {ok, Leader} when Leader =/= Self -> + ?event({found_leader, GroupName, {leader, Leader}}), + {wait, Leader}; + {ok, Leader} when Leader =:= Self -> + {infinite_recursion, GroupName}; + _ -> + ?event({register_resolver, {group, GroupName}}), + register_groupname(GroupName, Opts), + {leader, GroupName} + end + end. +``` + +### unregister_notify + +Unregister as the leader for an execution and notify waiting processes. + +```erlang +unregister_notify(ungrouped_exec, _Msg2, _Msg3, _Opts) -> ok; +``` + +### unregister_notify + +Unregister as the leader for an execution and notify waiting processes. + +```erlang +unregister_notify(GroupName, Msg2, Msg3, Opts) -> + unregister_groupname(GroupName, Opts), + notify(GroupName, Msg2, Msg3, Opts). +``` + +### find_execution + +Find a group with the given name. + +```erlang +find_execution(Groupname, _Opts) -> + start(), + case hb_name:lookup(Groupname) of + undefined -> not_found; + Pid -> {ok, Pid} + end. +``` + +### group + +Calculate the group name for a Msg1 and Msg2 pair. Uses the Msg1's + +```erlang +group(Msg1, Msg2, Opts) -> + Grouper = + hb_maps:get(grouper, hb_ao:info(Msg1, Opts), fun default_grouper/3, Opts), + apply( + Grouper, + hb_ao:truncate_args(Grouper, [Msg1, Msg2, Opts]) + ). +``` + +### register_groupname + +Register for performing an AO-Core resolution. + +```erlang +register_groupname(Groupname, _Opts) -> + ?event({registering_as, Groupname}), + hb_name:register(Groupname). +``` + +### unregister + +Unregister for being the leader on an AO-Core resolution. 
+ +```erlang +unregister(Msg1, Msg2, Opts) -> + start(), + unregister_groupname(group(Msg1, Msg2, Opts), Opts). +``` + +### unregister_groupname + +```erlang +unregister_groupname(Groupname, _Opts) -> + ?event({unregister_resolver, {explicit, Groupname}}), + hb_name:unregister(Groupname). +``` + +### await + +If there was already an Erlang process handling this execution, + +```erlang +await(Worker, Msg1, Msg2, Opts) -> + % Get the device's await function, if it exists. +``` + +### default_await + +Default await function that waits for a resolution from a worker. + +```erlang +default_await(Worker, GroupName, Msg1, Msg2, Opts) -> + % Wait for the result. +``` + +### notify + +Check our inbox for processes that are waiting for the resolution + +```erlang +notify(GroupName, Msg2, Msg3, Opts) -> + case is_binary(GroupName) of + true -> + ?event({notifying_all, {group, GroupName}}); + false -> + ok + end, + receive + {resolve, Listener, GroupName, Msg2, _ListenerOpts} -> + ?event({notifying_listener, {listener, Listener}, {group, GroupName}}), + send_response(Listener, GroupName, Msg2, Msg3), + notify(GroupName, Msg2, Msg3, Opts) + after 0 -> + ?event(finished_notify), + ok + end. +``` + +### forward_work + +Forward requests to a newly delegated execution process. + +```erlang +forward_work(NewPID, Opts) -> + Gather = + fun Gather() -> + receive + Req = {resolve, _, _, _, _} -> [Req | Gather()] + after 0 -> [] + end + end, + ToForward = Gather(), + lists:foreach( + fun(Req) -> + NewPID ! Req + end, + ToForward + ), + case length(ToForward) > 0 of + true -> + ?event({fwded, {reqs, length(ToForward)}, {pid, NewPID}}, Opts); + false -> ok + end, + ok. +``` + +### send_response + +Helper function that wraps responding with a new Msg3. + +```erlang +send_response(Listener, GroupName, Msg2, Msg3) -> + ?event(worker, + {send_response, + {listener, Listener}, + {group, GroupName} + } + ), + Listener ! {resolved, self(), GroupName, Msg2, Msg3}. 
+``` + +### start_worker + +Start a worker process that will hold a message in memory for + +```erlang +start_worker(Msg, Opts) -> + start_worker(group(Msg, undefined, Opts), Msg, Opts). +``` + +### start_worker + +```erlang +start_worker(_, NotMsg, _) when not is_map(NotMsg) -> not_started; +``` + +### start_worker + +```erlang +start_worker(GroupName, Msg, Opts) -> + start(), + ?event(worker_spawns, + {starting_worker, {group, GroupName}, {msg, Msg}, {opts, Opts}} + ), + WorkerPID = spawn( + fun() -> + % If the device's info contains a `worker' function we + % use that instead of the default implementation. +``` + +### default_worker + +A server function for handling persistent executions. + +```erlang +default_worker(GroupName, Msg1, Opts) -> + Timeout = hb_opts:get(worker_timeout, 10000, Opts), + worker_event(GroupName, default_worker_waiting_for_req, Msg1, undefined, Opts), + receive + {resolve, Listener, GroupName, Msg2, ListenerOpts} -> + ?event(worker, + {work_received, + {listener, Listener}, + {group, GroupName} + } + ), + Res = + hb_ao:resolve( + Msg1, + Msg2, + hb_maps:merge(ListenerOpts, Opts, Opts) + ), + send_response(Listener, GroupName, Msg2, Res), + notify(GroupName, Msg2, Res, Opts), + case hb_opts:get(static_worker, false, Opts) of + true -> + % Reregister for the existing group name. +``` + +### default_grouper + +Create a group name from a Msg1 and Msg2 pair as a tuple. + +```erlang +default_grouper(Msg1, Msg2, Opts) -> + %?event({calculating_default_group_name, {msg1, Msg1}, {msg2, Msg2}}), + % Use Erlang's `phash2' to hash the result of the Grouper function. +``` + +### worker_event + +Log an event with the worker process. If we used the default grouper + +```erlang +worker_event(Group, Data, Msg1, Msg2, Opts) when is_integer(Group) -> + ?event(worker, {worker_event, Group, Data, {msg1, Msg1}, {msg2, Msg2}}, Opts); +``` + +### worker_event + +Log an event with the worker process. 
If we used the default grouper + +```erlang +worker_event(Group, Data, _, _, Opts) -> + ?event(worker, {worker_event, Group, Data}, Opts). +``` + +### test_device + +```erlang +test_device() -> test_device(#{}). +``` + +### test_device + +```erlang +test_device(Base) -> + #{ + info => + fun() -> + hb_maps:merge( + #{ + grouper => + fun(M1, _M2, _Opts) -> + erlang:phash2(M1) + end + }, + Base + ) + end, + slow_key => + fun(_, #{ <<"wait">> := Wait }) -> + ?event({slow_key_wait_started, Wait}), + receive after Wait -> + {ok, + #{ + waited => Wait, + pid => self(), + random_bytes => + hb_util:encode(crypto:strong_rand_bytes(4)) + } + } + end + end, + self => + fun(M1, #{ <<"wait">> := Wait }) -> + ?event({self_waiting, {wait, Wait}}), + receive after Wait -> + ?event({self_returning, M1, {wait, Wait}}), + {ok, M1} + end + end + }. +``` + +### spawn_test_client + +```erlang +spawn_test_client(Msg1, Msg2) -> + spawn_test_client(Msg1, Msg2, #{}). +``` + +### spawn_test_client + +```erlang +spawn_test_client(Msg1, Msg2, Opts) -> + Ref = make_ref(), + TestParent = self(), + spawn_link(fun() -> + ?event({new_concurrent_test_resolver, Ref, {executing, Msg2}}), + Res = hb_ao:resolve(Msg1, Msg2, Opts), + ?event({test_worker_got_result, Ref, {result, Res}}), + TestParent ! {result, Ref, Res} + end), + Ref. +``` + +### wait_for_test_result + +```erlang +wait_for_test_result(Ref) -> + receive {result, Ref, Res} -> Res end. +``` + +### deduplicated_execution_test + +Test merging and returning a value with a persistent worker. + +```erlang +deduplicated_execution_test() -> + TestTime = 200, + Msg1 = #{ <<"device">> => test_device() }, + Msg2 = #{ <<"path">> => <<"slow_key">>, <<"wait">> => TestTime }, + T0 = hb:now(), + Ref1 = spawn_test_client(Msg1, Msg2), + receive after 100 -> ok end, + Ref2 = spawn_test_client(Msg1, Msg2), + Res1 = wait_for_test_result(Ref1), + Res2 = wait_for_test_result(Ref2), + T1 = hb:now(), + % Check the result is the same. 
+``` + +### persistent_worker_test + +Test spawning a default persistent worker. + +```erlang +persistent_worker_test() -> + TestTime = 200, + Msg1 = #{ <<"device">> => test_device() }, + link(start_worker(Msg1, #{ static_worker => true })), + receive after 10 -> ok end, + Msg2 = #{ <<"path">> => <<"slow_key">>, <<"wait">> => TestTime }, + Msg3 = #{ <<"path">> => <<"slow_key">>, <<"wait">> => trunc(TestTime*1.1) }, + Msg4 = #{ <<"path">> => <<"slow_key">>, <<"wait">> => trunc(TestTime*1.2) }, + T0 = hb:now(), + Ref1 = spawn_test_client(Msg1, Msg2), + Ref2 = spawn_test_client(Msg1, Msg3), + Ref3 = spawn_test_client(Msg1, Msg4), + Res1 = wait_for_test_result(Ref1), + Res2 = wait_for_test_result(Ref2), + Res3 = wait_for_test_result(Ref3), + T1 = hb:now(), + ?assertNotEqual(Res1, Res2), + ?assertNotEqual(Res2, Res3), + ?assert(T1 - T0 >= (3*TestTime)). +``` + +### spawn_after_execution_test + +```erlang +spawn_after_execution_test() -> + ?event(<<"">>), + TestTime = 500, + Msg1 = #{ <<"device">> => test_device() }, + Msg2 = #{ <<"path">> => <<"self">>, <<"wait">> => TestTime }, + Msg3 = #{ <<"path">> => <<"slow_key">>, <<"wait">> => trunc(TestTime*1.1) }, + Msg4 = #{ <<"path">> => <<"slow_key">>, <<"wait">> => trunc(TestTime*1.2) }, + T0 = hb:now(), + Ref1 = + spawn_test_client( + Msg1, + Msg2, + #{ + spawn_worker => true, + static_worker => true, + hashpath => ignore + } + ), + receive after 10 -> ok end, + Ref2 = spawn_test_client(Msg1, Msg3), + Ref3 = spawn_test_client(Msg1, Msg4), + Res1 = wait_for_test_result(Ref1), + Res2 = wait_for_test_result(Ref2), + Res3 = wait_for_test_result(Ref3), + T1 = hb:now(), + ?assertNotEqual(Res1, Res2), + ?assertNotEqual(Res2, Res3), +``` + +--- + +*Generated from [hb_persistent.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_persistent.erl)* diff --git a/docs/book/src/hb_private.erl.md b/docs/book/src/hb_private.erl.md new file mode 100644 index 000000000..353a46deb --- /dev/null +++ b/docs/book/src/hb_private.erl.md 
@@ -0,0 +1,279 @@ +# hb_private + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_private.erl) + +This module provides basic helper utilities for managing the +private element of a message, which can be used to store state that is +not included in serialized messages, or those granted to users via the +APIs. Private elements of a message can be useful for storing state that +is only relevant temporarily. For example, a device might use the private +element to store a cache of values that are expensive to recompute. They +should _not_ be used for encoding state that makes the execution of a +device non-deterministic (unless you are sure you know what you are doing). +The `set` and `get` functions of this module allow you to run those keys +as AO-Core paths if you would like to have private `devices` in the +messages non-public zone. +See `hb_ao` for more information about the AO-Core protocol +and private elements of messages. + +--- + +## Exported Functions + +- `from_message/1` +- `get/3` +- `get/4` +- `is_private/1` +- `merge/3` +- `opts/1` +- `reset/1` +- `set_priv/2` +- `set/3` +- `set/4` + +--- + +### from_message + +This module provides basic helper utilities for managing the +Return the `private` key from a message. If the key does not exist, an + +```erlang +from_message(Msg) when is_map(Msg) -> + case maps:is_key(<<"priv">>, Msg) of + true -> maps:get(<<"priv">>, Msg, #{}); + false -> maps:get(priv, Msg, #{}) + end; +``` + +### from_message + +This module provides basic helper utilities for managing the +Return the `private` key from a message. If the key does not exist, an +Helper for getting a value from the private element of a message. Uses + +```erlang +from_message(_NonMapMessage) -> #{}. +``` + +### get + +This module provides basic helper utilities for managing the +Return the `private` key from a message. If the key does not exist, an +Helper for getting a value from the private element of a message. 
Uses + +```erlang +get(Key, Msg, Opts) -> + get(Key, Msg, not_found, Opts). +``` + +### get + +```erlang +get(InputPath, Msg, Default, Opts) -> + % Resolve the path against the private element of the message. +``` + +### set + +Helper function for setting a key in the private element of a message. + +```erlang +set(Msg, InputPath, Value, Opts) -> + Path = remove_private_specifier(InputPath, Opts), + Priv = from_message(Msg), + ?event({set_private, {in, Path}, {out, Path}, {value, Value}, {opts, Opts}}), + NewPriv = hb_util:deep_set(Path, Value, Priv, opts(Opts)), + ?event({set_private_res, {out, NewPriv}}), + set_priv(Msg, NewPriv). +``` + +### set + +```erlang +set(Msg, PrivMap, Opts) -> + CurrentPriv = from_message(Msg), + ?event({set_private, {in, PrivMap}, {opts, Opts}}), + NewPriv = hb_util:deep_merge(CurrentPriv, PrivMap, opts(Opts)), + ?event({set_private_res, {out, NewPriv}}), + set_priv(Msg, NewPriv). +``` + +### merge + +Merge the private elements of two messages into one. The keys in the + +```erlang +merge(Msg1, Msg2, Opts) -> + % Merge the private elements of the two messages. +``` + +### set_priv + +Helper function for setting the complete private element of a message. + +```erlang +set_priv(Msg, PrivMap) + when map_size(PrivMap) =:= 0 andalso not is_map_key(<<"priv">>, Msg) -> + Msg; +``` + +### set_priv + +Helper function for setting the complete private element of a message. +Check if a key is private. + +```erlang +set_priv(Msg, PrivMap) -> + Msg#{ <<"priv">> => PrivMap }. +``` + +### is_private + +Helper function for setting the complete private element of a message. +Check if a key is private. + +```erlang +is_private(Key) -> + try hb_util:bin(Key) of + <<"priv", _/binary>> -> true; + _ -> false + catch _:_ -> false + end. +``` + +### remove_private_specifier + +Remove the first key from the path if it is a private specifier. 
+ +```erlang +remove_private_specifier(InputPath, Opts) -> + case is_private(hd(Path = hb_path:term_to_path_parts(InputPath, Opts))) of + true -> tl(Path); + false -> Path + end. +``` + +### opts + +The opts map that should be used when resolving paths against the + +```erlang +opts(Opts) -> + PrivStore = + case hb_opts:get(priv_store, undefined, Opts) of + undefined -> []; + PrivateStores when is_list(PrivateStores) -> PrivateStores; + PrivateStore -> [PrivateStore] + end, + BaseStore = + case hb_opts:get(store, [], Opts) of + SingleStore when is_map(SingleStore) -> [SingleStore]; + Stores when is_list(Stores) -> Stores + end, + NormStore = PrivStore ++ BaseStore, + Opts#{ + hashpath => ignore, + cache_control => [<<"no-cache">>, <<"no-store">>], + store => NormStore + }. +``` + +### reset + +Unset all of the private keys in a message or deep Erlang term. + +```erlang +reset(Msg) when is_map(Msg) -> + maps:map( + fun(_Key, Val) -> reset(Val) end, + maps:without( + lists:filter(fun is_private/1, maps:keys(Msg)), + Msg + ) + ); +``` + +### reset + +Unset all of the private keys in a message or deep Erlang term. + +```erlang +reset(List) when is_list(List) -> + % Check if any of the terms in the list are private specifiers, return an + % empty list if so. +``` + +### reset + +```erlang +reset(Tuple) when is_tuple(Tuple) -> + list_to_tuple(reset(tuple_to_list(Tuple))); +``` + +### reset + +```erlang +reset(NonMapMessage) -> + NonMapMessage. +``` + +### set_private_test + +```erlang +set_private_test() -> + ?assertEqual( + #{<<"a">> => 1, <<"priv">> => #{<<"b">> => 2}}, + set(#{<<"a">> => 1}, <<"b">>, 2, #{}) + ), + Res = set(#{<<"a">> => 1}, <<"a">>, 1, #{}), + ?assertEqual(#{<<"a">> => 1, <<"priv">> => #{<<"a">> => 1}}, Res), + ?assertEqual( + #{<<"a">> => 1, <<"priv">> => #{<<"a">> => 1}}, + set(Res, <<"a">>, 1, #{}) + ). 
+``` + +### get_private_key_test + +```erlang +get_private_key_test() -> + M1 = #{<<"a">> => 1, <<"priv">> => #{<<"b">> => 2}}, + ?assertEqual(not_found, get(<<"a">>, M1, #{})), + {ok, [<<"a">>]} = hb_ao:resolve(M1, <<"keys">>, #{}), + ?assertEqual(2, get(<<"b">>, M1, #{})), + {error, _} = hb_ao:resolve(M1, <<"priv/a">>, #{}), + {error, _} = hb_ao:resolve(M1, <<"priv">>, #{}). +``` + +### get_deep_key_test + +```erlang +get_deep_key_test() -> + M1 = #{<<"a">> => 1, <<"priv">> => #{<<"b">> => #{<<"c">> => 3}}}, + ?assertEqual(3, get(<<"b/c">>, M1, #{})). +``` + +### priv_opts_store_read_link_test + +```erlang +priv_opts_store_read_link_test() -> + % Write a message to the public store. +``` + +### priv_opts_cache_read_message_test + +```erlang +priv_opts_cache_read_message_test() -> + hb:init(), + PublicStore = [hb_test_utils:test_store(hb_store_lmdb)], + OnlyPrivStore = [hb_test_utils:test_store(hb_store_fs)], + Opts = #{ store => PublicStore, priv_store => OnlyPrivStore }, + PrivOpts = opts(Opts), + % Use the `~scheduler@1.0' and `~process@1.0' infrastructure to write a + % complex message into the public store. +``` + +--- + +*Generated from [hb_private.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_private.erl)* diff --git a/docs/book/src/hb_process_monitor.erl.md b/docs/book/src/hb_process_monitor.erl.md new file mode 100644 index 000000000..ea96dae16 --- /dev/null +++ b/docs/book/src/hb_process_monitor.erl.md @@ -0,0 +1,106 @@ +# hb_process_monitor + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_process_monitor.erl) + +## Exported Functions + +- `start/1` +- `start/2` +- `start/3` +- `stop/1` + +--- + +### start + +```erlang +start(ProcID) -> + start(ProcID, hb_opts:get(default_cron_rate)). +``` + +### start + +```erlang +start(ProcID, Rate) -> + start(ProcID, Rate, hb_client:cron_cursor(ProcID)). 
+``` + +### start + +```erlang +start(ProcID, Rate, Cursor) -> + Logger = hb_logger:start(), + Monitor = spawn( + fun() -> + server( + #state{ + proc_id = ProcID, + cursor = Cursor, + logger = Logger + } + ) + end), + Ticker = spawn(fun() -> ticker(Monitor, Rate) end), + hb_logger:register(Monitor), + hb_logger:log(Monitor, {ok, started_monitor, {ProcID, Rate, Cursor}}), + hb_logger:register(Ticker), + {Monitor, Logger}. +``` + +### stop + +```erlang +stop(PID) -> + PID ! stop. +``` + +### server + +```erlang +server(State) -> + receive + stop -> ok; + tick ->server(handle_crons(State)) + end. +``` + +### handle_crons + +```erlang +handle_crons(State) -> + case hb_client:cron(State#state.proc_id, State#state.cursor) of + {ok, HasNextPage, Results, Cursor} -> + lists:map( + fun(Res) -> + % TODO: Validate this + dev_mu:push(#{ message => Res }, State) + end, + Results + ), + NS = State#state{cursor = Cursor}, + case HasNextPage of + true -> NS; + false -> handle_crons(NS) + end; + Error -> + hb_logger:log(State#state.logger, Error), + State + end. +``` + +### ticker + +```erlang +ticker(Monitor, Rate) -> + case erlang:is_process_alive(Monitor) of + true -> + timer:sleep(Rate), + Monitor ! tick, + ticker(Monitor, Rate); + false -> + ok +``` + +--- + +*Generated from [hb_process_monitor.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_process_monitor.erl)* diff --git a/docs/book/src/hb_router.erl.md b/docs/book/src/hb_router.erl.md new file mode 100644 index 000000000..06c229ae3 --- /dev/null +++ b/docs/book/src/hb_router.erl.md @@ -0,0 +1,44 @@ +# hb_router + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_router.erl) + +Locate a service in the AO network. This module uses +URLs to locate services, so it can be used to locate +nodes using IP addresses or domain names. This also +allows us to use different protocols later, potentially. 
+ +--- + +## Exported Functions + +- `find/2` +- `find/3` + +--- + +### find + +```erlang +find(Type, ID) -> + find(Type, ID, '_'). +``` + +### find + +```erlang +find(Type, ID, Address) -> + find(Type, ID, Address, #{}). +``` + +### find + +```erlang +find(Type, _ID, Address, Opts) -> + case hb_maps:get(Type, hb_opts:get(nodes), undefined, Opts) of + #{ Address := Node } -> {ok, Node}; + undefined -> {error, service_type_not_found} +``` + +--- + +*Generated from [hb_router.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_router.erl)* diff --git a/docs/book/src/hb_singleton.erl.md b/docs/book/src/hb_singleton.erl.md new file mode 100644 index 000000000..12a3cdbca --- /dev/null +++ b/docs/book/src/hb_singleton.erl.md @@ -0,0 +1,1104 @@ +# hb_singleton + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_singleton.erl) + +A parser that translates AO-Core HTTP API requests in TABM format +into an ordered list of messages to evaluate. The details of this format +are described in `docs/ao-core-http-api.md`. +Syntax overview: +
+    Singleton: Message containing keys and a `path` field,
+               which may also contain a query string of key-value pairs.
+    Path:
+        - /Part1/Part2/.../PartN/ => [Part1, Part2, ..., PartN]
+        - /ID/Part2/.../PartN => [ID, Part2, ..., PartN]
+    Part: (Key + Resolution), Device?, #{ K => V}?
+        - Part => #{ path => Part }
+        - `Part&Key=Value => #{ path => Part, Key => Value }`
+        - `Part=Value&... => #{ path => Part, Part => Value, ... }`
+        - `Part&Key => #{ path => Part, Key => true }`
+        - `Part&k1=v1&k2=v2 => #{ path => Part, k1 => `<<"v1">>`, k2 => `<<"v2">>` }`
+        - `Part~Device => {as, Device, #{ path => Part }}`
+        - `Part~D&K1=V1 => {as, D, #{ path => Part, K1 => `<<"v1">>` }}`
+        - `pt&k1+int=1 => #{ path => pt, k1 => 1 }`
+        - `pt~d&k1+int=1 => {as, d, #{ path => pt, k1 => 1 }}`
+        - `(/nested/path) => Resolution of the path /nested/path`
+        - `(/nested/path&k1=v1) => (resolve /nested/path)#{k1 => v1}`
+        - `(/nested/path~D&K1=V1) => (resolve /nested/path)#{K1 => V1}`
+        - `pt&k1+res=(/a/b/c) => #{ path => pt, k1 => (resolve /a/b/c) }`
+    Key:
+        - key: `<<"value">>` => #{ key => `<<"value">>`, ... } for all messages
+        - n.key: `<<"value">>` => #{ key => `<<"value">>`, ... } for Nth message
+        - key+int: 1 => #{ key => 1, ... }
+        - key+res: /nested/path => #{ key => (resolve /nested/path), ... }
+        - N.Key+res=(/a/b/c) => #{ Key => (resolve /a/b/c), ... }
+
+ +--- + +## Exported Functions + +- `from_path/1` +- `from/2` +- `to/1` + +--- + +### append_path + +```erlang +-spec to(list(ao_message())) -> tabm_message(). +to(Messages) -> + % Iterate through all AO-Core messages folding them into the TABM message + % Scopes contains the following map: #{Key => [StageIndex, StageIndex2...]} + % that allows to scope keys to the given stage. +``` + +```erlang +append_path(PathPart, #{<<"path">> := Path} = Message) -> + hb_maps:put(<<"path">>, <>, Message); +``` + +### append_path + +```erlang +-spec to(list(ao_message())) -> tabm_message(). +to(Messages) -> + % Iterate through all AO-Core messages folding them into the TABM message + % Scopes contains the following map: #{Key => [StageIndex, StageIndex2...]} + % that allows to scope keys to the given stage. +``` + +```erlang +append_path(PathPart, Message) -> + hb_maps:put(<<"path">>, <<"/", PathPart/binary>>, Message). +``` + +### type + +```erlang +-spec to(list(ao_message())) -> tabm_message(). +to(Messages) -> + % Iterate through all AO-Core messages folding them into the TABM message + % Scopes contains the following map: #{Key => [StageIndex, StageIndex2...]} + % that allows to scope keys to the given stage. +``` + +```erlang +type(Value) when is_binary(Value) -> binary; +``` + +### type + +```erlang +-spec to(list(ao_message())) -> tabm_message(). +to(Messages) -> + % Iterate through all AO-Core messages folding them into the TABM message + % Scopes contains the following map: #{Key => [StageIndex, StageIndex2...]} + % that allows to scope keys to the given stage. +``` + +```erlang +type(Value) when is_integer(Value) -> integer; +``` + +### type + +Normalize a singleton TABM message into a list of executable AO-Core + +```erlang +-spec to(list(ao_message())) -> tabm_message(). 
+to(Messages) -> + % Iterate through all AO-Core messages folding them into the TABM message + % Scopes contains the following map: #{Key => [StageIndex, StageIndex2...]} + % that allows to scope keys to the given stage. +``` + +```erlang +type(_Value) -> unknown. +``` + +### from + +Normalize a singleton TABM message into a list of executable AO-Core + +```erlang +-spec to(list(ao_message())) -> tabm_message(). +to(Messages) -> + % Iterate through all AO-Core messages folding them into the TABM message + % Scopes contains the following map: #{Key => [StageIndex, StageIndex2...]} + % that allows to scope keys to the given stage. +``` + +```erlang +from(RawMsg, Opts) when is_binary(RawMsg) -> + from(#{ <<"path">> => RawMsg }, Opts); +``` + +### from + +Normalize a singleton TABM message into a list of executable AO-Core + +```erlang +-spec to(list(ao_message())) -> tabm_message(). +to(Messages) -> + % Iterate through all AO-Core messages folding them into the TABM message + % Scopes contains the following map: #{Key => [StageIndex, StageIndex2...]} + % that allows to scope keys to the given stage. +``` + +```erlang +from(RawMsg, Opts) -> + RawPath = hb_maps:get(<<"path">>, RawMsg, <<>>), + ?event(parsing, {raw_path, RawPath}), + {ok, Path, Query} = from_path(RawPath), + ?event(parsing, {parsed_path, Path, Query}), + MsgWithoutBasePath = + hb_maps:merge( + hb_maps:remove(<<"path">>, RawMsg), + Query + ), + % 2. Decode, split, and sanitize path segments. Each yields one step message. +``` + +### from_path + +Parse the relative reference into path, query, and fragment. 
+ +```erlang +from_path(RelativeRef) -> + %?event(parsing, {raw_relative_ref, RawRelativeRef}), + %RelativeRef = hb_escape:decode(RawRelativeRef), + Decoded = decode_string(RelativeRef), + ?event(parsing, {parsed_relative_ref, Decoded}), + {Path, QKVList} = + case hb_util:split_depth_string_aware_single("?", Decoded) of + {_Sep, P, QStr} -> {P, cowboy_req:parse_qs(#{ qs => QStr })}; + {no_match, P, <<>>} -> {P, []} + end, + { + ok, + path_parts($/, Path), + hb_maps:from_list(QKVList) + }. +``` + +### path_messages + +Step 2: Decode, split and sanitize the path. Split by `/` but avoid + +```erlang +path_messages(Bin, Opts) when is_binary(Bin) -> + lists:map(fun(Part) -> parse_part(Part, Opts) end, path_parts([$/], Bin)). +``` + +### normalize_base + +Normalize the base path. + +```erlang +normalize_base([]) -> []; +``` + +### normalize_base + +Normalize the base path. + +```erlang +normalize_base([First|Rest]) when ?IS_ID(First) -> [First|Rest]; +``` + +### normalize_base + +Normalize the base path. + +```erlang +normalize_base([{as, DevID, First}|Rest]) -> [{as, DevID, First}|Rest]; +``` + +### normalize_base + +Normalize the base path. + +```erlang +normalize_base([Subres = {resolve, _}|Rest]) -> [Subres|Rest]; +``` + +### normalize_base + +Normalize the base path. +Split the path into segments, filtering out empty segments and + +```erlang +normalize_base(Rest) -> [#{}|Rest]. +``` + +### path_parts + +Normalize the base path. +Split the path into segments, filtering out empty segments and + +```erlang +path_parts(Sep, PathBin) when is_binary(PathBin) -> + Res = lists:filtermap( + fun(Part) -> + case byte_size(Part) of + 0 -> false; + TooLong when TooLong > ?MAX_SEGMENT_LENGTH -> + throw({error, segment_too_long, Part}); + _ -> {true, Part} + end + end, + all_path_parts(Sep, PathBin) + ), + ?event({path_parts, Res}), + Res. +``` + +### all_path_parts + +Extract all of the parts from the binary, given (a list of) separators. 
+ +```erlang +all_path_parts(_Sep, <<>>) -> []; +``` + +### all_path_parts + +Extract all of the parts from the binary, given (a list of) separators. + +```erlang +all_path_parts(Sep, Bin) -> + hb_util:split_depth_string_aware(Sep, Bin). +``` + +### part + +Extract the characters from the binary until a separator is found. + +```erlang +part(Sep, Bin) when not is_list(Sep) -> + part([Sep], Bin); +``` + +### part + +Extract the characters from the binary until a separator is found. + +```erlang +part(Seps, Bin) -> + hb_util:split_depth_string_aware_single(Seps, Bin). +``` + +### apply_types + +Step 3: Apply types to values and remove specifiers. + +```erlang +apply_types(Msg, Opts) -> + hb_maps:fold( + fun(Key, Val, Acc) -> + {_, K, V} = maybe_typed(Key, Val, Opts), + hb_maps:put(K, V, Acc, Opts) + end, + #{}, + Msg, + Opts + ). +``` + +### group_scoped + +Step 4: Group headers/query by N-scope. + +```erlang +group_scoped(Map, Msgs) -> + {NScope, Global} = + hb_maps:fold( + fun(KeyBin, Val, {Ns, Gs}) -> + case parse_scope(KeyBin) of + {OkN, RealKey} when OkN > 0 -> + Curr = hb_maps:get(OkN, Ns, #{}), + Ns2 = hb_maps:put(OkN, hb_maps:put(RealKey, Val, Curr), Ns), + {Ns2, Gs}; + global -> {Ns, hb_maps:put(KeyBin, Val, Gs)} + end + end, + {#{}, #{}}, + Map + ), + [ + hb_maps:merge(Global, hb_maps:get(N, NScope, #{})) + || + N <- lists:seq(1, length(Msgs)) + ]. +``` + +### parse_scope + +Get the scope of a key. Adds 1 to account for the base message. + +```erlang +parse_scope(KeyBin) -> + case binary:split(KeyBin, <<".">>, [global]) of + [Front, Remainder] -> + case catch erlang:binary_to_integer(Front) of + NInt when is_integer(NInt) -> {NInt + 1, Remainder}; + _ -> throw({error, invalid_scope, KeyBin}) + end; + _ -> global + end. +``` + +### build_messages + +Step 5: Merge the base message with the scoped messages. + +```erlang +build_messages(Msgs, ScopedModifications, Opts) -> + do_build(1, Msgs, ScopedModifications, Opts). 
+``` + +### do_build + +```erlang +do_build(_, [], _, _) -> []; +``` + +### do_build + +```erlang +do_build(I, [{as, DevID, RawMsg} | Rest], ScopedKeys, Opts) when is_map(RawMsg) -> + % We are processing an `as' message. If the path is empty, we need to + % remove it from the message and the additional message, such that AO-Core + % returns only the message with the device specifier changed. If the message + % does have a path, AO-Core will subresolve it. +``` + +### do_build + +```erlang +do_build(I, [Msg | Rest], ScopedKeys, Opts) when not is_map(Msg) -> + [Msg | do_build(I + 1, Rest, ScopedKeys, Opts)]; +``` + +### do_build + +```erlang +do_build(I, [Msg | Rest], ScopedKeys, Opts) -> + Additional = lists:nth(I, ScopedKeys), + Merged = hb_maps:merge(Additional, Msg, Opts), + StepMsg = hb_message:convert( + Merged, + <<"structured@1.0">>, + Opts#{ topic => ao_internal } + ), + ?event(parsing, {build_messages, {base, Msg}, {additional, Additional}}), + [StepMsg | do_build(I + 1, Rest, ScopedKeys, Opts)]. +``` + +### parse_part + +Parse a path part into a message or an ID. + +```erlang +parse_part(ID, _Opts) when ?IS_ID(ID) -> ID; +``` + +### parse_part + +Parse a path part into a message or an ID. + +```erlang +parse_part(Part, Opts) -> + case maybe_subpath(Part, Opts) of + {resolve, Subpath} -> {resolve, Subpath}; + Part -> + case part([$&, $~, $+, $ , $=], Part) of + {no_match, PartKey, <<>>} -> + #{ <<"path">> => PartKey }; + {Sep, PartKey, PartModBin} -> + parse_part_mods( + << Sep:8/integer, PartModBin/binary >>, + #{ <<"path">> => PartKey }, + Opts + ) + end + end. 
+``` + +### parse_part_mods + +Parse part modifiers: + +```erlang +parse_part_mods([], Msg, _Opts) -> Msg; +``` + +### parse_part_mods + +Parse part modifiers: + +```erlang +parse_part_mods(<<>>, Msg, _Opts) -> Msg; +``` + +### parse_part_mods + +Parse part modifiers: + +```erlang +parse_part_mods(<<"~", PartMods/binary>>, Msg, Opts) -> + % Get the string until the end of the device specifier or end of string. +``` + +### parse_part_mods + +```erlang +parse_part_mods(<< "&", InlinedMsgBin/binary >>, Msg, Opts) -> + InlinedKeys = path_parts($&, InlinedMsgBin), + MsgWithInlined = + lists:foldl( + fun(InlinedKey, Acc) -> + {Key, Val} = parse_inlined_key_val(InlinedKey, Opts), + ?event({inlined_key, {explicit, Key}, {explicit, Val}}), + hb_maps:put(Key, Val, Acc) + end, + Msg, + InlinedKeys + ), + MsgWithInlined; +``` + +### parse_part_mods + +```erlang +parse_part_mods(<<$=, InlinedMsgBin/binary>>, M = #{ <<"path">> := Path }, Opts) + when map_size(M) =:= 1, is_binary(Path) -> + parse_part_mods(<< "&", Path/binary, "=", InlinedMsgBin/binary >>, M, Opts); +``` + +### parse_part_mods + +Extrapolate the inlined key-value pair from a path segment. If the + +```erlang +parse_part_mods(<<$+, InlinedMsgBin/binary>>, M = #{ <<"path">> := Path }, Opts) + when map_size(M) =:= 1, is_binary(InlinedMsgBin) -> + parse_part_mods(<< "&", Path/binary, "+", InlinedMsgBin/binary >>, M, Opts). +``` + +### parse_inlined_key_val + +Extrapolate the inlined key-value pair from a path segment. If the + +```erlang +parse_inlined_key_val(Bin, Opts) -> + case part([$=, $&], Bin) of + {no_match, K, <<>>} -> {K, true}; + {$=, K, RawV} -> + V = unquote(RawV), + {_, Key, Val} = maybe_typed(K, maybe_subpath(V, Opts), Opts), + {Key, Val} + end. +``` + +### unquote + +Unquote a string. + +```erlang +unquote(<<"\"", Inner/binary>>) -> + case binary:last(Inner) of + $" -> binary:part(Inner, 0, byte_size(Inner) - 1); + _ -> Inner + end; +``` + +### unquote + +Unquote a string. 
+Attempt Cowboy URL decode, then sanitize the result. + +```erlang +unquote(Bin) -> Bin. +``` + +### decode_string + +Unquote a string. +Attempt Cowboy URL decode, then sanitize the result. + +```erlang +decode_string(B) -> + case catch uri_string:unquote(B) of + DecodedBin when is_binary(DecodedBin) -> DecodedBin; + _ -> throw({error, cannot_decode, B}) + end. +``` + +### maybe_subpath + +Check if the string is a subpath, returning it in parsed form, + +```erlang +maybe_subpath(Str, Opts) when byte_size(Str) >= 2 -> + case {binary:first(Str), binary:last(Str)} of + {$(, $)} -> + Inside = binary:part(Str, 1, byte_size(Str) - 2), + {resolve, from(#{ <<"path">> => Inside }, Opts)}; + _ -> Str + end; +``` + +### maybe_subpath + +Check if the string is a subpath, returning it in parsed form, +Parse a key's type (applying it to the value) and device name if present. + +```erlang +maybe_subpath(Other, _Opts) -> Other. +``` + +### maybe_typed + +Check if the string is a subpath, returning it in parsed form, +Parse a key's type (applying it to the value) and device name if present. + +```erlang +maybe_typed(Key, Value, Opts) -> + case part([$+, $ ], Key) of + {no_match, OnlyKey, <<>>} -> {untyped, OnlyKey, Value}; + {_, OnlyKey, Type} -> + case {Type, Value} of + {<<"resolve">>, Subpath} -> + % If the value needs to be resolved before it is converted, + % use the `Codec/1.0' device to resolve it. +``` + +### maybe_join + +Join a list of items with a separator, or return the first item if there + +```erlang +maybe_join(Items, Sep) -> + case length(Items) of + 0 -> <<>>; + 1 -> hd(Items); + _ -> iolist_to_binary(lists:join(Sep, Items)) + end. 
+``` + +### parse_explicit_message_test + +```erlang +parse_explicit_message_test() -> + Singleton1 = #{ + <<"path">> => <<"/a">>, + <<"a">> => <<"b">> + }, + ?assertEqual( + [ + #{ <<"a">> => <<"b">>}, + #{ <<"path">> => <<"a">>, <<"a">> => <<"b">> } + ], + from(Singleton1, #{}) + ), + DummyID = hb_util:human_id(crypto:strong_rand_bytes(32)), + Singleton2 = #{ + <<"path">> => <<"/", DummyID/binary, "/a">> + }, + ?assertEqual([DummyID, #{ <<"path">> => <<"a">> }], from(Singleton2, #{})), + Singleton3 = #{ + <<"path">> => <<"/", DummyID/binary, "/a">>, + <<"a">> => <<"b">> + }, + ?assertEqual( + [DummyID, #{ <<"path">> => <<"a">>, <<"a">> => <<"b">> }], + from(Singleton3, #{}) + ). +``` + +### to_suite_test_ + +```erlang +to_suite_test_() -> + [ + fun simple_to_test/0, + fun multiple_messages_to_test/0, + fun basic_hashpath_to_test/0, + fun scoped_key_to_test/0, + fun typed_key_to_test/0, + fun subpath_in_key_to_test/0, + fun subpath_in_path_to_test/0, + fun inlined_keys_to_test/0, + fun multiple_inlined_keys_to_test/0, + fun subpath_in_inlined_to_test/0 + ]. +``` + +### simple_to_test + +```erlang +simple_to_test() -> + Messages = [ + #{<<"test-key">> => <<"test-value">>}, + #{<<"path">> => <<"a">>, <<"test-key">> => <<"test-value">>} + ], + Expected = #{<<"path">> => <<"/a">>, <<"test-key">> => <<"test-value">>}, + ?assertEqual(Expected, to(Messages)), + ?assertEqual(Messages, from(to(Messages), #{})). +``` + +### multiple_messages_to_test + +```erlang +multiple_messages_to_test() -> + Messages = + [ + #{<<"test-key">> => <<"test-value">>}, + #{<<"path">> => <<"a">>, <<"test-key">> => <<"test-value">>}, + #{<<"path">> => <<"b">>, <<"test-key">> => <<"test-value">>}, + #{<<"path">> => <<"c">>, <<"test-key">> => <<"test-value">>} + ], + Expected = #{ + <<"path">> => <<"/a/b/c">>, + <<"test-key">> => <<"test-value">> + }, + ?assertEqual(Expected, to(Messages)), + ?assertEqual(Messages, from(to(Messages), #{})). 
+``` + +### basic_hashpath_to_test + +```erlang +basic_hashpath_to_test() -> + Messages = [ + <<"e5ohB7TgMYRoc0BLllkmAqkqLy1SrliEkOPJlNPXBQ8">>, + #{<<"method">> => <<"GET">>, <<"path">> => <<"some-other">>} + ], + Expected = #{ + <<"path">> => <<"/e5ohB7TgMYRoc0BLllkmAqkqLy1SrliEkOPJlNPXBQ8/some-other">>, + <<"method">> => <<"GET">> + }, + ?assertEqual(Expected, to(Messages)), + ?assertEqual(Messages, from(to(Messages), #{})). +``` + +### scoped_key_to_test + +```erlang +scoped_key_to_test() -> + Messages = [ + #{}, + #{<<"path">> => <<"a">>}, + #{<<"path">> => <<"b">>, <<"test-key">> => <<"test-value">>}, + #{<<"path">> => <<"c">>} + ], + Expected = #{<<"2.test-key">> => <<"test-value">>, <<"path">> => <<"/a/b/c">>}, + ?assertEqual(Expected, to(Messages)), + ?assertEqual(Messages, from(to(Messages), #{})). +``` + +### typed_key_to_test + +```erlang +typed_key_to_test() -> + Messages = + [ + #{}, + #{<<"path">> => <<"a">>}, + #{<<"path">> => <<"b">>, <<"test-key">> => 123}, + #{<<"path">> => <<"c">>} + ], + Expected = #{<<"2.test-key+integer">> => <<"123">>, <<"path">> => <<"/a/b/c">>}, + ?assertEqual(Expected, to(Messages)), + ?assertEqual(Messages, from(to(Messages), #{})). +``` + +### subpath_in_key_to_test + +```erlang +subpath_in_key_to_test() -> + Messages = [ + #{}, + #{<<"path">> => <<"a">>}, + #{ + <<"path">> => <<"b">>, + <<"test-key">> => + {resolve, + [ + #{}, + #{<<"path">> => <<"x">>}, + #{<<"path">> => <<"y">>}, + #{<<"path">> => <<"z">>} + ] + } + }, + #{<<"path">> => <<"c">>} + ], + Expected = #{<<"2.test-key+resolve">> => <<"/x/y/z">>, <<"path">> => <<"/a/b/c">>}, + ?assertEqual(Expected, to(Messages)), + ?assertEqual(Messages, from(to(Messages), #{})). 
+``` + +### subpath_in_path_to_test + +```erlang +subpath_in_path_to_test() -> + Messages = [ + #{}, + #{<<"path">> => <<"a">>}, + {resolve, + [ + #{}, + #{<<"path">> => <<"x">>}, + #{<<"path">> => <<"y">>}, + #{<<"path">> => <<"z">>} + ] + }, + #{<<"path">> => <<"z">>} + ], + Expected = #{ + <<"path">> => <<"/a/(x/y/z)/z">> + }, + ?assertEqual(Expected, to(Messages)), + ?assertEqual(Messages, from(to(Messages), #{})). +``` + +### inlined_keys_to_test + +```erlang +inlined_keys_to_test() -> + Messages = + [ + #{<<"method">> => <<"POST">>}, + #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"a">> + }, + #{ + <<"k1">> => <<"v1">>, + <<"method">> => <<"POST">>, + <<"path">> => <<"b">> + }, + #{ + <<"k2">> => <<"v2">>, + <<"method">> => <<"POST">>, + <<"path">> => <<"c">> + } + ], + % NOTE: The implementation above does not convert the given list of messages + % into the original format, however it assures that the `to/1' and `from/1' + % operations are idempotent. +``` + +### multiple_inlined_keys_to_test + +```erlang +multiple_inlined_keys_to_test() -> + Messages = [ + #{<<"method">> => <<"POST">>}, + #{<<"method">> => <<"POST">>, <<"path">> => <<"a">>}, + #{ + <<"k1">> => <<"v1">>, + <<"k2">> => <<"v2">>, + <<"method">> => <<"POST">>, + <<"path">> => <<"b">> + } + ], + % NOTE: The implementation above does not convert the given list of messages + % into the original format, however it assures that the `to/1' and `from/1' + % operations are idempotent. +``` + +### subpath_in_inlined_to_test + +```erlang +subpath_in_inlined_to_test() -> + Messages = [ + #{}, + #{<<"path">> => <<"part1">>}, + #{<<"b">> => + {resolve, + [#{}, + #{<<"path">> => <<"x">>}, + #{<<"path">> => <<"y">>}]}, + <<"path">> => <<"part2">>, + <<"test">> => <<"1">>}, + #{<<"path">> => <<"part3">>}], + % NOTE: The implementation above does not convert the given list of messages + % into the original format, however it assures that the `to/1' and `from/1' + % operations are idempotent. 
+``` + +### single_message_test + +```erlang +single_message_test() -> + % This is a singleton TABM message + Req = #{ + <<"path">> => <<"/a">>, + <<"test-key">> => <<"test-value">> + }, + Msgs = from(Req, #{}), + ?assertEqual(2, length(Msgs)), + ?assert(is_map(hd(Msgs))), + ?assertEqual(<<"test-value">>, hb_maps:get(<<"test-key">>, hd(Msgs))). +``` + +### basic_hashpath_test + +```erlang +basic_hashpath_test() -> + Hashpath = hb_util:human_id(crypto:strong_rand_bytes(32)), + Path = <<"/", Hashpath/binary, "/some-other">>, + Req = #{ + <<"path">> => Path, + <<"method">> => <<"GET">> + }, + Msgs = from(Req, #{}), + ?assertEqual(2, length(Msgs)), + [Base, Msg2] = Msgs, + ?assertEqual(Base, Hashpath), + ?assertEqual(<<"GET">>, hb_maps:get(<<"method">>, Msg2)), + ?assertEqual(<<"some-other">>, hb_maps:get(<<"path">>, Msg2)). +``` + +### multiple_messages_test + +```erlang +multiple_messages_test() -> + Req = #{ + <<"path">> => <<"/a/b/c">>, + <<"test-key">> => <<"test-value">> + }, + Msgs = from(Req, #{}), + ?assertEqual(4, length(Msgs)), + [_Base, Msg1, Msg2, Msg3] = Msgs, + ?assert(lists:all(fun is_map/1, Msgs)), + ?assertEqual(<<"test-value">>, hb_maps:get(<<"test-key">>, Msg1)), + ?assertEqual(<<"test-value">>, hb_maps:get(<<"test-key">>, Msg2)), + ?assertEqual(<<"test-value">>, hb_maps:get(<<"test-key">>, Msg3)). +%%% Advanced key syntax tests +``` + +### scoped_key_test + +```erlang +scoped_key_test() -> + Req = #{ + <<"path">> => <<"/a/b/c">>, + <<"2.test-key">> => <<"test-value">> + }, + Msgs = from(Req, #{}), + ?assertEqual(4, length(Msgs)), + [_, Msg1, Msg2, Msg3] = Msgs, + ?assertEqual(not_found, hb_maps:get(<<"test-key">>, Msg1, not_found)), + ?assertEqual(<<"test-value">>, hb_maps:get(<<"test-key">>, Msg2, not_found)), + ?assertEqual(not_found, hb_maps:get(<<"test-key">>, Msg3, not_found)). 
+``` + +### typed_key_test + +```erlang +typed_key_test() -> + Req = #{ + <<"path">> => <<"/a/b/c">>, + <<"2.test-key+integer">> => <<"123">> + }, + Msgs = from(Req, #{}), + ?assertEqual(4, length(Msgs)), + [_, Msg1, Msg2, Msg3] = Msgs, + ?assertEqual(not_found, hb_maps:get(<<"test-key">>, Msg1, not_found)), + ?assertEqual(123, hb_maps:get(<<"test-key">>, Msg2, not_found)), + ?assertEqual(not_found, hb_maps:get(<<"test-key">>, Msg3, not_found)). +``` + +### subpath_in_key_test + +```erlang +subpath_in_key_test() -> + Req = #{ + <<"path">> => <<"/a/b/c">>, + <<"2.test-key+resolve">> => <<"/x/y/z">> + }, + Msgs = from(Req, #{}), + ?assertEqual(4, length(Msgs)), + [_, Msg1, Msg2, Msg3] = Msgs, + ?assertEqual(not_found, hb_maps:get(<<"test-key">>, Msg1, not_found)), + ?assertEqual( + {resolve, + [ + #{}, + #{ <<"path">> => <<"x">> }, + #{ <<"path">> => <<"y">> }, + #{ <<"path">> => <<"z">> } + ] + }, + hb_maps:get(<<"test-key">>, Msg2, not_found) + ), + ?assertEqual(not_found, hb_maps:get(<<"test-key">>, Msg3, not_found)). +%%% Advanced path syntax tests +``` + +### subpath_in_path_test + +```erlang +subpath_in_path_test() -> + Req = #{ + <<"path">> => <<"/a/(x/y/z)/z">> + }, + Msgs = from(Req, #{}), + ?assertEqual(4, length(Msgs)), + [_, Msg1, Msg2, Msg3] = Msgs, + ?assertEqual(<<"a">>, hb_maps:get(<<"path">>, Msg1)), + ?assertEqual( + {resolve, + [ + #{}, + #{ <<"path">> => <<"x">> }, + #{ <<"path">> => <<"y">> }, + #{ <<"path">> => <<"z">> } + ] + }, + Msg2 + ), + ?assertEqual(<<"z">>, hb_maps:get(<<"path">>, Msg3)). 
+``` + +### inlined_keys_test + +```erlang +inlined_keys_test() -> + Req = #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"/a/b&k1=v1/c&k2=v2">> + }, + Msgs = from(Req, #{}), + ?assertEqual(4, length(Msgs)), + [_, Msg1, Msg2, Msg3] = Msgs, + ?assertEqual(<<"v1">>, hb_maps:get(<<"k1">>, Msg2)), + ?assertEqual(<<"v2">>, hb_maps:get(<<"k2">>, Msg3)), + ?assertEqual(not_found, hb_maps:get(<<"k1">>, Msg1, not_found)), + ?assertEqual(not_found, hb_maps:get(<<"k2">>, Msg2, not_found)). +``` + +### inlined_quoted_key_test + +```erlang +inlined_quoted_key_test() -> + Req = #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"/a/b&k1=\"v/1\"/c&k2=v2">> + }, + Msgs = from(Req, #{}), + ?assertEqual(4, length(Msgs)), + [_, Msg1, Msg2, Msg3] = Msgs, + ?assertEqual(<<"v/1">>, hb_maps:get(<<"k1">>, Msg2)), + ?assertEqual(<<"v2">>, hb_maps:get(<<"k2">>, Msg3)), + ?assertEqual(not_found, hb_maps:get(<<"k1">>, Msg1, not_found)), + ?assertEqual(not_found, hb_maps:get(<<"k2">>, Msg2, not_found)), + ReqB = #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"/~profile@1.0/eval=%22~meta@1.0/info%22">> + }, + MsgsB = from(ReqB, #{}), + [_, Msg2b] = MsgsB, + ?assertEqual(<<"~meta@1.0/info">>, hb_maps:get(<<"eval">>, Msg2b)). 
+``` + +### inlined_assumed_key_test + +```erlang +inlined_assumed_key_test() -> + Req = #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"/a/b=4/c&k2=v2">> + }, + Msgs = from(Req, #{}), + ?assertEqual(4, length(Msgs)), + [_, Msg1, Msg2, Msg3] = Msgs, + ?event({parsed, Msgs}), + ?assertEqual(<<"4">>, hb_maps:get(<<"b">>, Msg2)), + ?assertEqual(not_found, hb_maps:get(<<"b">>, Msg1, not_found)), + ?assertEqual(not_found, hb_maps:get(<<"b">>, Msg3, not_found)), + ReqB = #{ + <<"method">> => <<"POST">>, + <<"path">> => <<"/a/b+integer=4/c&k2=v2">> + }, + MsgsB = from(ReqB, #{}), + [_, Msg1b, Msg2b, Msg3b] = MsgsB, + ?event({parsed, MsgsB}), + ?assertEqual(4, hb_maps:get(<<"b">>, Msg2b)), + ?assertEqual(not_found, hb_maps:get(<<"b">>, Msg1b, not_found)), + ?assertEqual(not_found, hb_maps:get(<<"b">>, Msg3b, not_found)). +``` + +### multiple_inlined_keys_test + +```erlang +multiple_inlined_keys_test() -> + Path = <<"/a/b&k1=v1&k2=v2">>, + Req = #{ + <<"method">> => <<"POST">>, + <<"path">> => Path + }, + Msgs = from(Req, #{}), + ?assertEqual(3, length(Msgs)), + [_, Msg1, Msg2] = Msgs, + ?assertEqual(not_found, hb_maps:get(<<"k1">>, Msg1, not_found)), + ?assertEqual(not_found, hb_maps:get(<<"k2">>, Msg1, not_found)), + ?assertEqual(<<"v1">>, hb_maps:get(<<"k1">>, Msg2, not_found)), + ?assertEqual(<<"v2">>, hb_maps:get(<<"k2">>, Msg2, not_found)). +``` + +### subpath_in_inlined_test + +```erlang +subpath_in_inlined_test() -> + Path = <<"/part1/part2&test=1&b=(/x/y)/part3">>, + Req = #{ + <<"path">> => Path + }, + Msgs = from(Req, #{}), + ?assertEqual(4, length(Msgs)), + [_, First, Second, Third] = Msgs, + ?assertEqual(<<"part1">>, hb_maps:get(<<"path">>, First)), + ?assertEqual(<<"part3">>, hb_maps:get(<<"path">>, Third)), + ?assertEqual( + {resolve, [#{}, #{ <<"path">> => <<"x">> }, #{ <<"path">> => <<"y">> }] }, + hb_maps:get(<<"b">>, Second) + ). 
+``` + +### path_parts_test + +```erlang +path_parts_test() -> + ?assertEqual( + [<<"a">>, <<"b&c=(/d/e)">>, <<"f">>], + path_parts($/, <<"/a/b&c=(/d/e)/f">>) + ), + ?assertEqual([<<"a">>], path_parts($/, <<"/a">>)), + ?assertEqual([<<"a">>, <<"b">>, <<"c">>], path_parts($/, <<"/a/b/c">>)), + ?assertEqual( + [ + <<"IYkkrqlZNW_J-4T-5eFApZOMRl5P4VjvrcOXWvIqB1Q">>, + <<"msg2">> + ], + path_parts($/, <<"/IYkkrqlZNW_J-4T-5eFApZOMRl5P4VjvrcOXWvIqB1Q/msg2">>) + ), + ?assertEqual( + [<<"a">>, <<"b&K1=V1">>, <<"c&K2=V2">>], + path_parts($/, <<"/a/b&K1=V1/c&K2=V2">>) + ), + ?assertEqual( + [<<"a">>, <<"(x/y/z)">>, <<"c">>], + path_parts($/, <<"/a/(x/y/z)/c">>) + ), +``` + +--- + +*Generated from [hb_singleton.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_singleton.erl)* diff --git a/docs/book/src/hb_store.erl.md b/docs/book/src/hb_store.erl.md new file mode 100644 index 000000000..0592fa33e --- /dev/null +++ b/docs/book/src/hb_store.erl.md @@ -0,0 +1,1123 @@ +# hb_store + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store.erl) + +A simple abstraction layer for AO key value store operations. +This interface allows us to swap out the underlying store implementation(s) +as desired, without changing the API that `hb_cache` employs. Additionally, +it enables node operators to customize their configuration to maximize +performance, data availability, and other factors. +Stores can be represented in a node's configuration as either a single +message, or a (`structured@1.0`) list of store messages. If a list of stores +is provided, the node will cycle through each until a viable store is found +to execute the given function. +A valid store must implement a _subset_ of the following functions: +``` + start/1: Initialize the store. + stop/1: Stop any processes (etc.) that manage the store. + reset/1: Restore the store to its original, empty state. 
+ scope/0: A tag describing the 'scope' of a stores search: `in_memory`, + `local`, `remote`, `arweave`, etc. Used in order to allow + node operators to prioritize their stores for search. + make_group/2: Create a new group of keys in the store with the given ID. + make_link/3: Create a link (implying one key should redirect to another) + from `existing` to `new` (in that order). + type/2: Return whether the value found at the given key is a + `composite` (group) type, or a `simple` direct binary. + read/2: Read the data at the given location, returning a binary + if it is a `simple` value, or a message if it is a complex + term. + write/3: Write the given `key` with the associated `value` (in that + order) to the store. + list/2: For `composite` type keys, return a list of its child keys. + path/2: Optionally transform a list of path parts into the store's + canonical form. +''' +Each function takes a `store` message first, containing an arbitrary set +of its necessary configuration keys, as well as the `store-module` key which +refers to the Erlang module that implements the store. +All functions must return `ok` or `{ok, Result}`, as appropriate. Other +results will lead to the store manager (this module) iterating to the next +store message given by the user. If none of the given store messages are +able to execute a requested service, the store manager will return +`not_found`. + +--- + +## Exported Functions + +- `add_path/2` +- `add_path/3` +- `behavior_info/1` +- `filter/2` +- `find/1` +- `generate_test_suite/1` +- `generate_test_suite/2` +- `join/1` +- `list/2` +- `make_group/2` +- `make_link/3` +- `match/2` +- `path/1` +- `path/2` +- `read/2` +- `reset/1` +- `resolve/2` +- `scope/2` +- `sort/2` +- `start/1` +- `stop/1` +- `test_stores/0` +- `type/2` +- `write/3` + +--- + +### behavior_info + +A simple abstraction layer for AO key value store operations. +The number of write and read operations to perform in the benchmark. 
+ +```erlang +behavior_info(callbacks) -> + [ + {start, 1}, {stop, 1}, {reset, 1}, {make_group, 2}, {make_link, 3}, + {type, 2}, {read, 2}, {write, 3}, + {list, 2}, {match, 2}, {path, 2}, {add_path, 3} + ]. +``` + +### set + +Store access policies to function names. +Set the instance options for a given store module and name combination. + +```erlang +set(StoreOpts, InstanceTerm) -> + Mod = maps:get(<<"store-module">>, StoreOpts), + set( + Mod, + maps:get(<<"name">>, StoreOpts, Mod), + InstanceTerm + ). +``` + +### set + +```erlang +set(StoreMod, Name, undefined) -> + StoreRef = {store, StoreMod, Name}, + erlang:erase(StoreRef), + persistent_term:erase(StoreRef); +``` + +### set + +```erlang +set(StoreMod, Name, InstanceTerm) -> + StoreRef = {store, StoreMod, Name}, + put(StoreRef, InstanceTerm), + persistent_term:put(StoreRef, InstanceTerm), + ok. +``` + +### find + +Find or spawn a store instance by its store opts. + +```erlang +find(StoreOpts) -> + {Time, Result} = timer:tc(fun() -> do_find(StoreOpts) end), + hb_event:increment(<<"store_duration">>, <<"find">>, #{}, Time), + hb_event:increment(<<"store">>, <<"find">>, #{}, 1), + Result. +``` + +### find + +```erlang +find(StoreOpts) -> + do_find(StoreOpts). +``` + +### do_find + +```erlang +do_find(StoreOpts = #{ <<"store-module">> := Mod }) -> + Name = maps:get(<<"name">>, StoreOpts, Mod), + LookupName = {store, Mod, Name}, + case get(LookupName) of + undefined -> + try persistent_term:get(LookupName) of + Instance1 -> + EnsuredInstance = ensure_instance_alive(StoreOpts, Instance1), + put(LookupName, EnsuredInstance), + EnsuredInstance + catch + error:badarg -> spawn_instance(StoreOpts) + end; + InstanceMessage -> + ensure_instance_alive(StoreOpts, InstanceMessage) + end. +``` + +### spawn_instance + +Create a new instance of a store and return its term. 
+ +```erlang +spawn_instance(StoreOpts = #{ <<"store-module">> := Mod }) -> + Name = maps:get(<<"name">>, StoreOpts, Mod), + try Mod:start(StoreOpts) of + ok -> ok; + {ok, InstanceMessage} -> + set(Mod, Name, InstanceMessage), + InstanceMessage; + {error, Reason} -> + ?event(error, {store_start_failed, {Mod, Name, Reason}}), + throw({store_start_failed, {Mod, Name, Reason}}) + catch error:undef -> + ok + end. +``` + +### ensure_instance_alive + +Handle a found instance message. If it contains a PID, we check if it + +```erlang +ensure_instance_alive(StoreOpts, InstanceMessage = #{ <<"pid">> := Pid }) -> + case is_process_alive(Pid) of + true -> InstanceMessage; + false -> spawn_instance(StoreOpts) + end; +``` + +### ensure_instance_alive + +Handle a found instance message. If it contains a PID, we check if it + +```erlang +ensure_instance_alive(_, InstanceMessage) -> + InstanceMessage. +``` + +### start + +Ensure that a store, or list of stores, have all been started. + +```erlang +start(StoreOpts) when not is_list(StoreOpts) -> start([StoreOpts]); +``` + +### start + +Ensure that a store, or list of stores, have all been started. + +```erlang +start([]) -> ok; +``` + +### start + +Ensure that a store, or list of stores, have all been started. + +```erlang +start([StoreOpts | Rest]) -> + find(StoreOpts), + start(Rest). +``` + +### stop + +```erlang +stop(Modules) -> + call_function(Modules, stop, []). +``` + +### filter + +Takes a store object and a filter function or match spec, returning a + +```erlang +filter(Module, Filter) when not is_list(Module) -> + filter([Module], Filter); +``` + +### filter + +Takes a store object and a filter function or match spec, returning a + +```erlang +filter(Modules, Filter) -> + lists:filter( + fun(Store) -> + try Filter(get_store_scope(Store), Store) + catch _:_ -> false + end + end, + Modules + ). +``` + +### scope + +Limit the store scope to only a specific (set of) option(s). 
+ +```erlang +scope(Opts, Scope) when is_map(Opts) -> + case hb_opts:get(store, no_viable_store, Opts) of + no_viable_store -> Opts; + Store when is_list(Store) -> + % Store is already a list, apply scope normally + Opts#{ store => scope(Store, Scope) }; + Store when is_map(Store) -> + % Check if Store already has a nested 'store' key + case maps:find(store, Store) of + {ok, _NestedStores} -> + % Already has nested structure, return as-is + Opts; + error -> + % Single store map, wrap in list before scoping + % This ensures consistent behavior + Opts#{ store => scope([Store], Scope) } + end + end; +``` + +### scope + +Limit the store scope to only a specific (set of) option(s). + +```erlang +scope(Store, Scope) -> + filter( + Store, + fun(StoreScope, _) -> + StoreScope == Scope orelse + (is_list(Scope) andalso lists:member(StoreScope, Scope)) + end + ). +``` + +### get_store_scope + +Ask a store for its own scope. If it doesn't have one, return the + +```erlang +get_store_scope(Store) -> + case call_function(Store, scope, []) of + not_found -> ?DEFAULT_SCOPE; + Scope -> Scope + end. +``` + +### sort + +Order a store by a preference of its scopes. This is useful for making + +```erlang +sort(Stores, PreferenceOrder) when is_list(PreferenceOrder) -> + sort( + Stores, + hb_maps:from_list( + [ + {Scope, -Index} + || + {Scope, Index} <- + lists:zip( + PreferenceOrder, + lists:seq(1, length(PreferenceOrder)) + ) + ] + ) + ); +``` + +### sort + +Order a store by a preference of its scopes. This is useful for making + +```erlang +sort(Stores, ScoreMap) -> + lists:sort( + fun(Store1, Store2) -> + hb_maps:get(get_store_scope(Store1), ScoreMap, 0) > + hb_maps:get(get_store_scope(Store2), ScoreMap, 0) + end, + Stores + ). +``` + +### join + +Join a list of path components together. + +```erlang +join(Path) -> hb_path:to_binary(Path). +%%% The store interface that modules should implement. +``` + +### read + +Read a key from the store. +Write a key with a value to the store. 
+ +```erlang +read(Modules, Key) -> call_function(Modules, read, [Key]). +``` + +### write + +Read a key from the store. +Write a key with a value to the store. +Make a group in the store. A group can be seen as a namespace or + +```erlang +write(Modules, Key, Value) -> call_function(Modules, write, [Key, Value]). +``` + +### make_group + +Read a key from the store. +Write a key with a value to the store. +Make a group in the store. A group can be seen as a namespace or +Make a link from one path to another in the store. + +```erlang +make_group(Modules, Path) -> call_function(Modules, make_group, [Path]). +``` + +### make_link + +Read a key from the store. +Write a key with a value to the store. +Make a group in the store. A group can be seen as a namespace or +Make a link from one path to another in the store. + +```erlang +make_link(Modules, Existing, New) -> + call_function(Modules, make_link, [Existing, New]). +``` + +### reset + +Delete all of the keys in a store. Should be used with extreme +Get the type of element of a given path in the store. This can be + +```erlang +reset(Modules) -> call_function(Modules, reset, []). +``` + +### type + +Delete all of the keys in a store. Should be used with extreme +Get the type of element of a given path in the store. This can be +Create a path from a list of path components. If no store implements + +```erlang +type(Modules, Path) -> call_function(Modules, type, [Path]). +``` + +### path + +Delete all of the keys in a store. Should be used with extreme +Get the type of element of a given path in the store. This can be +Create a path from a list of path components. If no store implements + +```erlang +path(Path) -> join(Path). +``` + +### path + +Delete all of the keys in a store. Should be used with extreme +Get the type of element of a given path in the store. This can be +Create a path from a list of path components. If no store implements +Add two path components together. 
If no store implements the add_path + +```erlang +path(_, Path) -> path(Path). +``` + +### add_path + +Delete all of the keys in a store. Should be used with extreme +Get the type of element of a given path in the store. This can be +Create a path from a list of path components. If no store implements +Add two path components together. If no store implements the add_path + +```erlang +add_path(Path1, Path2) -> Path1 ++ Path2. +``` + +### add_path + +Delete all of the keys in a store. Should be used with extreme +Get the type of element of a given path in the store. This can be +Create a path from a list of path components. If no store implements +Add two path components together. If no store implements the add_path + +```erlang +add_path(Store, Path1, Path2) -> + case call_function(Store, add_path, [Path1, Path2]) of + not_found -> add_path(Path1, Path2); + Result -> Result + end. +``` + +### resolve + +Follow links through the store to resolve a path to its ultimate target. +List the keys in a group in the store. Use only in debugging. + +```erlang +resolve(Modules, Path) -> call_function(Modules, resolve, [Path]). +``` + +### list + +Follow links through the store to resolve a path to its ultimate target. +List the keys in a group in the store. Use only in debugging. +Match a series of keys and values against the store. Returns + +```erlang +list(Modules, Path) -> call_function(Modules, list, [Path]). +``` + +### match + +Follow links through the store to resolve a path to its ultimate target. +List the keys in a group in the store. Use only in debugging. +Match a series of keys and values against the store. Returns +Call a function on the first store module that succeeds. Returns its + +```erlang +match(Modules, Match) -> call_function(Modules, match, [Match]). +-ifdef(STORE_EVENTS). 
+``` + +### call_function + +```erlang +call_function(X, Function, Args) -> + {Time, Result} = timer:tc(fun() -> do_call_function(X, Function, Args) end), + ?event(store_events, + {store_call, + {function, Function}, + {args, Args}, + {primary_store, + case X of + [PrimaryStore | _] -> PrimaryStore; + _ -> X + end + }, + {time, Time}, + {result, Result} + } + ), + hb_event:increment(<<"store_duration">>, hb_util:bin(Function), #{}, Time), + hb_event:increment(<<"store">>, hb_util:bin(Function), #{}, 1), + Result. +``` + +### call_function + +```erlang +call_function(X, Function, Args) -> + do_call_function(X, Function, Args). +``` + +### do_call_function + +```erlang +do_call_function(X, _Function, _Args) when not is_list(X) -> + do_call_function([X], _Function, _Args); +``` + +### do_call_function + +```erlang +do_call_function([], _Function, _Args) -> + not_found; +``` + +### do_call_function + +```erlang +do_call_function([Store = #{<<"access">> := Access} | Rest], Function, Args) -> + % If the store has an access controls, check if the function is allowed from + % the stated policies. +``` + +### do_call_function + +```erlang +do_call_function([Store = #{<<"store-module">> := Mod} | Rest], Function, Args) -> + % Attempt to apply the function. If it fails, try the next store. +``` + +### apply_store_function + +Apply a store function, checking if the store returns a retry request or + +```erlang +apply_store_function(Mod, Store, Function, Args) -> + MaxAttempts = maps:get(<<"max-retries">>, Store, ?DEFAULT_RETRIES) + 1, + apply_store_function(Mod, Store, Function, Args, MaxAttempts). +``` + +### apply_store_function + +```erlang +apply_store_function(_Mod, _Store, _Function, _Args, 0) -> + % Too many attempts have already failed. Bail. 
+``` + +### apply_store_function + +```erlang +apply_store_function(Mod, Store, Function, Args, AttemptsRemaining) -> + try apply(Mod, Function, [Store | Args]) of + retry -> retry(Mod, Store, Function, Args, AttemptsRemaining); + Other -> Other + catch Class:Reason:Stacktrace -> + ?event(store_error, + {store_call_failed_retrying, + {store, Store}, + {function, Function}, + {args, Args}, + {class, Class}, + {reason, Reason}, + {stacktrace, {trace, Stacktrace}} + } + ), + retry(Mod, Store, Function, Args, AttemptsRemaining) + end. +``` + +### retry + +Stop and start the store, then retry. + +```erlang +retry(Mod, Store, Function, Args, AttemptsRemaining) -> + % Attempt to stop the store and start it again, then retry. +``` + +### call_all + +Call a function on all modules in the store. + +```erlang +call_all(X, _Function, _Args) when not is_list(X) -> + call_all([X], _Function, _Args); +``` + +### call_all + +Call a function on all modules in the store. + +```erlang +call_all([], _Function, _Args) -> + ok; +``` + +### call_all + +Call a function on all modules in the store. + +```erlang +call_all([Store = #{<<"store-module">> := Mod} | Rest], Function, Args) -> + try apply_store_function(Mod, Function, Store, Args) + catch + Class:Reason:Stacktrace -> + ?event(warning, {store_call_failed, {Class, Reason, Stacktrace}}), + ok + end, + call_all(Rest, Function, Args). +``` + +### test_stores + +Return a list of stores for testing. Additional individual functions are + +```erlang +test_stores() -> + [ + (hb_test_utils:test_store(hb_store_fs))#{ + <<"benchmark-scale">> => 0.001 + }, + (hb_test_utils:test_store(hb_store_lmdb))#{ + <<"benchmark-scale">> => 0.5 + }, + (hb_test_utils:test_store(hb_store_lru))#{ + <<"persistent-store">> => [ + #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST/lru">> + } + ] + } + ] ++ rocks_stores(). 
+``` + +### rocks_stores + +```erlang +rocks_stores() -> + [ + #{ + <<"store-module">> => hb_store_rocksdb, + <<"name">> => <<"cache-TEST/rocksdb">> + } + ]. +``` + +### rocks_stores + +```erlang +rocks_stores() -> []. +-endif. +``` + +### generate_test_suite + +```erlang +generate_test_suite(Suite) -> + generate_test_suite(Suite, test_stores()). +``` + +### generate_test_suite + +```erlang +generate_test_suite(Suite, Stores) -> + hb:init(), + lists:map( + fun(Store = #{<<"store-module">> := Mod}) -> + {foreach, + fun() -> + hb_store:start(Store) + end, + fun(_) -> + hb_store:reset(Store) + % hb_store:stop(Store) + end, + [ + { + atom_to_list(Mod) ++ ": " ++ Desc, + { + timeout, + 60, + fun() -> + TestResult = Test(Store), + TestResult + end + } + } + || + {Desc, Test} <- Suite + ] + } + end, + Stores + ). +``` + +### simple_path_resolution_test + +Test path resolution dynamics. +Ensure that we can resolve links recursively. + +```erlang +simple_path_resolution_test(Store) -> + ok = hb_store:write(Store, <<"test-file">>, <<"test-data">>), + hb_store:make_link(Store, <<"test-file">>, <<"test-link">>), + ?assertEqual({ok, <<"test-data">>}, hb_store:read(Store, <<"test-link">>)). +``` + +### resursive_path_resolution_test + +Test path resolution dynamics. +Ensure that we can resolve links recursively. +Ensure that we can resolve links through a directory. + +```erlang +resursive_path_resolution_test(Store) -> + hb_store:write(Store, <<"test-file">>, <<"test-data">>), + hb_store:make_link(Store, <<"test-file">>, <<"test-link">>), + hb_store:make_link(Store, <<"test-link">>, <<"test-link2">>), + ?assertEqual({ok, <<"test-data">>}, hb_store:read(Store, <<"test-link2">>)). +``` + +### hierarchical_path_resolution_test + +Test path resolution dynamics. +Ensure that we can resolve links recursively. +Ensure that we can resolve links through a directory. 
+ +```erlang +hierarchical_path_resolution_test(Store) -> + hb_store:make_group(Store, <<"test-dir1">>), + hb_store:write(Store, [<<"test-dir1">>, <<"test-file">>], <<"test-data">>), + hb_store:make_link(Store, [<<"test-dir1">>], <<"test-link">>), + ?assertEqual( + {ok, <<"test-data">>}, + hb_store:read(Store, [<<"test-link">>, <<"test-file">>]) + ). +``` + +### store_suite_test_ + +```erlang +store_suite_test_() -> + generate_test_suite([ + {"simple path resolution", fun simple_path_resolution_test/1}, + {"resursive path resolution", fun resursive_path_resolution_test/1}, + {"hierarchical path resolution", fun hierarchical_path_resolution_test/1} + ]). +``` + +### benchmark_suite_test_ + +```erlang +benchmark_suite_test_() -> + generate_test_suite([ + {"benchmark key read write", fun benchmark_key_read_write/1}, + {"benchmark list", fun benchmark_list/1}, + {"benchmark message read write", fun benchmark_message_read_write/1} + ]). +``` + +### benchmark_key_read_write + +Benchmark a store. By default, we write 10,000 keys and read 10,000 + +```erlang +benchmark_key_read_write(Store = #{ <<"benchmark-scale">> := Scale }) -> + benchmark_key_read_write( + Store, + erlang:ceil(Scale * ?STORE_BENCH_WRITE_OPS), + erlang:ceil(Scale * ?STORE_BENCH_READ_OPS) + ); +``` + +### benchmark_key_read_write + +Benchmark a store. By default, we write 10,000 keys and read 10,000 + +```erlang +benchmark_key_read_write(Store) -> + benchmark_key_read_write(Store, ?STORE_BENCH_WRITE_OPS, ?STORE_BENCH_READ_OPS). +``` + +### benchmark_key_read_write + +```erlang +benchmark_key_read_write(Store, WriteOps, ReadOps) -> + start(Store), + timer:sleep(100), + ?event( + {benchmarking, + {store, Store}, + {write_ops, WriteOps}, + {read_ops, ReadOps} + } + ), + % Generate random data to write and the keys to read ahead of time. 
+``` + +### benchmark_list + +```erlang +benchmark_list(Store = #{ <<"benchmark-scale">> := Scale }) -> + benchmark_list( + Store, + erlang:ceil(Scale * ?STORE_BENCH_LIST_KEYS), + erlang:ceil(Scale * ?STORE_BENCH_LIST_OPS), + erlang:ceil(Scale * ?STORE_BENCH_LIST_GROUP_SIZE) + ); +``` + +### benchmark_list + +```erlang +benchmark_list(Store) -> + benchmark_list( + Store, + ?STORE_BENCH_LIST_KEYS, + ?STORE_BENCH_LIST_OPS, + ?STORE_BENCH_LIST_GROUP_SIZE + ). +``` + +### benchmark_list + +```erlang +benchmark_list(Store, WriteOps, ListOps, GroupSize) -> + start(Store), + timer:sleep(100), + ?event( + {benchmarking, + {store, Store}, + {keys, hb_util:human_int(WriteOps)}, + {groups, hb_util:human_int(WriteOps div GroupSize)}, + {lists, hb_util:human_int(ListOps)} + } + ), + % Generate a random message to write and the keys to read ahead of time. +``` + +### benchmark_message_read_write + +```erlang +benchmark_message_read_write(Store = #{ <<"benchmark-scale">> := Scale }) -> + benchmark_message_read_write( + Store, + erlang:ceil(Scale * ?BENCH_MSG_WRITE_OPS), + erlang:ceil(Scale * ?BENCH_MSG_READ_OPS) + ); +``` + +### benchmark_message_read_write + +```erlang +benchmark_message_read_write(Store) -> + benchmark_message_read_write(Store, ?BENCH_MSG_WRITE_OPS, ?BENCH_MSG_READ_OPS). +``` + +### benchmark_message_read_write + +```erlang +benchmark_message_read_write(Store, WriteOps, ReadOps) -> + start(Store), + Opts = #{ store => Store, priv_wallet => hb:wallet() }, + TestDataSize = ?BENCH_MSG_DATA_SIZE * 8, % in _bits_ + timer:sleep(100), + ?event( + {benchmarking, + {store, Store}, + {write_ops, WriteOps}, + {read_ops, ReadOps} + } + ), + % Generate a random message to write and the keys to read ahead of time. 
+``` + +### read_only_access_test + +Test that read-only stores allow read operations but block write operations + +```erlang +read_only_access_test() -> + TestStore = hb_test_utils:test_store(hb_store_fs, <<"access-read-only">>), + ReadOnlyStore = TestStore#{<<"access">> => [<<"read">>]}, + WriteStore = hb_test_utils:test_store(hb_store_fs, <<"access-write">>), + StoreList = [ReadOnlyStore, WriteStore], + TestKey = <<"test-key">>, + TestValue = <<"test-value">>, + start(StoreList), + ?event(testing, {read_only_test_started}), + WriteResponse = write(StoreList, TestKey, TestValue), + ?assertEqual(ok, WriteResponse), + ?event(testing, {write_used_fallback_store, WriteResponse}), + ReadResponse = read(StoreList, TestKey), + ?assertEqual({ok, TestValue}, ReadResponse), + ?event(testing, {read_succeeded, ReadResponse}), + ReadOnlyStoreState = read([ReadOnlyStore], TestKey), + WriteStoreState = read([WriteStore], TestKey), + ?event(testing, { + store_state, {read_only, ReadOnlyStoreState},{ write, WriteStoreState} + }), + ?assertEqual(not_found, ReadOnlyStoreState), + ?assertEqual({ok, TestValue}, WriteStoreState). 
+``` + +### write_only_access_test + +Test that write-only stores allow write operations but block read operations + +```erlang +write_only_access_test() -> + WriteOnlyStore = + (hb_test_utils:test_store(hb_store_fs, <<"access-write-only">>))#{ + <<"access">> => [<<"write">>] + }, + ReadStore = hb_test_utils:test_store(hb_store_fs, <<"access-read-fallback">>), + StoreList = [WriteOnlyStore, ReadStore], + TestKey = <<"write-test-key">>, + TestValue = <<"write-test-value">>, + start(StoreList), + ?event(testing, {write_only_test_started}), + ?assertEqual(ok, write(StoreList, TestKey, TestValue)), + ?event(testing, {write_succeeded_on_write_only}), + ReadStoreState = read(StoreList, TestKey), + ?assertEqual(not_found, ReadStoreState), + ?event(testing, {read_skipped_write_only_store, ReadStoreState}), + WriteOnlyStoreNoAccess = maps:remove(<<"access">>, WriteOnlyStore), + ReadStoreNoAccess = read([WriteOnlyStoreNoAccess], TestKey), + ?event(testing, {store, ReadStoreNoAccess}), + ?assertEqual({ok, TestValue}, ReadStoreNoAccess). +``` + +### admin_only_access_test + +Test admin-only stores for start/stop/reset operations + +```erlang +admin_only_access_test() -> + AdminOnlyStore = + (hb_test_utils:test_store(hb_store_fs, <<"access-admin-only">>))#{ + <<"access">> => [<<"admin">>, <<"read">>, <<"write">>] + }, + StoreList = [AdminOnlyStore], + TestKey = <<"admin-test-key">>, + TestValue = <<"admin-test-value">>, + start(StoreList), + ?assertEqual(ok, write(StoreList, TestKey, TestValue)), + ?assertEqual({ok, TestValue}, read(StoreList, TestKey)), + reset(StoreList), + ?assertEqual(ok, start(StoreList)), + ?assertEqual(not_found, read(StoreList, TestKey)). 
+``` + +### multi_access_permissions_test + +Test multiple access permissions + +```erlang +multi_access_permissions_test() -> + ReadWriteStore = + (hb_test_utils:test_store(hb_store_fs, <<"access-read-write">>))#{ + <<"access">> => [<<"read">>, <<"write">>] + }, + AdminStore = + (hb_test_utils:test_store(hb_store_fs, <<"access-admin-fallback">>))#{ + <<"access">> => [<<"admin">>] + }, + StoreList = [ReadWriteStore, AdminStore], + TestKey = <<"multi-access-key">>, + TestValue = <<"multi-access-value">>, + start(StoreList), + ?event(testing, {multi_access_test_started}), + ?assertEqual(ok, write(StoreList, TestKey, TestValue)), + ?event(testing, {write_succeeded_on_read_write_store}), + ?assertEqual({ok, TestValue}, read(StoreList, TestKey)), + ?event(testing, {read_succeeded_on_read_write_store}), + reset(StoreList), + ?assertEqual(ok, start(StoreList)), + ?assertEqual(not_found, read(StoreList, TestKey)). +``` + +### store_access_list_test + +Test access control with a list of stores. + +```erlang +store_access_list_test() -> + % Chain: Read-only -> Write-only -> Unrestricted + ReadOnlyStore = + (hb_test_utils:test_store(hb_store_fs, <<"chain-read-only">>))#{ + <<"access">> => [<<"read">>] + }, + WriteOnlyStore = + (hb_test_utils:test_store(hb_store_fs, <<"chain-write-only">>))#{ + <<"access">> => [<<"write">>] + }, + UnrestrictedStore = + hb_test_utils:test_store(hb_store_fs, <<"chain-unrestricted">>), + StoreChain = [ReadOnlyStore, WriteOnlyStore, UnrestrictedStore], + TestKey = <<"chain-test-key">>, + TestValue = <<"chain-test-value">>, + start(StoreChain), + ?event(testing, {fallback_chain_test_started, length(StoreChain)}), + ?assertEqual(ok, write(StoreChain, TestKey, TestValue)), + ?event(testing, {write_used_second_store_in_chain}), + ?assertEqual(not_found, read(StoreChain, TestKey)), + ?event(testing, {read_fell_through_entire_chain}), + WriteOnlyNoAccess = maps:remove(<<"access">>, WriteOnlyStore), + ?assertEqual({ok, TestValue}, 
read([WriteOnlyNoAccess], TestKey)). +``` + +### invalid_access_permissions_test + +Test invalid access permissions are ignored + +```erlang +invalid_access_permissions_test() -> + InvalidAccessStore = + (hb_test_utils:test_store(hb_store_fs, <<"access-invalid">>))#{ + <<"access">> => [<<"invalid-policy">>, <<"nonexistent-policy">>] + }, + FallbackStore = hb_test_utils:test_store(hb_store_fs, <<"access-fallback">>), + StoreList = [InvalidAccessStore, FallbackStore], + TestKey = <<"invalid-access-key">>, + TestValue = <<"invalid-access-value">>, + start(StoreList), + ?event(testing, {invalid_access_test_started}), + ?assertEqual(ok, write(StoreList, TestKey, TestValue)), + ?event(testing, {write_used_fallback_store}), + ?assertEqual({ok, TestValue}, read(StoreList, TestKey)), + ?event(testing, {read_used_fallback_store}), + InvalidStoreNoAccess = maps:remove(<<"access">>, InvalidAccessStore), + start([InvalidStoreNoAccess]), + ?assertEqual(not_found, read([InvalidStoreNoAccess], TestKey)). 
+``` + +### list_access_control_test + +Test list operations with access control + +```erlang +list_access_control_test() -> + ReadOnlyStore = + (hb_test_utils:test_store(hb_store_fs, <<"list-read-only">>))#{ + <<"access">> => [<<"read">>] + }, + WriteStore = hb_test_utils:test_store(hb_store_fs, <<"list-write">>), + StoreList = [ReadOnlyStore, WriteStore], + ListGroup = <<"list-test-group">>, + TestKey = <<"list-test-key">>, + TestValue = <<"list-test-value">>, + start(StoreList), + ?event(testing, {list_access_test_started}), + GroupResult = make_group(StoreList, ListGroup), + ?assertEqual(ok, GroupResult), + ?event(testing, {group_created, GroupResult}), + WriteResponse = write(StoreList, [ListGroup, TestKey], TestValue), + ?assertEqual(ok, WriteResponse), + ListResult = list(StoreList, ListGroup), + ListValue = read(StoreList, [ListGroup, TestKey]), + ?event(testing, {list_result, ListResult, ListValue}), + ?assertEqual({ok,[TestKey]}, ListResult), + ?assertEqual({ok,TestValue}, ListValue). 
+```
+
+### make_link_access_test
+
+Test make_link operations with write access
+
+```erlang
+make_link_access_test() ->
+    WriteOnlyStore =
+        (hb_test_utils:test_store(hb_store_fs, <<"link-write-only">>))#{
+            <<"access">> => [<<"write">>,<<"read">>]
+        },
+    FallbackStore = hb_test_utils:test_store(hb_store_fs, <<"link-fallback">>),
+    StoreList = [WriteOnlyStore, FallbackStore],
+    SourceKey = <<"link-source">>,
+    TargetKey = <<"link-target">>,
+    TestValue = <<"link-test-value">>,
+    start(StoreList),
+    ?event(testing, {make_link_access_test_started}),
+    ?assertEqual(ok, write(StoreList, TargetKey, TestValue)),
+    LinkResult = make_link(StoreList, TargetKey, SourceKey),
+    ?event(testing, {make_link_result, LinkResult}),
+    ReadResult = read(StoreList, SourceKey),
+    ?event(testing, {read_linked_value, ReadResult}),
+    ?assertEqual({ok, TestValue}, ReadResult),
+```
+
+---
+
+*Generated from [hb_store.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store.erl)*
diff --git a/docs/book/src/hb_store_fs.erl.md b/docs/book/src/hb_store_fs.erl.md
new file mode 100644
index 000000000..5f0582311
--- /dev/null
+++ b/docs/book/src/hb_store_fs.erl.md
@@ -0,0 +1,305 @@
+# hb_store_fs
+
+[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_fs.erl)
+
+A key-value store implementation, following the `hb_store` behavior
+and interface. This implementation utilizes the node's local file system as
+its storage mechanism, offering an alternative to other stores that require
+the compilation of additional libraries in order to function.
+As this store implementation operates using Erlang's native `file` and
+`filelib` mechanisms, it largely inherits its performance characteristics
+from those of the underlying OS/filesystem drivers.
Certain filesystems can
+be quite performant for the types of workload that HyperBEAM AO-Core execution
+requires (many reads and writes to explicit keys, few directory 'listing' or
+search operations), while others perform suboptimally.
+Additionally, this store implementation offers the ability for simple
+integration of HyperBEAM with other non-volatile storage media: `hb_store_fs`
+will interact with any service that implements the host operating system's
+native filesystem API. By mounting devices via `FUSE` (etc), HyperBEAM is
+able to interact with a large number of existing storage systems (for example,
+S3-compatible cloud storage APIs, etc).
+
+---
+
+## Exported Functions
+
+- `list/2`
+- `make_group/2`
+- `make_link/3`
+- `read/2`
+- `reset/1`
+- `resolve/2`
+- `scope/0`
+- `scope/1`
+- `start/1`
+- `stop/1`
+- `type/2`
+- `write/3`
+
+---
+
+### start
+
+A key-value store implementation, following the `hb_store` behavior
+Initialize the file system store with the given data directory.
+
+```erlang
+start(#{ <<"name">> := DataDir }) ->
+    ok = filelib:ensure_dir(DataDir).
+```
+
+### stop
+
+Stop the file system store. Currently a no-op.
+
+```erlang
+stop(#{ <<"name">> := _DataDir }) ->
+    ok.
+```
+
+### scope
+
+The file-based store is always local, for now. In the future, we may
+
+```erlang
+scope() -> local.
+```
+
+### scope
+
+The file-based store is always local, for now. In the future, we may
+
+```erlang
+scope(#{ <<"scope">> := Scope }) -> Scope;
+```
+
+### scope
+
+The file-based store is always local, for now. In the future, we may
+Reset the store by completely removing its directory and recreating it.
+
+```erlang
+scope(_) -> scope().
+```
+
+### reset
+
+The file-based store is always local, for now. In the future, we may
+Reset the store by completely removing its directory and recreating it.
+ +```erlang +reset(#{ <<"name">> := DataDir }) -> + % Use pattern that completely removes directory then recreates it + os:cmd(binary_to_list(<< "rm -Rf ", DataDir/binary >>)), + ?event({reset_store, {path, DataDir}}). +``` + +### read + +Read a key from the store, following symlinks as needed. + +```erlang +read(Opts, Key) -> + read(add_prefix(Opts, resolve(Opts, Key))). +``` + +### read + +```erlang +read(Path) -> + ?event({read, Path}), + case file:read_file_info(Path) of + {ok, #file_info{type = regular}} -> + {ok, _} = file:read_file(Path); + _ -> + case file:read_link(Path) of + {ok, Link} -> + ?event({link_found, Path, Link}), + read(Link); + _ -> + not_found + end + end. +``` + +### write + +Write a value to the specified path in the store. + +```erlang +write(Opts, PathComponents, Value) -> + Path = add_prefix(Opts, PathComponents), + ?event({writing, Path, byte_size(Value)}), + filelib:ensure_dir(Path), + ok = file:write_file(Path, Value). +``` + +### list + +List contents of a directory in the store. + +```erlang +list(Opts, Path) -> + case file:list_dir(add_prefix(Opts, Path)) of + {ok, Files} -> {ok, lists:map(fun hb_util:bin/1, Files)}; + {error, _} -> not_found + end. +``` + +### resolve + +Replace links in a path successively, returning the final path. + +```erlang +resolve(Opts, RawPath) -> + Res = resolve(Opts, "", hb_path:term_to_path_parts(hb_store:join(RawPath), Opts)), + ?event({resolved, RawPath, Res}), + Res. 
+``` + +### resolve + +```erlang +resolve(_, CurrPath, []) -> + hb_store:join(CurrPath); +``` + +### resolve + +```erlang +resolve(Opts, CurrPath, [Next|Rest]) -> + PathPart = hb_store:join([CurrPath, Next]), + ?event( + {resolving, + {accumulated_path, CurrPath}, + {next_segment, Next}, + {generated_partial_path_to_test, PathPart} + } + ), + case file:read_link(add_prefix(Opts, PathPart)) of + {ok, RawLink} -> + Link = remove_prefix(Opts, RawLink), + resolve(Opts, Link, Rest); + {error, enoent} -> + not_found; + _ -> + resolve(Opts, PathPart, Rest) + end. +``` + +### type + +Determine the type of a key in the store. + +```erlang +type(Opts, Key) -> + type(add_prefix(Opts, Key)). +``` + +### type + +```erlang +type(Path) -> + ?event({type, Path}), + case file:read_file_info(Path) of + {ok, #file_info{type = directory}} -> composite; + {ok, #file_info{type = regular}} -> simple; + _ -> + case file:read_link(Path) of + {ok, Link} -> + type(Link); + _ -> + not_found + end + end. +``` + +### make_group + +Create a directory (group) in the store. + +```erlang +make_group(Opts = #{ <<"name">> := _DataDir }, Path) -> + P = add_prefix(Opts, Path), + ?event({making_group, P}), + % We need to ensure that the parent directory exists, so that we can + % make the group. +``` + +### make_link + +Create a symlink, handling the case where the link would point to itself. + +```erlang +make_link(_, Link, Link) -> ok; +``` + +### make_link + +Create a symlink, handling the case where the link would point to itself. + +```erlang +make_link(Opts, Existing, New) -> + ?event({symlink, + add_prefix(Opts, Existing), + P2 = add_prefix(Opts, New)}), + filelib:ensure_dir(P2), + case file:make_symlink(add_prefix(Opts, Existing), N = add_prefix(Opts, New)) of + ok -> ok; + {error, eexist} -> + file:delete(N), + R = file:make_symlink(add_prefix(Opts, Existing), N), + ?event(debug_fs, + {symlink_recreated, + {existing, Existing}, + {new, New}, + {result, R} + } + ), + R + end. 
+``` + +### add_prefix + +Add the directory prefix to a path. + +```erlang +add_prefix(#{ <<"name">> := Prefix }, Path) -> + ?event({add_prefix, Prefix, Path}), + % Check if the prefix is an absolute path + IsAbsolute = is_binary(Prefix) andalso binary:first(Prefix) =:= $/ orelse + is_list(Prefix) andalso hd(Prefix) =:= $/, + % Join the paths + JoinedPath = hb_store:join([Prefix, Path]), + % If the prefix was absolute, ensure the joined path is also absolute + case IsAbsolute of + true -> + case is_binary(JoinedPath) of + true -> + case binary:first(JoinedPath) of + $/ -> JoinedPath; + _ -> <<"/", JoinedPath/binary>> + end; + false -> + case JoinedPath of + [$/ | _] -> JoinedPath; + _ -> [$/ | JoinedPath] + end + end; + false -> + JoinedPath + end. +``` + +### remove_prefix + +Remove the directory prefix from a path. + +```erlang +remove_prefix(#{ <<"name">> := Prefix }, Path) -> +``` + +--- + +*Generated from [hb_store_fs.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_fs.erl)* diff --git a/docs/book/src/hb_store_gateway.erl.md b/docs/book/src/hb_store_gateway.erl.md new file mode 100644 index 000000000..480ef52a6 --- /dev/null +++ b/docs/book/src/hb_store_gateway.erl.md @@ -0,0 +1,388 @@ +# hb_store_gateway + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_gateway.erl) + +A store module that reads data from the nodes Arweave gateway and +GraphQL routes, additionally including additional store-specific routes. + +--- + +## Exported Functions + +- `list/2` +- `read/2` +- `resolve/2` +- `scope/1` +- `type/2` + +--- + +### scope + +A store module that reads data from the nodes Arweave gateway and +The scope of a GraphQL store is always remote, due to performance. + +```erlang +scope(_) -> remote. +``` + +### resolve + +A store module that reads data from the nodes Arweave gateway and +The scope of a GraphQL store is always remote, due to performance. + +```erlang +resolve(_, Key) -> Key. 
+``` + +### list + +A store module that reads data from the nodes Arweave gateway and +The scope of a GraphQL store is always remote, due to performance. + +```erlang +list(StoreOpts, Key) -> + ?event(store_gateway, executing_list), + case read(StoreOpts, Key) of + not_found -> not_found; + {ok, Message} -> {ok, hb_maps:keys(Message, StoreOpts)} + end. +``` + +### type + +Get the type of the data at the given key. We potentially cache the + +```erlang +type(StoreOpts, Key) -> + ?event(store_gateway, executing_type), + case read(StoreOpts, Key) of + not_found -> not_found; + {ok, Data} -> + ?event({type, hb_private:reset(hb_message:uncommitted(Data, StoreOpts))}), + IsFlat = lists:all( + fun({_, Value}) -> not is_map(Value) end, + hb_maps:to_list( + hb_private:reset( + hb_message:uncommitted(Data, StoreOpts) + ), + StoreOpts + ) + ), + if + IsFlat -> simple; + true -> composite + end + end. +``` + +### read + +Read the data at the given key from the GraphQL route. Will only attempt + +```erlang +read(BaseStoreOpts, Key) -> + StoreOpts = opts(BaseStoreOpts), + case hb_path:term_to_path_parts(Key, StoreOpts) of + [ID] when ?IS_ID(ID) -> + ?event({read, StoreOpts, Key}), + case hb_gateway_client:read(Key, StoreOpts) of + {error, _} -> + ?event(store_gateway, {read_not_found, {key, ID}}), + not_found; + {ok, Message} -> + ?event(store_gateway, {read_found, {key, ID}}), + try hb_store_remote_node:maybe_cache(StoreOpts, Message) + catch _:_ -> ignored end, + {ok, Message} + end; + _ -> + ?event({ignoring_non_id, Key}), + not_found + end. +``` + +### opts + +Normalize the routes in the given `Opts`. + +```erlang +opts(Opts) -> + case hb_maps:find(<<"node">>, Opts) of + error -> Opts; + {ok, Node} -> + case hb_maps:get(<<"node-type">>, Opts, <<"arweave">>, Opts) of + <<"arweave">> -> + Opts#{ + routes => [ + #{ + % Routes for GraphQL requests to use the remote + % server's GraphQL API. +``` + +### graphql_as_store_test_ + +Store is accessible via the default options. 
+ +```erlang +graphql_as_store_test_() -> + hb_http_server:start_node(#{}), + {timeout, 10, fun() -> + hb_http_server:start_node(#{}), + ?assertMatch( + {ok, #{ <<"app-name">> := <<"aos">> }}, + hb_store:read( + [#{ <<"store-module">> => hb_store_gateway }], + <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">> + ) + ) + end}. +``` + +### graphql_from_cache_test + +Stored messages are accessible via `hb_cache` accesses. + +```erlang +graphql_from_cache_test() -> + hb_http_server:start_node(#{}), + Opts = + #{ + store => + [ + #{ + <<"store-module">> => hb_store_gateway + } + ] + }, + ?assertMatch( + {ok, #{ <<"app-name">> := <<"aos">> }}, + hb_cache:read( + <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">>, + Opts + ) + ). +``` + +### manual_local_cache_test + +```erlang +manual_local_cache_test() -> + hb_http_server:start_node(#{}), + Local = #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST/gw-local-cache">> + }, + hb_store:reset(Local), + Gateway = #{ + <<"store-module">> => hb_store_gateway, + <<"local-store">> => Local + }, + {ok, FromRemote} = + hb_cache:read( + <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">>, + #{ store => [Gateway] } + ), + ?event({writing_recvd_to_local, FromRemote}), + {ok, _} = hb_cache:write(FromRemote, #{ store => [Local] }), + {ok, Read} = + hb_cache:read( + <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">>, + #{ store => [Local] } + ), + ?event({read_from_local, Read}), + ?assert(hb_message:match(Read, FromRemote)). +``` + +### cache_read_message_test + +Ensure that saving to the gateway store works. 
+ +```erlang +cache_read_message_test() -> + hb_http_server:start_node(#{}), + Local = #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST/1">> + }, + hb_store:reset(Local), + WriteOpts = #{ + store => + [ + #{ <<"store-module">> => hb_store_gateway, + <<"local-store">> => [Local] + } + ] + }, + {ok, Written} = + hb_cache:read( + <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">>, + WriteOpts + ), + {ok, Read} = + hb_cache:read( + <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">>, + #{ store => [Local] } + ), + ?assert(hb_message:match(Read, Written)). +``` + +### specific_route_test + +Routes can be specified in the options, overriding the default routes. + +```erlang +specific_route_test() -> + hb_http_server:start_node(#{}), + Opts = #{ + store => + [ + #{ <<"store-module">> => hb_store_gateway, + <<"routes">> => [], + <<"only">> => local + } + ] + }, + ?assertMatch( + not_found, + hb_cache:read( + <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">>, + Opts + ) + ). +``` + +### external_http_access_test + +Test that the default node config allows for data to be accessed. + +```erlang +external_http_access_test() -> + Node = hb_http_server:start_node( + #{ + cache_control => <<"cache">>, + store => + [ + #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-TEST">> + }, + #{ <<"store-module">> => hb_store_gateway } + ] + } + ), + ?assertMatch( + {ok, #{ <<"data-protocol">> := <<"ao">> }}, + hb_http:get( + Node, + <<"p45HPD-ENkLS7Ykqrx6p_DYGbmeHDeeF8LJ09N2K53g">>, + #{} + ) + ). +``` + +### store_opts_test + +Test to verify store opts is being set for Data-Protocol ao +Test that items retreived from the gateway store are verifiable. 
+
+```erlang
+store_opts_test() ->
+    Opts = #{
+        cache_control => <<"cache">>,
+        store =>
+            [
+                #{
+                    <<"store-module">> => hb_store_fs,
+                    <<"name">> => <<"cache-TEST">>
+                },
+                #{
+                    <<"store-module">> => hb_store_gateway,
+                    <<"local-store">> => false,
+                    <<"subindex">> => [
+                        #{
+                            <<"name">> => <<"Data-Protocol">>,
+                            <<"value">> => <<"ao">>
+                        }
+                    ]
+                }
+            ]
+    },
+    Node = hb_http_server:start_node(Opts),
+    {ok, Res} =
+        hb_http:get(
+            Node,
+            <<"myb2p8_TSM0KSgBMoG-nu6TLuqWwPmdZM5V2QSUeNmM">>,
+            #{}
+        ),
+    ?event(debug_gateway, {res, Res}),
+    ?assertEqual(<<"Hello World">>, hb_ao:get(<<"data">>, Res)).
+```
+
+### verifiability_test
+
+Test to verify store opts is being set for Data-Protocol ao
+Test that items retrieved from the gateway store are verifiable.
+
+```erlang
+verifiability_test() ->
+    hb_http_server:start_node(#{}),
+    {ok, Message} =
+        hb_cache:read(
+            <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">>,
+            #{
+                store =>
+                    [
+                        #{
+                            <<"store-module">> => hb_store_gateway
+                        }
+                    ]
+            }
+        ),
+    % Ensure that the message is verifiable after being converted to
+    % httpsig@1.0 and back to structured@1.0.
+``` + +### remote_hyperbeam_node_ans104_test + +Test that another HyperBEAM node offering the `~query@1.0` device can + +```erlang +remote_hyperbeam_node_ans104_test() -> + ServerOpts = + #{ + priv_wallet => ar_wallet:new(), + store => hb_test_utils:test_store() + }, + Server = hb_http_server:start_node(ServerOpts), + Msg = + hb_message:commit( + #{ + <<"hello">> => <<"world">> + }, + ServerOpts, + #{ <<"commitment-device">> => <<"ans104@1.0">> } + ), + {ok, ID} = hb_cache:write(Msg, ServerOpts), + {ok, ReadMsg} = hb_cache:read(ID, ServerOpts), + ?assert(hb_message:verify(ReadMsg)), + LocalStore = hb_test_utils:test_store(), + ClientOpts = + #{ + store => + [ + #{ + <<"store-module">> => hb_store_gateway, + <<"node">> => Server, + <<"node-type">> => <<"ao">>, + <<"local-store">> => [LocalStore] + } + ] + }, + {ok, Msg2} = hb_cache:read(ID, ClientOpts), + ?assert(hb_message:verify(Msg2)), +``` + +--- + +*Generated from [hb_store_gateway.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_gateway.erl)* diff --git a/docs/book/src/hb_store_lmdb.erl.md b/docs/book/src/hb_store_lmdb.erl.md new file mode 100644 index 000000000..8b287cb5b --- /dev/null +++ b/docs/book/src/hb_store_lmdb.erl.md @@ -0,0 +1,1206 @@ +# hb_store_lmdb + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_lmdb.erl) + +## Exported Functions + +- `add_path/3` +- `list/2` +- `make_group/2` +- `make_link/3` +- `match/2` +- `path/2` +- `read/2` +- `reset/1` +- `resolve/2` +- `scope/0` +- `scope/1` +- `start/1` +- `stop/1` +- `type/2` +- `write/3` + +--- + +### start + +An LMDB (Lightning Memory Database) implementation of the HyperBeam store interface. +Start the LMDB storage system for a given database configuration. 
+ +```erlang +start(Opts = #{ <<"name">> := DataDir }) -> + % Ensure the directory exists before opening LMDB environment + DataDirPath = hb_util:list(DataDir), + ok = filelib:ensure_dir(filename:join(DataDirPath, "dummy")), + % Create the LMDB environment with specified size limit + {ok, Env} = + elmdb:env_open( + DataDirPath, + [ + {map_size, maps:get(<<"capacity">>, Opts, ?DEFAULT_SIZE)}, + no_mem_init, no_sync + ] + ), + {ok, DBInstance} = elmdb:db_open(Env, [create]), + % Store both environment and DB instance in persistent_term for later cleanup + StoreKey = {lmdb, ?MODULE, DataDir}, + persistent_term:put(StoreKey, {Env, DBInstance, DataDir}), + {ok, #{ <<"env">> => Env, <<"db">> => DBInstance }}; +``` + +### start + +An LMDB (Lightning Memory Database) implementation of the HyperBeam store interface. +Start the LMDB storage system for a given database configuration. +Determine whether a key represents a simple value or composite group. +Write a key-value pair to the database asynchronously. +Read a value from the database by key, with automatic link resolution. +Helper function to check if a value is a link and extract the target. + +```erlang +-spec read(map(), binary() | list()) -> {ok, binary()} | {error, term()}. +read(Opts, PathParts) when is_list(PathParts) -> + read(Opts, to_path(PathParts)); +read(Opts, Path) -> + % Try direct read first (fast path for non-link paths) + case read_with_links(Opts, Path) of + {ok, Value} -> + {ok, Value}; + not_found -> + try + PathParts = binary:split(Path, <<"/">>, [global]), + case resolve_path_links(Opts, PathParts) of + {ok, ResolvedPathParts} -> + ResolvedPathBin = to_path(ResolvedPathParts), + read_with_links(Opts, ResolvedPathBin); + {error, _} -> + not_found + end + catch + Class:Reason:Stacktrace -> + ?event(error, + { + resolve_path_links_failed, + {class, Class}, + {reason, Reason}, + {stacktrace, Stacktrace}, + {path, Path} + } + ), + % If link resolution fails, return not_found + not_found + end + end. 
+``` + +```erlang +start(_) -> + {error, {badarg, <<"StoreOpts must be a map">>}}. +%% +%% +%% +%% +%% +%% +%% +%% +%% +%% +%% +``` + +### is_link + +An LMDB (Lightning Memory Database) implementation of the HyperBeam store interface. +Start the LMDB storage system for a given database configuration. +Determine whether a key represents a simple value or composite group. +Write a key-value pair to the database asynchronously. +Read a value from the database by key, with automatic link resolution. +Helper function to check if a value is a link and extract the target. + +```erlang +-spec read(map(), binary() | list()) -> {ok, binary()} | {error, term()}. +read(Opts, PathParts) when is_list(PathParts) -> + read(Opts, to_path(PathParts)); +read(Opts, Path) -> + % Try direct read first (fast path for non-link paths) + case read_with_links(Opts, Path) of + {ok, Value} -> + {ok, Value}; + not_found -> + try + PathParts = binary:split(Path, <<"/">>, [global]), + case resolve_path_links(Opts, PathParts) of + {ok, ResolvedPathParts} -> + ResolvedPathBin = to_path(ResolvedPathParts), + read_with_links(Opts, ResolvedPathBin); + {error, _} -> + not_found + end + catch + Class:Reason:Stacktrace -> + ?event(error, + { + resolve_path_links_failed, + {class, Class}, + {reason, Reason}, + {stacktrace, Stacktrace}, + {path, Path} + } + ), + % If link resolution fails, return not_found + not_found + end + end. +``` + +```erlang +is_link(Value) -> + LinkPrefixSize = byte_size(<<"link:">>), + case byte_size(Value) > LinkPrefixSize andalso + binary:part(Value, 0, LinkPrefixSize) =:= <<"link:">> of + true -> + Link = + binary:part( + Value, + LinkPrefixSize, + byte_size(Value) - LinkPrefixSize + ), + {true, Link}; + false -> + false + end. +``` + +### to_path + +Helper function to convert to a path +Unified read function that handles LMDB reads with fallback to the + +```erlang +to_path(PathParts) -> + hb_util:bin(lists:join(<<"/">>, PathParts)). 
+``` + +### read_direct + +Helper function to convert to a path +Unified read function that handles LMDB reads with fallback to the + +```erlang +read_direct(Opts, Path) -> + #{ <<"db">> := DBInstance } = find_env(Opts), + case elmdb:get(DBInstance, Path) of + {ok, Value} -> {ok, Value}; + {error, not_found} -> not_found; % Normalize error format + not_found -> not_found % Handle both old and new format + end. +``` + +### read_with_links + +Read a value directly from the database with link resolution. + +```erlang +read_with_links(Opts, Path) -> + case read_direct(Opts, Path) of + {ok, Value} -> + % Check if this value is actually a link to another key + case is_link(Value) of + {true, Link} -> + % Extract the target key and recursively resolve the link + read_with_links(Opts, Link); + false -> + % Check if this is a group marker - groups should not be + % readable as simple values + case Value of + <<"group">> -> not_found; + _ -> {ok, Value} + end + end; + not_found -> + not_found + end. +``` + +### resolve_path_links + +Resolve links in a path, checking each segment except the last. + +```erlang +resolve_path_links(Opts, Path) -> + resolve_path_links(Opts, Path, 0). +``` + +### resolve_path_links + +```erlang +resolve_path_links(_Opts, _Path, Depth) when Depth > ?MAX_REDIRECTS -> + % Prevent infinite loops with depth limit + {error, too_many_redirects}; +``` + +### resolve_path_links + +```erlang +resolve_path_links(_Opts, [LastSegment], _Depth) -> + % Base case: only one segment left, no link resolution needed + {ok, [LastSegment]}; +``` + +### resolve_path_links + +```erlang +resolve_path_links(Opts, Path, Depth) -> + resolve_path_links_acc(Opts, Path, [], Depth). 
+``` + +### resolve_path_links_acc + +```erlang +resolve_path_links_acc(_Opts, [], AccPath, _Depth) -> + % No more segments to process + {ok, lists:reverse(AccPath)}; +``` + +### resolve_path_links_acc + +```erlang +resolve_path_links_acc(_, FullPath = [<<"data">>|_], [], _Depth) -> + {ok, FullPath}; +``` + +### resolve_path_links_acc + +```erlang +resolve_path_links_acc(Opts, [Head | Tail], AccPath, Depth) -> + % Build the accumulated path so far + CurrentPath = lists:reverse([Head | AccPath]), + CurrentPathBin = to_path(CurrentPath), + % Check if the accumulated path (not just the segment) is a link + case read_direct(Opts, CurrentPathBin) of + {ok, Value} -> + case is_link(Value) of + {true, Link} -> + % The accumulated path is a link! Resolve it + LinkSegments = binary:split(Link, <<"/">>, [global]), + % Replace the accumulated path with the link target and + % continue with remaining segments + NewPath = LinkSegments ++ Tail, + resolve_path_links(Opts, NewPath, Depth + 1); + false -> + % Not a link, continue accumulating + resolve_path_links_acc(Opts, Tail, [Head | AccPath], Depth) + end; + not_found -> + % Path doesn't exist as a complete link, continue accumulating + resolve_path_links_acc(Opts, Tail, [Head | AccPath], Depth) + end. +``` + +### match + +Match a series of keys and values against the database. Returns + +```erlang +-spec list(map(), binary()) -> {ok, [binary()]} | {error, term()}. 
+list(Opts, Path) ->
+    % Check if Path is a link and resolve it if necessary
+    ResolvedPath =
+        case read_direct(Opts, Path) of
+            {ok, Value} ->
+                case is_link(Value) of
+                    {true, Link} ->
+                        Link;
+                    false ->
+                        % Not a link; use original path
+                        Path
+                end;
+            not_found ->
+                Path
+        end,
+    % Ensure path ends with / for elmdb:list API
+    SearchPath =
+        case ResolvedPath of
+            <<>> -> <<>>; % Root path
+            <<"/">> -> <<>>; % Root path variant
+            _ ->
+                case binary:last(ResolvedPath) of
+                    $/ -> ResolvedPath;
+                    _ -> <<ResolvedPath/binary, "/">>
+                end
+        end,
+    % Use native elmdb:list function
+    #{ <<"db">> := DBInstance } = find_env(Opts),
+    case elmdb:list(DBInstance, SearchPath) of
+        {ok, Children} -> {ok, Children};
+        {error, not_found} -> {ok, []}; % Normalize new error format
+        not_found -> {ok, []} % Handle both old and new format
+    end.
+```
+
+```erlang
+match(Opts, MatchMap) when is_map(MatchMap) ->
+    match(Opts, maps:to_list(MatchMap));
+```
+
+### match
+
+Match a series of keys and values against the database. Returns
+
+```erlang
+-spec list(map(), binary()) -> {ok, [binary()]} | {error, term()}.
+list(Opts, Path) ->
+    % Check if Path is a link and resolve it if necessary
+    ResolvedPath =
+        case read_direct(Opts, Path) of
+            {ok, Value} ->
+                case is_link(Value) of
+                    {true, Link} ->
+                        Link;
+                    false ->
+                        % Not a link; use original path
+                        Path
+                end;
+            not_found ->
+                Path
+        end,
+    % Ensure path ends with / for elmdb:list API
+    SearchPath =
+        case ResolvedPath of
+            <<>> -> <<>>; % Root path
+            <<"/">> -> <<>>; % Root path variant
+            _ ->
+                case binary:last(ResolvedPath) of
+                    $/ -> ResolvedPath;
+                    _ -> <<ResolvedPath/binary, "/">>
+                end
+        end,
+    % Use native elmdb:list function
+    #{ <<"db">> := DBInstance } = find_env(Opts),
+    case elmdb:list(DBInstance, SearchPath) of
+        {ok, Children} -> {ok, Children};
+        {error, not_found} -> {ok, []}; % Normalize new error format
+        not_found -> {ok, []} % Handle both old and new format
+    end.
+``` + +```erlang +match(Opts, MatchKVs) -> + #{ <<"db">> := DBInstance } = find_env(Opts), + WithPrefixes = + lists:map( + fun({Key, Path}) -> + {Key, <<"link:", Path/binary>>} + end, + MatchKVs + ), + ?event({elmdb_match, MatchKVs}), + case elmdb:match(DBInstance, WithPrefixes) of + {ok, Matches} -> + ?event({elmdb_matched, Matches}), + {ok, Matches}; + {error, not_found} -> not_found; + not_found -> not_found + end. +``` + +### create_parent_groups + +Helper function to recursively create parent groups. + +```erlang +-spec ensure_parent_groups(map(), binary()) -> ok. +ensure_parent_groups(Opts, Path) -> + PathParts = binary:split(Path, <<"/">>, [global]), + case PathParts of + [_] -> + % Single segment, no parents to create + ok; + _ -> + % Multiple segments, create parent groups + ParentParts = lists:droplast(PathParts), + create_parent_groups(Opts, [], ParentParts) + end. +``` + +```erlang +create_parent_groups(_Opts, _Current, []) -> + ok; +``` + +### create_parent_groups + +Helper function to recursively create parent groups. + +```erlang +-spec ensure_parent_groups(map(), binary()) -> ok. +ensure_parent_groups(Opts, Path) -> + PathParts = binary:split(Path, <<"/">>, [global]), + case PathParts of + [_] -> + % Single segment, no parents to create + ok; + _ -> + % Multiple segments, create parent groups + ParentParts = lists:droplast(PathParts), + create_parent_groups(Opts, [], ParentParts) + end. +``` + +```erlang +create_parent_groups(Opts, Current, [Next | Rest]) -> + NewCurrent = Current ++ [Next], + GroupPath = to_path(NewCurrent), + % Only create group if it doesn't already exist. +``` + +### path + +Transform a path into the store's canonical form. + +```erlang +-spec make_link(map(), binary() | list(), binary()) -> ok. 
+make_link(Opts, Existing, New) when is_list(Existing) -> + ExistingBin = to_path(Existing), + make_link(Opts, ExistingBin, New); +make_link(Opts, Existing, New) -> + ExistingBin = hb_util:bin(Existing), + % Ensure parent groups exist for the new link path (like filesystem ensure_dir) + ensure_parent_groups(Opts, New), + write(Opts, New, <<"link:", ExistingBin/binary>>). +``` + +```erlang +path(_Opts, PathParts) when is_list(PathParts) -> + to_path(PathParts); +``` + +### path + +Transform a path into the store's canonical form. + +```erlang +-spec make_link(map(), binary() | list(), binary()) -> ok. +make_link(Opts, Existing, New) when is_list(Existing) -> + ExistingBin = to_path(Existing), + make_link(Opts, ExistingBin, New); +make_link(Opts, Existing, New) -> + ExistingBin = hb_util:bin(Existing), + % Ensure parent groups exist for the new link path (like filesystem ensure_dir) + ensure_parent_groups(Opts, New), + write(Opts, New, <<"link:", ExistingBin/binary>>). +``` + +```erlang +path(_Opts, Path) when is_binary(Path) -> + Path. +``` + +### add_path + +Add two path components together. + +```erlang +add_path(_Opts, Path1, Path2) when is_list(Path1), is_list(Path2) -> + Path1 ++ Path2; +``` + +### add_path + +Add two path components together. + +```erlang +add_path(Opts, Path1, Path2) when is_binary(Path1), is_binary(Path2) -> + % Convert binaries to lists, concatenate, then convert back + Parts1 = binary:split(Path1, <<"/">>, [global]), + Parts2 = binary:split(Path2, <<"/">>, [global]), + path(Opts, Parts1 ++ Parts2); +``` + +### add_path + +Add two path components together. + +```erlang +add_path(Opts, Path1, Path2) when is_list(Path1), is_binary(Path2) -> + Parts2 = binary:split(Path2, <<"/">>, [global]), + path(Opts, Path1 ++ Parts2); +``` + +### add_path + +Add two path components together. + +```erlang +add_path(Opts, Path1, Path2) when is_binary(Path1), is_list(Path2) -> + Parts1 = binary:split(Path1, <<"/">>, [global]), + path(Opts, Parts1 ++ Path2). 
+``` + +### find_env + +Retrieve or create the LMDB environment handle for a database. + +```erlang +-spec resolve(map(), binary() | list()) -> binary(). +resolve(Opts, Path) when is_binary(Path) -> + resolve(Opts, binary:split(Path, <<"/">>, [global])); +resolve(Opts, PathParts) when is_list(PathParts) -> + % Handle list paths by resolving directly and converting to binary + case resolve_path_links(Opts, PathParts) of + {ok, ResolvedParts} -> + to_path(ResolvedParts); + {error, _} -> + % If resolution fails, return original path as binary + to_path(PathParts) + end; +resolve(_,_) -> not_found. +``` + +```erlang +find_env(Opts) -> hb_store:find(Opts). +%% Shutdown LMDB environment and cleanup resources +``` + +### stop + +Retrieve or create the LMDB environment handle for a database. + +```erlang +-spec resolve(map(), binary() | list()) -> binary(). +resolve(Opts, Path) when is_binary(Path) -> + resolve(Opts, binary:split(Path, <<"/">>, [global])); +resolve(Opts, PathParts) when is_list(PathParts) -> + % Handle list paths by resolving directly and converting to binary + case resolve_path_links(Opts, PathParts) of + {ok, ResolvedParts} -> + to_path(ResolvedParts); + {error, _} -> + % If resolution fails, return original path as binary + to_path(PathParts) + end; +resolve(_,_) -> not_found. +``` + +```erlang +stop(#{ <<"store-module">> := ?MODULE, <<"name">> := DataDir }) -> + StoreKey = {lmdb, ?MODULE, DataDir}, + close_environment(StoreKey, DataDir); +``` + +### stop + +Retrieve or create the LMDB environment handle for a database. + +```erlang +-spec resolve(map(), binary() | list()) -> binary(). 
+resolve(Opts, Path) when is_binary(Path) -> + resolve(Opts, binary:split(Path, <<"/">>, [global])); +resolve(Opts, PathParts) when is_list(PathParts) -> + % Handle list paths by resolving directly and converting to binary + case resolve_path_links(Opts, PathParts) of + {ok, ResolvedParts} -> + to_path(ResolvedParts); + {error, _} -> + % If resolution fails, return original path as binary + to_path(PathParts) + end; +resolve(_,_) -> not_found. +``` + +```erlang +stop(_InvalidStoreOpts) -> + ok. +``` + +### close_environment + +```erlang +close_environment(StoreKey, DataDir) -> + case safe_get_persistent_term(StoreKey) of + {ok, {Env, DBInstance}} -> + close_and_cleanup(Env, DBInstance, StoreKey, DataDir); + not_found -> + ?event({lmdb_stop_not_found_in_persistent_term, DataDir}), + safe_close_by_name(DataDir) + end, + ok. +``` + +### safe_get_persistent_term + +```erlang +safe_get_persistent_term(Key) -> + case persistent_term:get(Key, undefined) of + {Env, DBInstance, _DataDir} -> {ok, {Env, DBInstance}}; + {Env, _DataDir} -> {ok, {Env, undefined}}; % Backwards compatibility + _ -> not_found + end. +``` + +### close_and_cleanup + +```erlang +close_and_cleanup(Env, DBInstance, StoreKey, DataDir) -> + % Close DB instance first if it exists + DBCloseResult = safe_close_db(DBInstance), + ?event({db_close_result, DBCloseResult}), + % Then close the environment + EnvCloseResult = safe_close_env(Env), + persistent_term:erase(StoreKey), + case EnvCloseResult of + ok -> ?event({lmdb_stop_success, DataDir}); + {error, Reason} -> ?event({lmdb_stop_error, Reason}) + end. +``` + +### safe_close_db + +```erlang +safe_close_db(undefined) -> + ok; % No DB instance to close +``` + +### safe_close_db + +```erlang +safe_close_db(DBInstance) -> + try + elmdb:db_close(DBInstance) + catch + error:Reason -> {error, Reason} + end. +``` + +### safe_close_env + +```erlang +safe_close_env(Env) -> + try + elmdb:env_close(Env) + catch + error:Reason -> {error, Reason} + end. 
+``` + +### safe_close_by_name + +```erlang +safe_close_by_name(DataDir) -> + try + elmdb:env_close_by_name(binary_to_list(DataDir)) + catch + error:_ -> ok + end. +``` + +### reset + +Completely delete the database directory and all its contents. + +```erlang +reset(Opts) -> + case maps:get(<<"name">>, Opts, undefined) of + undefined -> + % No prefix specified, nothing to reset + ok; + DataDir -> + % Stop the store and remove the database. +``` + +### basic_test + +Test suite demonstrating basic store operations. +Basic store test - verifies fundamental read/write functionality. + +```erlang +basic_test() -> + StoreOpts = #{ + <<"store-module">> => ?MODULE, + <<"name">> => <<"/tmp/store-1">> + }, + reset(StoreOpts), + Res = write(StoreOpts, <<"Hello">>, <<"World2">>), + ?assertEqual(ok, Res), + {ok, Value} = read(StoreOpts, <<"Hello">>), + ?assertEqual(Value, <<"World2">>), + ok = stop(StoreOpts). +``` + +### list_test + +List test - verifies prefix-based key listing functionality. + +```erlang +list_test() -> + StoreOpts = #{ + <<"store-module">> => ?MODULE, + <<"name">> => <<"/tmp/store-2">>, + <<"capacity">> => ?DEFAULT_SIZE + }, + reset(StoreOpts), + ?assertEqual(list(StoreOpts, <<"colors">>), {ok, []}), + % Create immediate children under colors/ + write(StoreOpts, <<"colors/red">>, <<"1">>), + write(StoreOpts, <<"colors/blue">>, <<"2">>), + write(StoreOpts, <<"colors/green">>, <<"3">>), + % Create nested directories under colors/ - these should show up as immediate children + write(StoreOpts, <<"colors/multi/foo">>, <<"4">>), + write(StoreOpts, <<"colors/multi/bar">>, <<"5">>), + write(StoreOpts, <<"colors/primary/red">>, <<"6">>), + write(StoreOpts, <<"colors/primary/blue">>, <<"7">>), + write(StoreOpts, <<"colors/nested/deep/value">>, <<"8">>), + % Create other top-level directories + write(StoreOpts, <<"foo/bar">>, <<"baz">>), + write(StoreOpts, <<"beep/boop">>, <<"bam">>), + read(StoreOpts, <<"colors">>), + % Test listing colors/ - should return 
immediate children only + {ok, ListResult} = list(StoreOpts, <<"colors">>), + ?event({list_result, ListResult}), + % Expected: red, blue, green (files) + multi, primary, nested (directories) + % Should NOT include deeply nested items like foo, bar, deep, value + ExpectedChildren = [<<"blue">>, <<"green">>, <<"multi">>, <<"nested">>, <<"primary">>, <<"red">>], + ?assert(lists:all(fun(Key) -> lists:member(Key, ExpectedChildren) end, ListResult)), + % Test listing a nested directory - should only show immediate children + {ok, NestedListResult} = list(StoreOpts, <<"colors/multi">>), + ?event({nested_list_result, NestedListResult}), + ExpectedNestedChildren = [<<"bar">>, <<"foo">>], + ?assert(lists:all(fun(Key) -> lists:member(Key, ExpectedNestedChildren) end, NestedListResult)), + % Test listing a deeper nested directory + {ok, DeepListResult} = list(StoreOpts, <<"colors/nested">>), + ?event({deep_list_result, DeepListResult}), + ExpectedDeepChildren = [<<"deep">>], + ?assert(lists:all(fun(Key) -> lists:member(Key, ExpectedDeepChildren) end, DeepListResult)), + ok = stop(StoreOpts). +``` + +### group_test + +Group test - verifies group creation and type detection. +Link test - verifies symbolic link creation and resolution. + +```erlang +group_test() -> + StoreOpts = #{ + <<"store-module">> => ?MODULE, + <<"name">> => <<"/tmp/store3">>, + <<"capacity">> => ?DEFAULT_SIZE + }, + reset(StoreOpts), + make_group(StoreOpts, <<"colors">>), + % Groups should be detected as composite types + ?assertEqual(composite, type(StoreOpts, <<"colors">>)), + % Groups should not be readable directly (like directories in filesystem) + ?assertEqual(not_found, read(StoreOpts, <<"colors">>)). +%% +``` + +### link_test + +Group test - verifies group creation and type detection. +Link test - verifies symbolic link creation and resolution. 
+ +```erlang +link_test() -> + StoreOpts = #{ + <<"store-module">> => ?MODULE, + <<"name">> => <<"/tmp/store3">>, + <<"capacity">> => ?DEFAULT_SIZE + }, + reset(StoreOpts), + write(StoreOpts, <<"foo/bar/baz">>, <<"Bam">>), + make_link(StoreOpts, <<"foo/bar/baz">>, <<"foo/beep/baz">>), + {ok, Result} = read(StoreOpts, <<"foo/beep/baz">>), + ?event({ result, Result}), + ?assertEqual(<<"Bam">>, Result). +``` + +### link_fragment_test + +Group test - verifies group creation and type detection. +Link test - verifies symbolic link creation and resolution. +Type test - verifies type detection for both simple and composite entries. + +```erlang +link_fragment_test() -> + StoreOpts = #{ + <<"store-module">> => ?MODULE, + <<"name">> => <<"/tmp/store3">>, + <<"capacity">> => ?DEFAULT_SIZE + }, + reset(StoreOpts), + write(StoreOpts, [<<"data">>, <<"bar">>, <<"baz">>], <<"Bam">>), + make_link(StoreOpts, [<<"data">>, <<"bar">>], <<"my-link">>), + {ok, Result} = read(StoreOpts, [<<"my-link">>, <<"baz">>]), + ?event({ result, Result}), + ?assertEqual(<<"Bam">>, Result). +%% +``` + +### type_test + +Group test - verifies group creation and type detection. +Link test - verifies symbolic link creation and resolution. +Type test - verifies type detection for both simple and composite entries. + +```erlang +type_test() -> + StoreOpts = #{ + <<"store-module">> => ?MODULE, + <<"name">> => <<"/tmp/store-6">>, + <<"capacity">> => ?DEFAULT_SIZE + }, + reset(StoreOpts), + make_group(StoreOpts, <<"assets">>), + Type = type(StoreOpts, <<"assets">>), + ?event({type, Type}), + ?assertEqual(composite, Type), + write(StoreOpts, <<"assets/1">>, <<"bam">>), + Type2 = type(StoreOpts, <<"assets/1">>), + ?event({type2, Type2}), + ?assertEqual(simple, Type2). +``` + +### link_key_list_test + +Link key list test - verifies symbolic link creation using structured key paths. +Path traversal link test - verifies link resolution during path traversal. 
+ +```erlang +link_key_list_test() -> + StoreOpts = #{ + <<"store-module">> => ?MODULE, + <<"name">> => <<"/tmp/store-7">>, + <<"capacity">> => ?DEFAULT_SIZE + }, + reset(StoreOpts), + write(StoreOpts, [ <<"parent">>, <<"key">> ], <<"value">>), + make_link(StoreOpts, [ <<"parent">>, <<"key">> ], <<"my-link">>), + {ok, Result} = read(StoreOpts, <<"my-link">>), + ?event({result, Result}), + ?assertEqual(<<"value">>, Result). +%% +%% +``` + +### path_traversal_link_test + +Link key list test - verifies symbolic link creation using structured key paths. +Path traversal link test - verifies link resolution during path traversal. + +```erlang +path_traversal_link_test() -> + StoreOpts = #{ + <<"store-module">> => ?MODULE, + <<"name">> => <<"/tmp/store-8">>, + <<"capacity">> => ?DEFAULT_SIZE + }, + reset(StoreOpts), + % Create the actual data at group/key + write(StoreOpts, [<<"group">>, <<"key">>], <<"target-value">>), + % Create a link from "link" to "group" + make_link(StoreOpts, <<"group">>, <<"link">>), + % Reading via the link path should resolve to the target value + {ok, Result} = read(StoreOpts, [<<"link">>, <<"key">>]), + ?event({path_traversal_result, Result}), + ?assertEqual(<<"target-value">>, Result), + ok = stop(StoreOpts). 
+``` + +### exact_hb_store_test + +Test that matches the exact hb_store hierarchical test pattern + +```erlang +exact_hb_store_test() -> + StoreOpts = #{ + <<"store-module">> => ?MODULE, + <<"name">> => <<"/tmp/store-exact">>, + <<"capacity">> => ?DEFAULT_SIZE + }, + % Follow exact same pattern as hb_store test + ?event(step1_make_group), + make_group(StoreOpts, <<"test-dir1">>), + ?event(step2_write_file), + write(StoreOpts, [<<"test-dir1">>, <<"test-file">>], <<"test-data">>), + ?event(step3_make_link), + make_link(StoreOpts, [<<"test-dir1">>], <<"test-link">>), + % Debug: test that the link behaves like the target (groups are unreadable) + ?event(step4_check_link), + LinkResult = read(StoreOpts, <<"test-link">>), + ?event({link_result, LinkResult}), + % Since test-dir1 is a group and groups are unreadable, the link should also be unreadable + ?assertEqual(not_found, LinkResult), + % Debug: test intermediate steps + ?event(step5_test_direct_read), + DirectResult = read(StoreOpts, <<"test-dir1/test-file">>), + ?event({direct_result, DirectResult}), + % This should work: reading via the link path + ?event(step6_test_link_read), + Result = read(StoreOpts, [<<"test-link">>, <<"test-file">>]), + ?event({final_result, Result}), + ?assertEqual({ok, <<"test-data">>}, Result), + ok = stop(StoreOpts). +``` + +### cache_style_test + +Test cache-style usage through hb_store interface + +```erlang +cache_style_test() -> + hb:init(), + StoreOpts = #{ + <<"store-module">> => ?MODULE, + <<"name">> => <<"/tmp/store-cache-style">>, + <<"capacity">> => ?DEFAULT_SIZE + }, + reset(StoreOpts), + % Start the store + hb_store:start(StoreOpts), + % Test writing through hb_store interface + ok = hb_store:write(StoreOpts, <<"test-key">>, <<"test-value">>), + % Test reading through hb_store interface + Result = hb_store:read(StoreOpts, <<"test-key">>), + ?event({cache_style_read_result, Result}), + ?assertEqual({ok, <<"test-value">>}, Result), + hb_store:stop(StoreOpts). 
+``` + +### nested_map_cache_test + +Test nested map storage with cache-like linking behavior + +```erlang +nested_map_cache_test() -> + StoreOpts = #{ + <<"store-module">> => ?MODULE, + <<"name">> => <<"/tmp/store-nested-cache">>, + <<"capacity">> => ?DEFAULT_SIZE + }, + % Clean up any previous test data + reset(StoreOpts), + % Original nested map structure + OriginalMap = #{ + <<"target">> => <<"Foo">>, + <<"commitments">> => #{ + <<"key1">> => #{ + <<"alg">> => <<"rsa-pss-512">>, + <<"committer">> => <<"unique-id">> + }, + <<"key2">> => #{ + <<"alg">> => <<"hmac">>, + <<"commiter">> => <<"unique-id-2">> + } + }, + <<"other-key">> => #{ + <<"other-key-key">> => <<"other-key-value">> + } + }, + ?event({original_map, OriginalMap}), + % Step 1: Store each leaf value at data/{hash} + TargetValue = <<"Foo">>, + TargetHash = base64:encode(crypto:hash(sha256, TargetValue)), + write(StoreOpts, <<"data/", TargetHash/binary>>, TargetValue), + AlgValue1 = <<"rsa-pss-512">>, + AlgHash1 = base64:encode(crypto:hash(sha256, AlgValue1)), + write(StoreOpts, <<"data/", AlgHash1/binary>>, AlgValue1), + CommitterValue1 = <<"unique-id">>, + CommitterHash1 = base64:encode(crypto:hash(sha256, CommitterValue1)), + write(StoreOpts, <<"data/", CommitterHash1/binary>>, CommitterValue1), + AlgValue2 = <<"hmac">>, + AlgHash2 = base64:encode(crypto:hash(sha256, AlgValue2)), + write(StoreOpts, <<"data/", AlgHash2/binary>>, AlgValue2), + CommitterValue2 = <<"unique-id-2">>, + CommitterHash2 = base64:encode(crypto:hash(sha256, CommitterValue2)), + write(StoreOpts, <<"data/", CommitterHash2/binary>>, CommitterValue2), + OtherKeyValue = <<"other-key-value">>, + OtherKeyHash = base64:encode(crypto:hash(sha256, OtherKeyValue)), + write(StoreOpts, <<"data/", OtherKeyHash/binary>>, OtherKeyValue), + % Step 2: Create the nested structure with groups and links + % Create the root group + make_group(StoreOpts, <<"root">>), + % Create links for the root level keys + make_link(StoreOpts, <<"data/", 
TargetHash/binary>>, <<"root/target">>), + % Create the commitments subgroup + make_group(StoreOpts, <<"root/commitments">>), + % Create the key1 subgroup within commitments + make_group(StoreOpts, <<"root/commitments/key1">>), + make_link(StoreOpts, <<"data/", AlgHash1/binary>>, <<"root/commitments/key1/alg">>), + make_link(StoreOpts, <<"data/", CommitterHash1/binary>>, <<"root/commitments/key1/committer">>), + % Create the key2 subgroup within commitments + make_group(StoreOpts, <<"root/commitments/key2">>), + make_link(StoreOpts, <<"data/", AlgHash2/binary>>, <<"root/commitments/key2/alg">>), + make_link(StoreOpts, <<"data/", CommitterHash2/binary>>, <<"root/commitments/key2/commiter">>), + % Create the other-key subgroup + make_group(StoreOpts, <<"root/other-key">>), + make_link(StoreOpts, <<"data/", OtherKeyHash/binary>>, <<"root/other-key/other-key-key">>), + % Step 3: Test reading the structure back + % Verify the root is a composite + ?assertEqual(composite, type(StoreOpts, <<"root">>)), + % List the root contents + {ok, RootKeys} = list(StoreOpts, <<"root">>), + ?event({root_keys, RootKeys}), + ExpectedRootKeys = [<<"commitments">>, <<"other-key">>, <<"target">>], + ?assert(lists:all(fun(Key) -> lists:member(Key, ExpectedRootKeys) end, RootKeys)), + % Read the target directly + {ok, TargetValueRead} = read(StoreOpts, <<"root/target">>), + ?assertEqual(<<"Foo">>, TargetValueRead), + % Verify commitments is a composite + ?assertEqual(composite, type(StoreOpts, <<"root/commitments">>)), + % Verify other-key is a composite + ?assertEqual(composite, type(StoreOpts, <<"root/other-key">>)), + % Step 4: Test programmatic reconstruction of the nested map + ReconstructedMap = reconstruct_map(StoreOpts, <<"root">>), + ?event({reconstructed_map, ReconstructedMap}), + % Verify the reconstructed map matches the original structure + ?assert(hb_message:match(OriginalMap, ReconstructedMap)), + stop(StoreOpts). 
+``` + +### reconstruct_map + +```erlang +reconstruct_map(StoreOpts, Path) -> + case type(StoreOpts, Path) of + composite -> + % This is a group, reconstruct it as a map + {ok, ImmediateChildren} = list(StoreOpts, Path), + % The list function now correctly returns only immediate children + ?event({path, Path, immediate_children, ImmediateChildren}), + maps:from_list([ + {Key, reconstruct_map(StoreOpts, <<Path/binary, "/", Key/binary>>)} + || Key <- ImmediateChildren + ]); + simple -> + % This is a simple value, read it directly + {ok, Value} = read(StoreOpts, Path), + Value; + not_found -> + % Path doesn't exist + undefined + end. +``` + +### cache_debug_test + +Debug test to understand cache linking behavior + +```erlang +cache_debug_test() -> + StoreOpts = #{ + <<"store-module">> => ?MODULE, + <<"name">> => <<"/tmp/cache-debug">>, + <<"capacity">> => ?DEFAULT_SIZE + }, + reset(StoreOpts), + % Simulate what the cache does: + % 1. Create a group for message ID + MessageID = <<"test_message_123">>, + make_group(StoreOpts, MessageID), + % 2. Store a value at data/hash + Value = <<"test_value">>, + ValueHash = base64:encode(crypto:hash(sha256, Value)), + DataPath = <<"data/", ValueHash/binary>>, + write(StoreOpts, DataPath, Value), + % 3. Calculate a key hashpath (simplified version) + KeyHashPath = <<MessageID/binary, "/key_hash_abc">>, + % 4. Create link from data path to key hash path + make_link(StoreOpts, DataPath, KeyHashPath), + % 5. Test what the cache would see: + ?event(debug_cache_test, {step, check_message_type}), + MsgType = type(StoreOpts, MessageID), + ?event(debug_cache_test, {message_type, MsgType}), + ?event(debug_cache_test, {step, list_message_contents}), + {ok, Subkeys} = list(StoreOpts, MessageID), + ?event(debug_cache_test, {message_subkeys, Subkeys}), + ?event(debug_cache_test, {step, read_key_hashpath}), + KeyHashResult = read(StoreOpts, KeyHashPath), + ?event(debug_cache_test, {key_hash_read_result, KeyHashResult}), + % 6.
Test with path as list (what cache does): + ?event(debug_cache_test, {step, read_path_as_list}), + PathAsList = [MessageID, <<"key_hash_abc">>], + PathAsListResult = read(StoreOpts, PathAsList), + ?event(debug_cache_test, {path_as_list_result, PathAsListResult}), + stop(StoreOpts). +``` + +### isolated_type_debug_test + +Isolated test focusing on the exact cache issue + +```erlang +isolated_type_debug_test() -> + StoreOpts = #{ + <<"store-module">> => ?MODULE, + <<"name">> => <<"/tmp/isolated-debug">>, + <<"capacity">> => ?DEFAULT_SIZE + }, + reset(StoreOpts), + % Create the exact scenario from user's description: + % 1. A message ID with nested structure + MessageID = <<"message123">>, + make_group(StoreOpts, MessageID), + % 2. Create nested groups for "commitments" and "other-test-key" + CommitmentsPath = <<MessageID/binary, "/commitments">>, + OtherKeyPath = <<MessageID/binary, "/other-test-key">>, + ?event(isolated_debug, {creating_nested_groups, CommitmentsPath, OtherKeyPath}), + make_group(StoreOpts, CommitmentsPath), + make_group(StoreOpts, OtherKeyPath), + % 3. Add some actual data within those groups + write(StoreOpts, <<CommitmentsPath/binary, "/sig1">>, <<"signature_data_1">>), + write(StoreOpts, <<OtherKeyPath/binary, "/nested">>, <<"nested_value">>), + % 4. Test type detection on the nested paths + ?event(isolated_debug, {testing_main_message_type}), + MainType = type(StoreOpts, MessageID), + ?event(isolated_debug, {main_message_type, MainType}), + ?event(isolated_debug, {testing_commitments_type}), + CommitmentsType = type(StoreOpts, CommitmentsPath), + ?event(isolated_debug, {commitments_type, CommitmentsType}), + ?event(isolated_debug, {testing_other_key_type}), + OtherKeyType = type(StoreOpts, OtherKeyPath), + ?event(isolated_debug, {other_key_type, OtherKeyType}), + % 5.
Test what happens when reading these nested paths + ?event(isolated_debug, {reading_commitments_directly}), + CommitmentsResult = read(StoreOpts, CommitmentsPath), + ?event(isolated_debug, {commitments_read_result, CommitmentsResult}), + ?event(isolated_debug, {reading_other_key_directly}), + OtherKeyResult = read(StoreOpts, OtherKeyPath), + ?event(isolated_debug, {other_key_read_result, OtherKeyResult}), + stop(StoreOpts). +``` + +### list_with_link_test + +Test that list function resolves links correctly + +```erlang +list_with_link_test() -> + StoreOpts = #{ + <<"store-module">> => ?MODULE, + <<"name">> => <<"/tmp/store-list-link">>, + <<"capacity">> => ?DEFAULT_SIZE + }, + reset(StoreOpts), + % Create a group with some children + make_group(StoreOpts, <<"real-group">>), + write(StoreOpts, <<"real-group/child1">>, <<"value1">>), + write(StoreOpts, <<"real-group/child2">>, <<"value2">>), + write(StoreOpts, <<"real-group/child3">>, <<"value3">>), + % Create a link to the group + make_link(StoreOpts, <<"real-group">>, <<"link-to-group">>), + % List the real group to verify expected children + {ok, RealGroupChildren} = list(StoreOpts, <<"real-group">>), + ?event({real_group_children, RealGroupChildren}), + ExpectedChildren = [<<"child1">>, <<"child2">>, <<"child3">>], + ?assertEqual(ExpectedChildren, lists:sort(RealGroupChildren)), + % List via the link - should return the same children + {ok, LinkChildren} = list(StoreOpts, <<"link-to-group">>), + ?event({link_children, LinkChildren}), + ?assertEqual(ExpectedChildren, lists:sort(LinkChildren)), +``` + +--- + +*Generated from [hb_store_lmdb.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_lmdb.erl)* diff --git a/docs/book/src/hb_store_lru.erl.md b/docs/book/src/hb_store_lru.erl.md new file mode 100644 index 000000000..be7bb3405 --- /dev/null +++ b/docs/book/src/hb_store_lru.erl.md @@ -0,0 +1,1078 @@ +# hb_store_lru + +[View source on 
GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_lru.erl) + +An in-memory store implementation, following the `hb_store` behavior +and interface. This implementation uses a least-recently-used cache first, +and offloads evicted data to a specified non-volatile store over time. +This cache is registered under `{in_memory, HTTPServerID}`, in `hb_name` +so that all processes that are executing using the HTTP server’s Opts +can find it quickly. +The least-recently-used strategy (first is the most recent used, last is the +least recently used) is implemented by keeping track of the order and bytes + on ets tables: +- A cache table containing all the entries along with the value size and + key index. +- A cache indexing table containing all the index pointing to the keys. The + IDs are then sorted to ease the eviction policy. +- A cache statistics table containing all the information about the cache + size, capacity, and indexing. + +--- + +## Exported Functions + +- `list/2` +- `make_group/2` +- `make_link/3` +- `read/2` +- `reset/1` +- `resolve/2` +- `scope/1` +- `start/1` +- `stop/1` +- `type/2` +- `write/3` + +--- + +### start + +An in-memory store implementation, following the `hb_store` behavior +The default capacity is used when no capacity is provided in the store +Maximum number of retries when fetching cache entries that aren't +Start the LRU cache. + +```erlang +start(StoreOpts = #{ <<"name">> := Name }) -> + ?event(cache_lru, {starting_lru_server, Name}), + From = self(), + spawn( + fun() -> + State = init(From, StoreOpts), + server_loop(State, StoreOpts) + end + ), + receive + {ok, InstanceMessage} -> {ok, InstanceMessage} + end. +``` + +### init + +Create the `ets` tables for the LRU cache: + +```erlang +init(From, StoreOpts) -> + % Start the persistent store. 
+``` + +### stop + +Stop the in-memory LRU store by offloading the keys in the ETS tables to the persistent store + +```erlang +stop(Opts) -> + ?event(cache_lru, {stopping_lru_server, Opts}), + #{ <<"pid">> := CacheServer } = hb_store:find(Opts), + CacheServer ! {stop, self(), Ref = make_ref()}, + receive + {ok, Ref} -> ok + end. +``` + +### scope + +The LRU store is always local, for now. + + +```erlang +scope(_) -> local. +``` + +### reset + +Reset the store by completely cleaning the ETS tables and, if a +persistent store is configured, resetting it as well. + +```erlang +reset(Opts) -> + #{ <<"pid">> := CacheServer } = hb_store:find(Opts), + CacheServer ! {reset, self(), Ref = make_ref()}, + receive + {ok, Ref} -> + ?event({reset_store, {in_memory, CacheServer}}), + case get_persistent_store(Opts) of + no_store -> + ok; + Store -> + hb_store:reset(Store) + end + end. +``` + +### server_loop + +```erlang +server_loop(State = + #{cache_table := CacheTable, + stats_table := StatsTable, + index_table := IndexTable}, + Opts) -> + receive + {sync, From} -> + From ! {ok, self()}, + server_loop(State, Opts); + {get_cache_table, From} -> + From ! CacheTable; + {put, Key, Value, From, Ref} -> + put_cache_entry(State, Key, Value, Opts), + ?event(debug_lru, {put, {key, Key}, {value, Value}}), + From ! {ok, Ref}; + {link, Existing, New, From, Ref} -> + link_cache_entry(State, Existing, New, Opts), + From ! {ok, Ref}; + {make_group, Key, From, Ref} -> + ?event(debug_lru, {make_group, Key}), + ensure_dir(State, Key), + From ! {ok, Ref}; + {update_recent, Key, Entry, From, Ref} -> + update_recently_used(State, Key, Entry), + From ! {ok, Ref}; + {reset, From, Ref} -> + ets:delete_all_objects(CacheTable), + ets:delete_all_objects(StatsTable), + ets:delete_all_objects(IndexTable), + From ! {ok, Ref}; + {stop, From, Ref} -> + evict_all_entries(State, Opts), + From ! {ok, Ref}, + exit(self(), ok) + end, + server_loop(State, Opts).
+``` + +### sync + +Force the caller to wait until the server has fully processed all + +```erlang +sync(Server) -> + Server ! {sync, self()}, + receive + {ok, Server} -> ok + end. +``` + +### write + +Write an entry in the cache. + +```erlang +write(Opts, RawKey, Value) -> + Key = hb_store:join(RawKey), + #{ <<"pid">> := CacheServer } = hb_store:find(Opts), + CacheServer ! {put, Key, Value, self(), Ref = make_ref()}, + receive + {ok, Ref} -> ok + end. +``` + +### read + +Retrieve value in the cache from the given key. + +```erlang +read(Opts, RawKey) -> + #{ <<"pid">> := Server } = hb_store:find(Opts), + Key = resolve(Opts, RawKey), + case fetch_cache_with_retry(Opts, Key) of + nil -> + case get_persistent_store(Opts) of + no_store -> + not_found; + PersistentStore -> + % FIXME: It might happens some links can be in LRU while data on + % the permanent store and resolve doesn't produce the same key. +``` + +### resolve + +```erlang +resolve(Opts, Key) -> + Res = resolve(Opts, "", hb_path:term_to_path_parts(hb_store:join(Key), Opts)), + ?event({resolved, Key, Res}), + Res. +``` + +### resolve + +```erlang +resolve(_, CurrPath, []) -> + hb_store:join(CurrPath); +``` + +### resolve + +```erlang +resolve(Opts, CurrPath, [Next|Rest]) -> + PathPart = hb_store:join([CurrPath, Next]), + ?event( + {resolving, + {accumulated_path, CurrPath}, + {next_segment, Next}, + {generated_partial_path_to_test, PathPart} + } + ), + case fetch_cache_with_retry(Opts, PathPart) of + {link, Link} -> + resolve(Opts, Link, Rest); + _ -> + resolve(Opts, PathPart, Rest) + end. +``` + +### make_link + +Make a link from a key to another in the store. + +```erlang +make_link(_, Link, Link) -> + ok; +``` + +### make_link + +Make a link from a key to another in the store. 
+ +```erlang +make_link(Opts, RawExisting, New) -> + #{ <<"pid">> := Server } = hb_store:find(Opts), + ExistingKeyBin = convert_if_list(RawExisting), + NewKeyBin = convert_if_list(New), + case fetch_cache_with_retry(Opts, ExistingKeyBin) of + nil -> + case get_persistent_store(Opts) of + no_store -> + not_found; + Store -> + hb_store:make_link(Store, ExistingKeyBin, NewKeyBin) + end; + _ -> + Server ! {link, ExistingKeyBin, NewKeyBin, self(), Ref = make_ref()}, + receive + {ok, Ref} -> + ok + end + end. +``` + +### list + +List all the keys registered. + +```erlang +list(Opts, Path) -> + PersistentKeys = + case get_persistent_store(Opts) of + no_store -> + not_found; + Store -> + ResolvedPath = hb_store:resolve(Store, Path), + case hb_store:list(Store, ResolvedPath) of + {ok, Keys} -> Keys; + not_found -> not_found + end + end, + case {ets_keys(Opts, Path), PersistentKeys} of + {not_found, not_found} -> + not_found; + {InMemoryKeys, not_found} -> + {ok, InMemoryKeys}; + {not_found, PersistentKeys} -> + {ok, PersistentKeys}; + {InMemoryKeys, PersistentKeys} -> + {ok, hb_util:unique(InMemoryKeys ++ PersistentKeys)} + end. +``` + +### ets_keys + +List all of the keys in the store for a given path, supporting a special + +```erlang +ets_keys(Opts, <<"">>) -> ets_keys(Opts, <<"/">>); +``` + +### ets_keys + +List all of the keys in the store for a given path, supporting a special + +```erlang +ets_keys(Opts, <<"/">>) -> + #{ <<"cache-table">> := Table } = hb_store:find(Opts), + table_keys(Table, undefined); +``` + +### ets_keys + +List all of the keys in the store for a given path, supporting a special + +```erlang +ets_keys(Opts, Path) -> + case fetch_cache_with_retry(Opts, Path) of + {group, Set} -> + sets:to_list(Set); + {link, Link} -> + list(Opts, Link); + {raw, #{value := Value}} when is_map(Value) -> + maps:keys(Value); + {raw, #{value := Value}} when is_list(Value) -> + Value; + nil -> + not_found + end. 
+``` + +### type + +Determine the type of a key in the store. + +```erlang +type(Opts, Key) -> + case fetch_cache_with_retry(Opts, Key) of + nil -> + case get_persistent_store(Opts) of + no_store -> + not_found; + Store -> + ResolvedKey = hb_store:resolve(Store, Key), + hb_store:type(Store, ResolvedKey) + end; + {raw, _} -> + simple; + {link, NewKey} -> + type(Opts, NewKey); + {group, _Item} -> + composite + end. +``` + +### make_group + +Create a directory inside the store. + +```erlang +make_group(Opts, Key) -> + #{ <<"pid">> := Server } = hb_store:find(Opts), + Server ! {make_group, Key, self(), Ref = make_ref()}, + receive + {ok, Ref} -> + ok + end. +``` + +### table_keys + +```erlang +table_keys(TableName) -> + table_keys(TableName, undefined). +``` + +### table_keys + +```erlang +table_keys(TableName, Prefix) -> + FirstKey = ets:first(TableName), + table_keys(TableName, FirstKey, Prefix, []). +``` + +### table_keys + +```erlang +table_keys(_TableName, '$end_of_table', _Prefix, Acc) -> + Acc; +``` + +### table_keys + +```erlang +table_keys(TableName, CurrentKey, Prefix, Acc) -> + NextKey = ets:next(TableName, CurrentKey), + case Prefix of + undefined -> + table_keys(TableName, NextKey, Prefix, [CurrentKey | Acc]); + _ -> + PrefixParts = hb_path:term_to_path_parts(Prefix), + Key = hb_path:term_to_path_parts(CurrentKey), + case lists:prefix(PrefixParts, Key) of + true -> + Extracted = lists:nthtail(length(PrefixParts), Key), + table_keys( + TableName, + NextKey, + Prefix, + [hb_path:to_binary(Extracted) | Acc] + ); + false -> + table_keys(TableName, NextKey, Prefix, Acc) + end + end. +``` + +### get_cache_entry + +```erlang +get_cache_entry(#{cache_table := Table}, Key) -> + get_cache_entry(Table, Key); +``` + +### get_cache_entry + +```erlang +get_cache_entry(Table, Key) -> + case ets:lookup(Table, Key) of + [] -> + nil; + [{_, Entry}] -> + Entry + end. 
+``` + +### fetch_cache_with_retry + +```erlang +fetch_cache_with_retry(Opts, Key) -> + fetch_cache_with_retry(Opts, Key, 1). +``` + +### fetch_cache_with_retry + +```erlang +fetch_cache_with_retry(Opts, Key, Retries) -> + #{<<"cache-table">> := Table, <<"pid">> := Server} = hb_store:find(Opts), + case get_cache_entry(Table, Key) of + nil -> + case Retries < ?RETRY_THRESHOLD of + true -> + sync(Server), + fetch_cache_with_retry(Opts, Key, Retries + 1); + false -> + nil + end; + Entry -> + Entry + end. +``` + +### put_cache_entry + +```erlang +put_cache_entry(State, Key, Value, Opts) -> + ValueSize = erlang:external_size(Value), + CacheSize = cache_size(State), + ?event(cache_lru, {putting_entry, {size, ValueSize}, {opts, Opts}, {cache_size, CacheSize}}), + Capacity = hb_maps:get(<<"capacity">>, Opts, ?DEFAULT_LRU_CAPACITY), + case get_cache_entry(State, Key) of + nil -> + % For new entries, we check if the size will the fit the full + % capacity (even by evicting keys). +``` + +### handle_group + +```erlang +handle_group(State, Key, Opts) -> + case filename:dirname(hb_store:join(Key)) of + <<".">> -> undefined ; + BaseDir -> + case maps:get(mode, Opts, undefined) of + offload -> + Store = get_persistent_store(Opts), + ?event(cache_lru, {create_group, BaseDir}), + hb_store:make_group(Store, BaseDir), + BaseDir; + undefined -> + ensure_dir(State, BaseDir), + {group, Entry} = get_cache_entry(State, BaseDir), + BaseName = filename:basename(Key), + NewGroup = append_key_to_group(BaseName, Entry), + add_cache_entry(State, BaseDir, {group, NewGroup}), + BaseDir + end + end. +``` + +### ensure_dir + +```erlang +ensure_dir(State, Path) -> + PathParts = hb_path:term_to_path_parts(Path), + [First | Rest] = PathParts, + Result = ensure_dir(State, First, Rest), + Result. 
+``` + +### ensure_dir + +```erlang +ensure_dir(State, CurrentPath, []) -> + maybe_create_dir(State, CurrentPath, nil); +``` + +### ensure_dir + +```erlang +ensure_dir(State, CurrentPath, [Next]) -> + maybe_create_dir(State, CurrentPath, Next), + ensure_dir(State, hb_store:join([CurrentPath, Next]), []); +``` + +### ensure_dir + +```erlang +ensure_dir(State, CurrentPath, [Next | Rest]) -> + maybe_create_dir(State, CurrentPath, Next), + ensure_dir(State, hb_store:join([CurrentPath, Next]), Rest). +``` + +### maybe_create_dir + +```erlang +maybe_create_dir(State, DirPath, Value) -> + CurrentValueSet = + case get_cache_entry(State, DirPath) of + nil -> + sets:new(); + {group, CurrentValue} -> + CurrentValue + end, + NewValueSet = + case Value of + nil -> + CurrentValueSet; + _ -> + sets:add_element(Value, CurrentValueSet) + end, + ?event(cache_lru, {create_group, DirPath, sets:to_list(NewValueSet)}), + add_cache_entry(State, DirPath, {group, NewValueSet}). +``` + +### append_key_to_group + +```erlang +append_key_to_group(Key, Group) -> + BaseName = filename:basename(Key), + sets:add_element(BaseName, Group). +``` + +### assign_new_entry + +```erlang +assign_new_entry(State, Key, Value, ValueSize, Capacity, Group, Opts) -> + case cache_size(State) + ValueSize >= Capacity of + true -> + ?event(cache_lru, eviction_required), + evict_oldest_entry(State, ValueSize, Opts); + false -> + ok + end, + ID = get_index_id(State), + add_cache_index(State, ID, Key), + add_cache_entry( + State, + Key, + {raw, + #{ + value => Value, + id => ID, + size => ValueSize, + group => Group + } + } + ), + increase_cache_size(State, ValueSize). +``` + +### cache_size + +```erlang +cache_size(#{stats_table := Table}) -> + case ets:lookup(Table, size) of + [{_, Size}] -> + Size; + _ -> + 0 + end. +``` + +### get_index_id + +```erlang +get_index_id(#{stats_table := StatsTable}) -> + ets:update_counter(StatsTable, id, {2, 1}, {0, 0}). 
+``` + +### add_cache_entry + +```erlang +add_cache_entry(#{cache_table := Table}, Key, Value) -> + ets:insert(Table, {Key, Value}). +``` + +### add_cache_index + +```erlang +add_cache_index(#{index_table := Table}, ID, Key) -> + ets:insert(Table, {ID, Key}). +``` + +### link_cache_entry + +```erlang +link_cache_entry(State = #{cache_table := Table}, Existing, New, Opts) -> + ?event(cache_lru, {link, Existing, New}), + % Remove the link from the previous linked entry + clean_old_link(Table, New), + _ = handle_group(State, New, Opts), + ets:insert(Table, {New, {link, Existing}}), + % Add links to the linked entry + case ets:lookup(Table, Existing) of + [{_, {raw, Entry}}] -> + NewLinks = + case Entry of + #{links := ExistingLinks} -> + [New | ExistingLinks]; + _ -> + [New] + end, + ets:insert(Table, {Existing, {raw, Entry#{links => NewLinks}}}); + _ -> + ignore + end. +``` + +### clean_old_link + +Remove the link association for the the old linked data to the given key + +```erlang +clean_old_link(Table, Link) -> + case ets:lookup(Table, Link) of + [{_, {link, PreviousEntry}}] -> + ?event(cache_lru, {removing_previous_link, + {link, Link}, + {previous_entry, PreviousEntry} + }), + case ets:lookup(Table, PreviousEntry) of + [{_, {raw, OldEntry}}] -> + Links = sets:from_list(maps:get(links, OldEntry, [])), + UpdatedLinks = sets:del_element(Link, Links), + UpdatedEntry = maps:put( + links, + sets:to_list(UpdatedLinks), + OldEntry + ), + ets:insert(Table, {PreviousEntry, {raw, UpdatedEntry}}); + _ -> + skip + end; + _ -> skip + end. +``` + +### increase_cache_size + +```erlang +increase_cache_size(#{stats_table := StatsTable}, ValueSize) -> + ets:update_counter(StatsTable, size, {2, ValueSize}, {0, 0}). +``` + +### evict_oldest_entry + +```erlang +evict_oldest_entry(State, ValueSize, Opts) -> + evict_oldest_entry(State, ValueSize, 0, Opts). 
+``` + +### evict_oldest_entry + +```erlang +evict_oldest_entry(_State, ValueSize, FreeSize, _Opts) when FreeSize >= ValueSize -> + ok; +``` + +### evict_oldest_entry + +```erlang +evict_oldest_entry(State, ValueSize, FreeSize, Opts) -> + case cache_tail_key(State) of + nil -> + ok; + TailKey -> + Entry = #{ + size := ReclaimedSize, + id := ID, + value := TailValue, + group := Group + } = case get_cache_entry(State, TailKey) of + nil -> + % Raises a runtime error as this represents + % a non-recoverable error. This would signifies a + % inconsistency between the index and the cache table. +``` + +### evict_all_entries + +```erlang +evict_all_entries(#{cache_table := Table}, Opts) -> + lists:foreach( + fun(Key) -> + [{_, {raw, Entry}}] = ets:lookup(Table, Key), + #{ value := Value, group := Group } = Entry, + Links = maps:get(links, Entry, []), + offload_to_store(Key, Value, Links, Group, Opts) + end, + table_keys(Table) + ). +``` + +### offload_to_store + +```erlang +offload_to_store(TailKey, TailValue, Links, Group, Opts) -> + ?event(lru_offload, {offloading_to_store, Opts}), + FoundStore = get_persistent_store(Opts), + ?event(lru_offload, {found_store, FoundStore}), + case FoundStore of + no_store -> + ok; + Store -> + case Group of + undefined -> + ignore; + _ -> + hb_store:make_group(Store, Group) + end, + case hb_store:write(Store, TailKey, TailValue) of + ok -> + lists:foreach( + fun(Link) -> + ResolvedPath = resolve(Opts, Link), + hb_store:make_link(Store, ResolvedPath, Link) + end, + Links + ), + ?event(cache_lru, {offloaded_key, TailKey}), + ok; + Err -> + ?event(warning, {error_offloading_to_local_cache, Err}), + {error, Err} + end + end. +``` + +### cache_tail_key + +```erlang +cache_tail_key(#{index_table := Table}) -> + case ets:first(Table) of + '$end_of_table' -> + nil; + FirstID -> + [{_, Key}] = ets:lookup(Table, FirstID), + Key + end. 
+``` + +### delete_cache_index + +```erlang +delete_cache_index(#{index_table := IndexTable}, ID) -> + ets:delete(IndexTable, ID). +``` + +### delete_cache_entry + +```erlang +delete_cache_entry(#{cache_table := Table}, Key) -> + ets:delete(Table, Key), + ?event(cache_lru, {deleted, Key}). +``` + +### decrease_cache_size + +```erlang +decrease_cache_size(#{stats_table := Table}, Size) -> + ets:update_counter(Table, size, {2, -Size, 0, 0}). +``` + +### replace_entry + +```erlang +replace_entry(State, Key, Value, ValueSize, {raw, OldEntry = #{ value := OldValue}}) when Value =/= OldValue -> + % Update entry and move the keys in the front of the cache + % as the most used Key + ?event(debug_lru, {replace_entry, + {key, Key}, + {value, Value}, + {explicit, OldEntry} + }), + #{size := PreviousSize} = OldEntry, + NewEntry = OldEntry#{value := Value, size := ValueSize}, + add_cache_entry(State, Key, {raw, NewEntry}), + update_recently_used(State, Key, NewEntry), + update_cache_size(State, PreviousSize, ValueSize); +``` + +### replace_entry + +```erlang +replace_entry(_State, _Key, _Value, _ValueSize, {raw, _}) -> ok; +``` + +### replace_entry + +```erlang +replace_entry(_State, _Key, _Value, _ValueSize, {Type, _}) -> + % Link or group should be handle directly with `make_link` or `make_group` + % This aim of this function is to be used along with direct data insertion. +``` + +### update_recently_used + +```erlang +update_recently_used(State, Key, Entry) -> + % Acquire a new ID + NewID = get_index_id(State), + % Update the entry's ID + add_cache_entry(State, Key, {raw, Entry#{id := NewID}}), + #{id := PreviousID} = Entry, + % Delete previous ID to priorize the new NewID + delete_cache_index(State, PreviousID), + add_cache_index(State, NewID, Key). +``` + +### update_cache_size + +```erlang +update_cache_size(#{stats_table := Table}, PreviousSize, NewSize) -> + ets:update_counter(Table, size, [{2, -PreviousSize}, {2, NewSize}]). 
+``` + +### get_persistent_store + +```erlang +get_persistent_store(Opts) -> + hb_maps:get( + <<"persistent-store">>, + Opts, + no_store + ). +``` + +### convert_if_list + +```erlang +convert_if_list(Value) when is_list(Value) -> + join(Value); % Perform the conversion if it's a list +``` + +### convert_if_list + +```erlang +convert_if_list(Value) -> + Value. +``` + +### join + +```erlang +join(Key) when is_list(Key) -> + KeyList = hb_store:join(Key), + maybe_convert_to_binary(KeyList); +``` + +### join + +```erlang +join(Key) when is_binary(Key) -> Key. +``` + +### maybe_convert_to_binary + +```erlang +maybe_convert_to_binary(Value) when is_list(Value) -> + list_to_binary(Value); +``` + +### maybe_convert_to_binary + +```erlang +maybe_convert_to_binary(Value) when is_binary(Value) -> + Value. +``` + +### test_opts + +Generate a set of options for testing. The default is to use an `fs` + +```erlang +test_opts(PersistentStore) -> + test_opts(PersistentStore, 1000000). +``` + +### test_opts + +```erlang +test_opts(PersistentStore, Capacity) -> + % Set the server ID to a random address. +``` + +### unknown_value_test + +```erlang +unknown_value_test() -> + ?assertEqual(not_found, read(test_opts(default), <<"key1">>)). +``` + +### cache_term_test + +```erlang +cache_term_test() -> + StoreOpts = test_opts(default), + write(StoreOpts, <<"key1">>, <<"Hello">>), + ?assertEqual({ok, <<"Hello">>}, read(StoreOpts, <<"key1">>)). +``` + +### evict_oldest_items_test + +```erlang +evict_oldest_items_test() -> + StoreOpts = test_opts(no_store, 500), + Binary = crypto:strong_rand_bytes(200), + write(StoreOpts, <<"key1">>, Binary), + write(StoreOpts, <<"key2">>, Binary), + read(StoreOpts, <<"key1">>), + write(StoreOpts, <<"key3">>, Binary), + ?assertEqual({ok, Binary}, read(StoreOpts, <<"key1">>)), + ?assertEqual(not_found, read(StoreOpts, <<"key2">>)). 
+``` + +### evict_items_with_insufficient_space_test + +```erlang +evict_items_with_insufficient_space_test() -> + StoreOpts = test_opts(no_store, 500), + Binary = crypto:strong_rand_bytes(200), + write(StoreOpts, <<"key1">>, Binary), + write(StoreOpts, <<"key2">>, Binary), + write(StoreOpts, <<"key3">>, crypto:strong_rand_bytes(400)), + ?assertEqual(not_found, read(StoreOpts, <<"key1">>)), + ?assertEqual(not_found, read(StoreOpts, <<"key2">>)). +``` + +### evict_but_able_to_read_from_fs_store_test + +```erlang +evict_but_able_to_read_from_fs_store_test() -> + StoreOpts = test_opts(default, 500), + Binary = crypto:strong_rand_bytes(200), + write(StoreOpts, <<"key1">>, Binary), + write(StoreOpts, <<"key2">>, Binary), + read(StoreOpts, <<"key1">>), + write(StoreOpts, <<"key3">>, Binary), + ?assertEqual({ok, Binary}, read(StoreOpts, <<"key1">>)), + ?assertEqual({ok, Binary}, read(StoreOpts, <<"key2">>)), + % Directly offloads if the data is more than the LRU capacity + write(StoreOpts, <<"sub/key">>, crypto:strong_rand_bytes(600)), + ?assertMatch({ok, _}, read(StoreOpts, <<"sub/key">>)). +``` + +### stop_test + +```erlang +stop_test() -> + StoreOpts = test_opts(default, 500), + Binary = crypto:strong_rand_bytes(200), + write(StoreOpts, <<"key1">>, Binary), + write(StoreOpts, <<"key2">>, Binary), + #{ <<"pid">> := ServerPID } = hb_store:find(StoreOpts), + ok = stop(StoreOpts), + ?assertEqual(false, is_process_alive(ServerPID)), + PersistentStore = hb_maps:get(<<"persistent-store">>, StoreOpts), + ?assertEqual({ok, Binary}, hb_store:read(PersistentStore, <<"key1">>)), + ?assertEqual({ok, Binary}, hb_store:read(PersistentStore, <<"key2">>)). 
+``` + +### reset_test + +```erlang +reset_test() -> + StoreOpts = test_opts(default), + write(StoreOpts, <<"key1">>, <<"Hello">>), + write(StoreOpts, <<"key2">>, <<"Hi">>), + reset(StoreOpts), + ?assertEqual(not_found, read(StoreOpts, <<"key1">>)), + #{ <<"cache-table">> := Table } = hb_store:find(StoreOpts), + ?assertEqual([], ets:tab2list(Table)). +``` + +### list_test + +```erlang +list_test() -> + StoreOpts = test_opts(default, 500), + Binary = crypto:strong_rand_bytes(200), + make_group(StoreOpts, <<"sub">>), + write(StoreOpts, <<"hello">>, <<"world">>), + write(StoreOpts, <<"sub/key1">>, Binary), + write(StoreOpts, <<"sub/key2">>, Binary), + {ok, Keys1} = list(StoreOpts, <<"sub">>), + ?assertEqual([<<"key1">>, <<"key2">>], lists:sort(Keys1)), + write(StoreOpts, <<"sub/key3">>, Binary), + {ok, Keys2} = list(StoreOpts, <<"sub">>), + ?assertEqual( + [<<"key1">>, <<"key2">>, <<"key3">>], + lists:sort(Keys2) + ), + write(StoreOpts, <<"sub/inner/key1">>, Binary), + {ok, Keys3} = list(StoreOpts, <<"sub">>), + ?assertEqual([<<"inner">>, <<"key1">>, <<"key2">>, <<"key3">>], + lists:sort(Keys3)), + write(StoreOpts, <<"complex">>, #{<<"a">> => 10, <<"b">> => Binary}), + ?assertEqual({ok, [<<"a">>, <<"b">>]}, list(StoreOpts, <<"complex">>)). +``` + +### type_test + +```erlang +type_test() -> + StoreOpts = test_opts(default, 500), + Binary = crypto:strong_rand_bytes(200), + write(StoreOpts, <<"key1">>, Binary), + ?assertEqual(simple, type(StoreOpts, <<"key1">>)), + write(StoreOpts, <<"sub/key1">>, Binary), + ?assertEqual(composite, type(StoreOpts, <<"sub">>)), + make_link(StoreOpts, <<"key1">>, <<"keylink">>), + ?assertEqual(simple, type(StoreOpts, <<"keylink">>)). 
+``` + +### replace_link_test + +```erlang +replace_link_test() -> + StoreOpts = test_opts(default), + write(StoreOpts, <<"key1">>, <<"Hello">>), + make_link(StoreOpts, <<"key1">>, <<"keylink">>), + ?assertEqual({ok, <<"Hello">>}, read(StoreOpts, <<"keylink">>)), + write(StoreOpts, <<"key2">>, <<"Hello2">>), + make_link(StoreOpts, <<"key2">>, <<"keylink">>), + ?assertEqual({ok, <<"Hello2">>}, read(StoreOpts, <<"keylink">>)), + #{ <<"cache-table">> := Table } = hb_store:find(StoreOpts), + {raw, #{links := Links }}= get_cache_entry(Table, <<"key1">>), +``` + +--- + +*Generated from [hb_store_lru.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_lru.erl)* diff --git a/docs/book/src/hb_store_opts.erl.md b/docs/book/src/hb_store_opts.erl.md new file mode 100644 index 000000000..4a13ee2aa --- /dev/null +++ b/docs/book/src/hb_store_opts.erl.md @@ -0,0 +1,316 @@ +# hb_store_opts + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_opts.erl) + +A module responsible for applying default configuration to store options. +This module takes store options and store defaults and returns a new list +of stores with default properties applied based on the store-module type. +Supports recursive application to nested store configurations. + +--- + +## Exported Functions + +- `apply/2` + +--- + +### apply + +A module responsible for applying default configuration to store options. +Apply store defaults to store options. + +```erlang +apply(StoreOpts, Defaults) when is_list(StoreOpts), is_map(Defaults) -> + lists:map( + fun(StoreOpt) -> + apply_defaults_to_store(StoreOpt, Defaults) + end, + StoreOpts + ). +``` + +### apply_defaults_to_store + +Apply defaults to a single store configuration. + +```erlang +apply_defaults_to_store(StoreOpt, Defaults) when is_map(StoreOpt), is_map(Defaults) -> + UpdatedStore = apply_defaults_by_module_type(StoreOpt, Defaults), + apply_defaults_to_substores(UpdatedStore, Defaults). 
+``` + +### apply_defaults_by_module_type + +Apply defaults based on store-module. + +```erlang +apply_defaults_by_module_type(StoreOpt, Defaults) -> + case maps:get(<<"store-module">>, StoreOpt, undefined) of + hb_store_lmdb -> + apply_type_defaults(StoreOpt, <<"lmdb">>, Defaults); + hb_store_fs -> + apply_type_defaults(StoreOpt, <<"fs">>, Defaults); + hb_store_rocksdb -> + apply_type_defaults(StoreOpt, <<"rocksdb">>, Defaults); + hb_store_gateway -> + apply_type_defaults(StoreOpt, <<"gateway">>, Defaults); + _ -> + StoreOpt + end. +``` + +### apply_type_defaults + +Apply type-specific defaults to a store. + +```erlang +apply_type_defaults(StoreOpt, TypeKey, Defaults) -> + case maps:get(TypeKey, Defaults, #{}) of + TypeDefaults when is_map(TypeDefaults) -> + maps:merge(TypeDefaults, StoreOpt); + _ -> + StoreOpt + end. +``` + +### apply_defaults_to_substores + +Apply defaults to sub-stores recursively. + +```erlang +apply_defaults_to_substores(StoreOpt, Defaults) -> + case maps:get(<<"store">>, StoreOpt, undefined) of + SubStores when is_list(SubStores) -> + UpdatedSubStores = + lists:map( + fun(SubStore) -> + apply_defaults_to_store(SubStore, Defaults) + end, + SubStores + ), + maps:put(<<"store">>, UpdatedSubStores, StoreOpt); + _ -> + StoreOpt + end. +``` + +### basic_apply_test + +```erlang +basic_apply_test() -> + StoreOpts = + [ + #{ + <<"name">> => <<"cache-mainnet/lmdb">>, + <<"store-module">> => hb_store_lmdb + } + ], + Defaults = + #{ + <<"lmdb">> => #{ + <<"capacity">> => 1073741824 + } + }, + Expected = + [ + #{ + <<"name">> => <<"cache-mainnet/lmdb">>, + <<"store-module">> => hb_store_lmdb, + <<"capacity">> => 1073741824 + } + ], + Result = apply(StoreOpts, Defaults), + ?assertEqual(Expected, Result). 
+``` + +### empty_defaults_test + +```erlang +empty_defaults_test() -> + StoreOpts = + [ + #{ + <<"name">> => <<"cache-mainnet/lmdb">>, + <<"store-module">> => hb_store_lmdb + } + ], + Defaults = #{}, + Expected = + [ + #{ + <<"name">> => <<"cache-mainnet/lmdb">>, + <<"store-module">> => hb_store_lmdb + } + ], + Result = apply(StoreOpts, Defaults), + ?assertEqual(Expected, Result). +``` + +### empty_store_opts_test + +```erlang +empty_store_opts_test() -> + StoreOpts = [], + Defaults = + #{ + <<"lmdb">> => #{ + <<"capacity">> => 1073741824 + } + }, + Expected = [], + Result = apply(StoreOpts, Defaults), + ?assertEqual(Expected, Result). +``` + +### nested_stores_test + +```erlang +nested_stores_test() -> + StoreOpts = + [ + #{ + <<"store-module">> => hb_store_gateway, + <<"store">> => [ + #{ + <<"name">> => <<"cache-mainnet/lmdb">>, + <<"store-module">> => hb_store_lmdb + } + ] + } + ], + Defaults = + #{ + <<"lmdb">> => #{ + <<"capacity">> => 1073741824 + } + }, + Expected = + [ + #{ + <<"store-module">> => hb_store_gateway, + <<"store">> => [ + #{ + <<"name">> => <<"cache-mainnet/lmdb">>, + <<"store-module">> => hb_store_lmdb, + <<"capacity">> => 1073741824 + } + ] + } + ], + Result = apply(StoreOpts, Defaults), + ?assertEqual(Expected, Result). 
+``` + +### lmdb_capacity_integration_test + +Integration test to verify that capacity is properly set for hb_store_lmdb +Full integration test simulating the hb_http_server flow + +```erlang +lmdb_capacity_integration_test() -> + CustomCapacity = 5000, + StoreOpts = + [ + #{ + <<"name">> => <<"test-lmdb">>, + <<"store-module">> => hb_store_lmdb + } + ], + Defaults = + #{ + <<"lmdb">> => #{ + <<"capacity">> => CustomCapacity + } + }, + [UpdatedStoreOpt] = apply(StoreOpts, Defaults), + ?assertEqual(CustomCapacity, maps:get(<<"capacity">>, UpdatedStoreOpt)), + ?assertEqual(<<"test-lmdb">>, maps:get(<<"name">>, UpdatedStoreOpt)), + ?assertEqual(hb_store_lmdb, maps:get(<<"store-module">>, UpdatedStoreOpt)), + ?assertNotEqual(16 * 1024 * 1024 * 1024, maps:get(<<"capacity">>, UpdatedStoreOpt)), + MultipleStoreOpts = + [ + #{ + <<"name">> => <<"test-lmdb-1">>, + <<"store-module">> => hb_store_lmdb + }, + #{ + <<"name">> => <<"test-lmdb-2">>, + <<"store-module">> => hb_store_lmdb + }, + #{ + <<"name">> => <<"test-fs">>, + <<"store-module">> => hb_store_fs + } + ], + UpdatedMultipleStoreOpts = apply(MultipleStoreOpts, Defaults), + [LmdbStore1, LmdbStore2, FsStore] = UpdatedMultipleStoreOpts, + ?assertEqual(CustomCapacity, maps:get(<<"capacity">>, LmdbStore1)), + ?assertEqual(CustomCapacity, maps:get(<<"capacity">>, LmdbStore2)), + ?assertEqual(false, maps:is_key(<<"capacity">>, FsStore)), + ?event({integration_test_passed, {lmdb_capacity, CustomCapacity}, {note, "correctly applied to store options"}}). 
+``` + +### full_integration_flow_test + +Integration test to verify that capacity is properly set for hb_store_lmdb +Full integration test simulating the hb_http_server flow + +```erlang +full_integration_flow_test() -> + LoadedConfig = #{ + <<"store_defaults">> => #{ + <<"lmdb">> => #{ + <<"capacity">> => 5000 + } + } + }, + DefaultStoreOpts = [ + #{ + <<"name">> => <<"cache-mainnet/lmdb">>, + <<"store-module">> => hb_store_lmdb + }, + #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-mainnet">> + }, + #{ + <<"store-module">> => hb_store_gateway, + <<"subindex">> => [ + #{ + <<"name">> => <<"Data-Protocol">>, + <<"value">> => <<"ao">> + } + ], + <<"store">> => [ + #{ + <<"store-module">> => hb_store_lmdb, + <<"name">> => <<"cache-mainnet/lmdb">> + } + ] + } + ], + MergedConfig = maps:merge( + #{<<"store">> => DefaultStoreOpts}, + LoadedConfig + ), + StoreOpts = maps:get(<<"store">>, MergedConfig), + StoreDefaults = maps:get(<<"store_defaults">>, MergedConfig, #{}), + UpdatedStoreOpts = apply(StoreOpts, StoreDefaults), + [LmdbStore, FsStore, GatewayStore] = UpdatedStoreOpts, + ?assertEqual(5000, maps:get(<<"capacity">>, LmdbStore)), + ?assertEqual(<<"cache-mainnet/lmdb">>, maps:get(<<"name">>, LmdbStore)), + ?assertEqual(hb_store_lmdb, maps:get(<<"store-module">>, LmdbStore)), + ?assertEqual(false, maps:is_key(<<"capacity">>, FsStore)), + ?assertEqual(hb_store_fs, maps:get(<<"store-module">>, FsStore)), + ?assertEqual(hb_store_gateway, maps:get(<<"store-module">>, GatewayStore)), + NestedStores = maps:get(<<"store">>, GatewayStore), + [NestedLmdbStore] = NestedStores, + ?assertEqual(5000, maps:get(<<"capacity">>, NestedLmdbStore)), + ?assertEqual(hb_store_lmdb, maps:get(<<"store-module">>, NestedLmdbStore)), + ?assertEqual(3, length(UpdatedStoreOpts)), +``` + +--- + +*Generated from [hb_store_opts.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_opts.erl)* diff --git a/docs/book/src/hb_store_remote_node.erl.md 
b/docs/book/src/hb_store_remote_node.erl.md new file mode 100644 index 000000000..3371c8345 --- /dev/null +++ b/docs/book/src/hb_store_remote_node.erl.md @@ -0,0 +1,194 @@ +# hb_store_remote_node + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_remote_node.erl) + +A store module that reads data from another AO node. +Notably, this store only provides the _read_ side of the store interface. +The write side could be added, returning a commitment that the data has +been written to the remote node. In that case, the node would probably want +to upload it to an Arweave bundler to ensure persistence, too. + +--- + +## Exported Functions + +- `make_link/3` +- `maybe_cache/2` +- `maybe_cache/3` +- `read/2` +- `resolve/2` +- `scope/1` +- `type/2` +- `write/3` + +--- + +### scope + +A store module that reads data from another AO node. +Return the scope of this store. + +```erlang +scope(_StoreOpts) -> + remote. +``` + +### resolve + +Resolve a key path in the remote store. + +```erlang +resolve(#{ <<"node">> := Node }, Key) -> + ?event({remote_resolve, {node, Node}, {key, Key}}), + Key. +``` + +### type + +Determine the type of value at a given key. + +```erlang +type(Opts = #{ <<"node">> := Node }, Key) -> + ?event({remote_type, {node, Node}, {key, Key}}), + case read(Opts, Key) of + not_found -> not_found; + _ -> simple + end. +``` + +### read + +Read a key from the remote node. 
+ +```erlang +read(Opts = #{ <<"node">> := Node }, Key) -> + ?event(store_remote_node, {executing_read, {node, Node}, {key, Key}}), + HTTPRes = + hb_http:get( + Node, + #{ <<"path">> => <<"/~cache@1.0/read">>, <<"target">> => Key }, + Opts + ), + case HTTPRes of + {ok, Res} -> + % returning the whole response to get the test-key + {ok, Msg} = hb_message:with_only_committed(Res, Opts), + ?event(store_remote_node, {read_found, {result, Msg, response, Res}}), + maybe_cache(Opts, Msg, [Key]), + {ok, Msg}; + {error, _Err} -> + ?event(store_remote_node, {read_not_found, {key, Key}}), + not_found + end. +``` + +### maybe_cache + +Cache the data if the cache is enabled. The `local-store` option may + +```erlang +maybe_cache(StoreOpts, Data) -> + maybe_cache(StoreOpts, Data, []). +``` + +### maybe_cache + +```erlang +maybe_cache(StoreOpts, Data, Links) -> + ?event({maybe_cache, StoreOpts, Data}), + % Check if the local store is in our store options. +``` + +### write + +Write a key to the remote node. + +```erlang +write(Opts = #{ <<"node">> := Node }, Key, Value) -> + ?event({write, {node, Node}, {key, Key}, {value, Value}}), + WriteMsg = #{ + <<"path">> => <<"/~cache@1.0/write">>, + <<"method">> => <<"POST">>, + <<"body">> => Value + }, + SignedMsg = hb_message:commit(WriteMsg, Opts), + ?event({write, {signed, SignedMsg}}), + case hb_http:post(Node, SignedMsg, Opts) of + {ok, Response} -> + Status = hb_ao:get(<<"status">>, Response, 0, #{}), + ?event(store_remote_node, {write_completed, {response, Response}}), + case Status of + 200 -> ok; + _ -> {error, {unexpected_status, Status}} + end; + {error, Err} -> + ?event({write, {error, Err}}), + {error, Err} + end. +``` + +### make_link + +Link a source to a destination in the remote node. 
+ +```erlang +make_link(Opts = #{ <<"node">> := Node }, Source, Destination) -> + ?event({make_remote_link, {node, Node}, {source, Source}, + {destination, Destination}}), + LinkMsg = #{ + <<"path">> => <<"/~cache@1.0/link">>, + <<"method">> => <<"POST">>, + <<"source">> => Source, + <<"destination">> => Destination + }, + SignedMsg = hb_message:commit(LinkMsg, Opts), + ?event({make_remote_link, {signed, SignedMsg}}), + case hb_http:post(Node, SignedMsg, Opts) of + {ok, Response} -> + Status = hb_ao:get(<<"status">>, Response, 0, #{}), + ?event(store_remote_node, {make_link_completed, {response, Response}}), + case Status of + 200 -> ok; + _ -> {error, {unexpected_status, Status}} + end; + {error, Err} -> + ?event(store_remote_node, {make_link_error, {error, Err}}), + {error, Err} + end. +``` + +### read_test + +Test that we can create a store, write a random message to it, then + +```erlang +read_test() -> + rand:seed(default), + LocalStore = #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache-mainnet">> + }, + hb_store:reset(LocalStore), + M = #{ <<"test-key">> => Rand = rand:uniform(1337) }, + ID = hb_message:id(M), + {ok, ID} = + hb_cache:write( + M, + #{ store => LocalStore } + ), + ?event({wrote, ID}), + Node = + hb_http_server:start_node( + #{ + store => LocalStore + } + ), + RemoteStore = [ + #{ <<"store-module">> => hb_store_remote_node, <<"node">> => Node } + ], + {ok, RetrievedMsg} = hb_cache:read(ID, #{ store => RemoteStore }), +``` + +--- + +*Generated from [hb_store_remote_node.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_remote_node.erl)* diff --git a/docs/book/src/hb_store_rocksdb.erl.md b/docs/book/src/hb_store_rocksdb.erl.md new file mode 100644 index 000000000..ce581e1d0 --- /dev/null +++ b/docs/book/src/hb_store_rocksdb.erl.md @@ -0,0 +1,884 @@ +# hb_store_rocksdb + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_rocksdb.erl) + +A process wrapper over rocksdb storage. 
Replicates functionality of the + hb_fs_store module. + Encodes the item types with the help of prefixes, see `encode_value/2` + and `decode_value/1` + +--- + +## Exported Functions + +- `add_path/3` +- `code_change/3` +- `enabled/0` +- `handle_call/3` +- `handle_cast/2` +- `handle_info/2` +- `init/1` +- `list/0` +- `list/2` +- `make_group/2` +- `make_link/3` +- `path/2` +- `read/2` +- `reset/1` +- `resolve/2` +- `scope/1` +- `start_link/1` +- `start/1` +- `stop/1` +- `terminate/2` +- `type/2` +- `write/3` + +--- + +### enabled + +A process wrapper over rocksdb storage. Replicates functionality of the +Returns whether the RocksDB store is enabled. + +```erlang +enabled() -> true. +-else. +``` + +### enabled + +```erlang +enabled() -> false. +-endif. +``` + +### start_link + +Start the RocksDB store. + +```erlang +start_link(#{ <<"store-module">> := hb_store_rocksdb, <<"name">> := Dir}) -> + ?event(rocksdb, {starting, Dir}), + application:ensure_all_started(rocksdb), + gen_server:start_link({local, ?MODULE}, ?MODULE, Dir, []); +``` + +### start_link + +Start the RocksDB store. + +```erlang +start_link(Stores) when is_list(Stores) -> + RocksStores = + [ + Store + || + Store = #{ <<"store-module">> := Module } <- Stores, + Module =:= hb_store_rocksdb + ], + case RocksStores of + [Store] -> start_link(Store); + _ -> ignore + end; +``` + +### start_link + +Start the RocksDB store. + +```erlang +start_link(Store) -> + ?event(rocksdb, {invalid_store_config, Store}), + ignore. +``` + +### start_link + +```erlang +start_link(_Opts) -> ignore. +-endif. +``` + +### start + +```erlang +start(Opts = #{ <<"store-module">> := hb_store_rocksdb, <<"name">> := _Dir}) -> + start_link(Opts); +``` + +### start + +```erlang +start(Opts) -> + start_link(Opts). +``` + +### scope + +Return scope (local) +Return path + +```erlang +-spec reset([]) -> ok | no_return(). +reset(_Opts) -> + gen_server:call(?MODULE, reset, ?TIMEOUT). +``` + +```erlang +scope(_) -> local. 
+``` + +### path + +Return scope (local) +Return path + +```erlang +-spec reset([]) -> ok | no_return(). +reset(_Opts) -> + gen_server:call(?MODULE, reset, ?TIMEOUT). +``` + +```erlang +path(_Opts, Path) -> + hb_store:path(Path). +``` + +### read + +Read data by the key. + +```erlang +-spec read(Opts, Key) -> Result when + Opts :: map(), + Key :: key() | list(), + Result :: {ok, value()} | not_found | {error, {corruption, string()}} | {error, any()}. +``` + +```erlang +read(Opts, RawPath) -> + ?event({read, RawPath}), + Path = resolve(Opts, RawPath), + case do_read(Opts, Path) of + not_found -> + not_found; + {error, _Reason} = Err -> Err; + {ok, {raw, Result}} -> + {ok, Result}; + {ok, {link, Link}} -> + ?event({link_found, Path, Link}), + read(Opts, Link); + {ok, {group, _Result}} -> + not_found + end. +``` + +### write + +Write given Key and Value to the database + +```erlang +-spec write(Opts, Key, Value) -> Result when + Opts :: map(), + Key :: key(), + Value :: value(), + Result :: ok | {error, any()}. +``` + +```erlang +write(Opts, RawKey, Value) -> + Key = hb_store:join(RawKey), + EncodedValue = encode_value(raw, Value), + ?event({writing, Key, byte_size(EncodedValue)}), + do_write(Opts, Key, EncodedValue). +``` + +### list + +```erlang +-spec list(Opts, Path) -> Result when + Opts :: any(), + Path :: any(), + Result :: {ok, [string()]} | {error, term()}. +``` + +```erlang +list(Opts, Path) -> + case do_read(Opts, Path) of + not_found -> {error, not_found}; + {error, _Reason} = Err -> + ?event(rocksdb, {could_not_list_folder, Err}), + Err; + {ok, {group, Value}} -> + {ok, sets:to_list(Value)}; + {ok, {link, LinkedPath}} -> + list(Opts, LinkedPath); + Reason -> + ?event(rocksdb, {could_not_list_folder, Reason}), + {ok, []} + end. +``` + +### resolve + +Replace links in a path with the target of the link. + +```erlang +-spec resolve(Opts, Path) -> Result when + Opts :: any(), + Path :: binary() | list(), + Result :: not_found | string(). 
+``` + +```erlang +resolve(Opts, Path) -> + PathList = hb_path:term_to_path_parts(hb_store:join(Path)), + ResolvedPath = do_resolve(Opts, "", PathList), + ResolvedPath. +``` + +### do_resolve + +```erlang +do_resolve(_Opts, FinalPath, []) -> + FinalPath; +``` + +### do_resolve + +```erlang +do_resolve(Opts, CurrentPath, [CurrentPath | Rest]) -> + do_resolve(Opts, CurrentPath, Rest); +``` + +### do_resolve + +```erlang +do_resolve(Opts, CurrentPath, [Next | Rest]) -> + PathPart = hb_store:join([CurrentPath, Next]), + case do_read(Opts, PathPart) of + not_found -> do_resolve(Opts, PathPart, Rest); + {error, _Reason} = Err -> Err; + {ok, {link, LinkValue}} -> + do_resolve(Opts, LinkValue, Rest); + {ok, _OtherType} -> do_resolve(Opts, PathPart, Rest) + end. +``` + +### type + +```erlang +-spec type(Opts, Key) -> Result when + Opts :: map(), + Key :: binary(), + Result :: composite | simple | not_found. +``` + +```erlang +type(Opts, RawKey) -> + Key = hb_store:join(RawKey), + case do_read(Opts, Key) of + not_found -> not_found; + {ok, {raw, _Item}} -> simple; + {ok, {link, NewKey}} -> type(Opts, NewKey); + {ok, {group, _Item}} -> composite + end. +``` + +### make_group + +Creates group under the given path. + +```erlang +-spec make_group(Opts, Key) -> Result when + Opts :: any(), + Key :: binary(), + Result :: ok | {error, already_added}. +``` + +```erlang +make_group(#{ <<"name">> := _DataDir }, Key) -> + gen_server:call(?MODULE, {make_group, Key}, ?TIMEOUT); +``` + +### make_group + +Creates group under the given path. + +```erlang +-spec make_group(Opts, Key) -> Result when + Opts :: any(), + Key :: binary(), + Result :: ok | {error, already_added}. +``` + +```erlang +make_group(_Opts, Key) -> + gen_server:call(?MODULE, {make_group, Key}, ?TIMEOUT). +``` + +### add_path + +Add two path components together. // is not used + +```erlang +-spec make_link(any(), key(), key()) -> ok. 
+make_link(_, Key1, Key1) -> + ok; +make_link(Opts, Existing, New) -> + ExistingBin = convert_if_list(Existing), + NewBin = convert_if_list(New), + % Create: NewValue -> ExistingBin + case do_read(Opts, NewBin) of + not_found -> + do_write(Opts, NewBin, encode_value(link, ExistingBin)); + _ -> + ok + end. +``` + +```erlang +add_path(_Opts, Path1, Path2) -> + Path1 ++ Path2. +``` + +### list + +List all items registered in rocksdb store. Should be used only + +```erlang +list() -> + gen_server:call(?MODULE, list, ?TIMEOUT). +``` + +### init + +```erlang +init(Dir) -> + filelib:ensure_dir(Dir), + case open_rockdb(Dir) of + {ok, DBHandle} -> + State = #{ + db_handle => DBHandle, + dir => Dir + }, + {ok, State}; + {error, Reason} -> + {stop, Reason} + end. +``` + +### handle_cast + +```erlang +handle_cast(_Request, State) -> + {noreply, State}. +``` + +### handle_info + +```erlang +handle_info(_Info, State) -> + {noreply, State}. +``` + +### handle_call + +```erlang +handle_call(Request, From, #{ db_handle := undefined, dir := Dir } = State) -> + % Re-initialize the DB handle if it's not set. 
+``` + +### handle_call + +```erlang +handle_call({do_write, Key, Value}, _From, #{db_handle := DBHandle} = State) -> + BaseName = filename:basename(Key), + rocksdb:put(DBHandle, Key, Value, #{}), + case filename:dirname(Key) of + <<".">> -> + ignore; + BaseDir -> + ensure_dir(DBHandle, BaseDir), + {ok, RawDirContent} = rocksdb:get(DBHandle, BaseDir, #{}), + NewDirContent = maybe_append_key_to_group(BaseName, RawDirContent), + ok = rocksdb:put(DBHandle, BaseDir, NewDirContent, #{}) + end, + {reply, ok, State}; +``` + +### handle_call + +```erlang +handle_call({do_read, Key}, _From, #{db_handle := DBHandle} = State) -> + Response = + case rocksdb:get(DBHandle, Key, #{}) of + {ok, Result} -> + {Type, Value} = decode_value(Result), + {ok, {Type, Value}}; + not_found -> + not_found; + {error, _Reason} = Err -> + Err + end, + {reply, Response, State}; +``` + +### handle_call + +```erlang +handle_call(reset, _From, State = #{db_handle := DBHandle, dir := Dir}) -> + ok = rocksdb:close(DBHandle), + ok = rocksdb:destroy(DirStr = ensure_list(Dir), []), + os:cmd(binary_to_list(<< "rm -Rf ", (list_to_binary(DirStr))/binary >>)), + {reply, ok, State#{ db_handle := undefined }}; +``` + +### handle_call + +```erlang +handle_call(list, _From, State = #{db_handle := DBHandle}) -> + {ok, Iterator} = rocksdb:iterator(DBHandle, []), + Items = collect(Iterator), + {reply, Items, State}; +``` + +### handle_call + +```erlang +handle_call({make_group, Path}, _From, #{db_handle := DBHandle} = State) -> + Result = ensure_dir(DBHandle, Path), + {reply, Result, State}; +``` + +### handle_call + +```erlang +handle_call(_Request, _From, State) -> + {reply, handle_call_unrecognized_message, State}. +``` + +### terminate + +```erlang +terminate(_Reason, _State) -> + ok. +``` + +### code_change + +```erlang +code_change(_OldVsn, State, _Extra) -> + {ok, State}. 
+``` + +### do_write + +Write given Key and Value to the database + +```erlang +-spec do_write(Opts, Key, Value) -> Result when + Opts :: map(), + Key :: key(), + Value :: value(), + Result :: ok | {error, any()}. +``` + +```erlang +do_write(_Opts, Key, Value) -> + gen_server:call(?MODULE, {do_write, Key, Value}, ?TIMEOUT). +``` + +### do_read + +```erlang +do_read(_Opts, Key) -> + gen_server:call(?MODULE, {do_read, Key}, ?TIMEOUT). +``` + +### ensure_dir + +```erlang +-spec decode_value(binary()) -> {value_type(), binary()}. +decode_value(<<1, Value/binary>>) -> {link, Value}; +decode_value(<<2, Value/binary>>) -> {raw, Value}; +decode_value(<<3, Value/binary>>) -> {group, binary_to_term(Value)}. +``` + +```erlang +ensure_dir(DBHandle, BaseDir) -> + PathParts = hb_path:term_to_path_parts(BaseDir), + [First | Rest] = PathParts, + Result = ensure_dir(DBHandle, First, Rest), + Result. +``` + +### ensure_dir + +```erlang +ensure_dir(DBHandle, CurrentPath, []) -> + maybe_create_dir(DBHandle, CurrentPath, nil), + ok; +``` + +### ensure_dir + +```erlang +ensure_dir(DBHandle, CurrentPath, [Next]) -> + maybe_create_dir(DBHandle, CurrentPath, Next), + ensure_dir(DBHandle, hb_store:join([CurrentPath, Next]), []); +``` + +### ensure_dir + +```erlang +ensure_dir(DBHandle, CurrentPath, [Next | Rest]) -> + maybe_create_dir(DBHandle, CurrentPath, Next), + ensure_dir(DBHandle, hb_store:join([CurrentPath, Next]), Rest). +``` + +### maybe_create_dir + +```erlang +maybe_create_dir(DBHandle, DirPath, Value) -> + CurrentValueSet = + case rocksdb:get(DBHandle, DirPath, #{}) of + not_found -> sets:new(); + {ok, CurrentValue} -> + {group, DecodedOldValue} = decode_value(CurrentValue), + DecodedOldValue + end, + NewValueSet = + case Value of + nil -> CurrentValueSet; + _ -> sets:add_element(Value, CurrentValueSet) + end, + rocksdb:put(DBHandle, DirPath, encode_value(group, NewValueSet), #{}). 
+``` + +### open_rockdb + +```erlang +open_rockdb(RawDir) -> + filelib:ensure_dir(Dir = ensure_list(RawDir)), + Options = [{create_if_missing, true}], + rocksdb:open(Dir, Options). +``` + +### convert_if_list + +```erlang +convert_if_list(Value) when is_list(Value) -> + join(Value); % Perform the conversion if it's a list +``` + +### convert_if_list + +Ensure that the given filename is a list, not a binary. + +```erlang +convert_if_list(Value) -> + Value. % Leave unchanged if it's not a list +``` + +### ensure_list + +Ensure that the given filename is a list, not a binary. + +```erlang +ensure_list(Value) when is_binary(Value) -> binary_to_list(Value); +``` + +### ensure_list + +Ensure that the given filename is a list, not a binary. + +```erlang +ensure_list(Value) -> Value. +``` + +### maybe_convert_to_binary + +Ensure that the given filename is a list, not a binary. + +```erlang +maybe_convert_to_binary(Value) when is_list(Value) -> + list_to_binary(Value); +``` + +### maybe_convert_to_binary + +Ensure that the given filename is a list, not a binary. + +```erlang +maybe_convert_to_binary(Value) when is_binary(Value) -> + Value. +``` + +### join + +```erlang +join(Key) when is_list(Key) -> + KeyList = hb_store:join(Key), + maybe_convert_to_binary(KeyList); +``` + +### join + +```erlang +join(Key) when is_binary(Key) -> Key. +``` + +### collect + +```erlang +collect(Iterator) -> + case rocksdb:iterator_move(Iterator, <<>>) of + {error, invalid_iterator} -> []; + {ok, Key, Value} -> + DecodedValue = decode_value(Value), + collect(Iterator, [{Key, DecodedValue}]) + end. 
+``` + +### collect + +```erlang +collect(Iterator, Acc) -> + case rocksdb:iterator_move(Iterator, next) of + {ok, Key, Value} -> + % Continue iterating, accumulating the key-value pair in the list + DecodedValue = decode_value(Value), + collect(Iterator, [{Key, DecodedValue} | Acc]); + {error, invalid_iterator} -> + % Reached the end of the iterator, return the accumulated list + lists:reverse(Acc) + end. +``` + +### maybe_append_key_to_group + +```erlang +maybe_append_key_to_group(Key, CurrentDirContents) -> + case decode_value(CurrentDirContents) of + {group, GroupSet} -> + BaseName = filename:basename(Key), + NewGroupSet = sets:add_element(BaseName, GroupSet), + encode_value(group, NewGroupSet); + _ -> + CurrentDirContents + end. +``` + +### get_or_start_server + +```erlang +get_or_start_server() -> + % Store = lists:keyfind(hb_store_rocksdb2, 1, hb_store:test_stores()), + Opts = #{ + <<"store-module">> => hb_store_rocksdb, + <<"name">> => <<"cache-TEST/rocksdb">> + }, + case start_link(Opts) of + {ok, Pid} -> + Pid; + {error, {already_started, Pid}} -> + Pid + end. +``` + +### write_read_test_ + +```erlang +write_read_test_() -> + {foreach, + fun() -> + Pid = get_or_start_server(), + unlink(Pid) + end, + fun(_) -> reset([]) end, + [ + {"can read/write data", fun() -> + ok = write(#{}, <<"test_key">>, <<"test_value">>), + {ok, Value} = read(#{}, <<"test_key">>), + ?assertEqual(<<"test_value">>, Value) + end}, + {"returns not_found for non existing keys", fun() -> + Value = read(#{}, <<"non_existing">>), + ?assertEqual(not_found, Value) + end}, + {"follows links", fun() -> + ok = write(#{}, <<"test_key2">>, <<"value_under_linked_key">>), + ok = make_link(#{}, <<"test_key2">>, <<"test_key">>), + {ok, Value} = read(#{}, <<"test_key">>), + ?assertEqual(<<"value_under_linked_key">>, Value) + end} + ]}. 
+``` + +### api_test_ + +```erlang +api_test_() -> + {foreach, + fun() -> + Pid = get_or_start_server(), + unlink(Pid) + end, + fun(_) -> reset([]) end, [ + {"write/3 can automatically create folders", fun() -> + ok = write(#{}, <<"messages/key1">>, <<"val1">>), + ok = write(#{}, <<"messages/key2">>, <<"val2">>), + {ok, Items} = list(#{}, <<"messages">>), + ?assertEqual( + lists:sort([<<"key1">>, <<"key2">>]), + lists:sort(Items) + ), + {ok, Item} = read(#{}, <<"messages/key1">>), + ?assertEqual(<<"val1">>, Item) + end}, + {"list/2 lists keys under given path", fun() -> + ok = write(#{}, <<"messages/key1">>, <<"val1">>), + ok = write(#{}, <<"messages/key2">>, <<"val2">>), + ok = write(#{}, <<"other_path/key3">>, <<"val3">>), + {ok, Items} = list(#{}, <<"messages">>), + ?assertEqual( + lists:sort([<<"key1">>, <<"key2">>]), lists:sort(Items) + ) + end}, + {"list/2 when database is empty", fun() -> + ?assertEqual({error, not_found}, list(#{}, <<"process/slot">>)) + end}, + {"make_link/3 creates a link to actual data", fun() -> + ok = write(ignored_options, <<"key1">>, <<"test_value">>), + ok = make_link([], <<"key1">>, <<"key2">>), + {ok, Value} = read([], <<"key2">>), + ?assertEqual(<<"test_value">>, Value) + end}, + {"make_link/3 does not create links if keys are same", fun() -> + ok = make_link([], <<"key1">>, <<"key1">>), + ?assertEqual(not_found, read(#{}, <<"key1">>)) + end}, + {"reset cleans up the database", fun() -> + ok = write(ignored_options, <<"test_key">>, <<"test_value">>), + ok = reset([]), + ?assertEqual(not_found, read(ignored_options, <<"test_key">>)) + end}, + { + "type/2 can identify simple items", + fun() -> + ok = write(#{}, <<"simple_item">>, <<"test">>), + ?assertEqual(simple, type(#{}, <<"simple_item">>)) + end + }, + { + "type/2 returns not_found for non existing keys", + fun() -> + ?assertEqual(not_found, type(#{}, <<"random_key">>)) + end + }, + { + "type/2 resolves links before checking real type of the following item", + fun() -> + ok = 
write(#{}, <<"messages/key1">>, <<"val1">>), + ok = write(#{}, <<"messages/key2">>, <<"val2">>), + make_link(#{}, <<"messages">>, <<"CompositeKey">>), + make_link(#{}, <<"messages/key2">>, <<"SimpleKey">>), + ?assertEqual(composite, type(#{}, <<"CompositeKey">>)), + ?assertEqual(simple, type(#{}, <<"SimpleKey">>)) + end + }, + { + "type/2 treats groups as composite items", + fun() -> + make_group(#{}, <<"messages_folder">>), + ?assertEqual(composite, type(#{}, <<"messages_folder">>)) + end + }, + { + "resolve/2 resolves raw/groups items", + fun() -> + write(#{}, <<"top_level/level1/item1">>, <<"1">>), + write(#{}, <<"top_level/level1/item2">>, <<"1">>), + write(#{}, <<"top_level/level1/item3">>, <<"1">>), + ?assertEqual( + <<"top_level/level1/item3">>, + resolve(#{}, <<"top_level/level1/item3">>) + ) + end + }, + { + "resolve/2 follows links", + fun() -> + write(#{}, <<"data/the_data_item">>, <<"the_data">>), + make_link(#{}, <<"data/the_data_item">>, <<"top_level/level1/item">>), + ?assertEqual( + <<"data/the_data_item">>, + resolve(#{}, <<"top_level/level1/item">>) + ) + end + }, + { + "make_group/2 creates a folder", + fun() -> + ?assertEqual(ok, make_group(#{}, <<"messages">>)), + ?assertEqual( + list(#{}, <<"messages">>), + {ok, []} + ) + end + }, + { + "make_group/2 does not override folder contents", + fun() -> + write(#{}, <<"messages/id">>, <<"1">>), + write(#{}, <<"messages/commitments">>, <<"2">>), + ?assertEqual(ok, make_group(#{}, <<"messages">>)), + ?assertEqual( + list(#{}, <<"messages">>), + {ok, [<<"id">>, <<"commitments">>]} + ) + end + }, + { + "make_group/2 making deep nested groups", + fun() -> + make_group(#{}, <<"messages/ids/items">>), + ?assertEqual( + {ok, [<<"ids">>]}, + list(#{}, <<"messages">>) + ), + ?assertEqual( + {ok, [<<"items">>]}, + list(#{}, <<"messages/ids">>) + ), + ?assertEqual( + {ok, []}, + list(#{}, <<"messages/ids/items">>) + ) + end + }, + { + "write/3 automatically does deep groups", + fun() -> + write(#{}, 
<<"messages/ids/item1">>, <<"1">>), + write(#{}, <<"messages/ids/item2">>, <<"2">>), + ?assertEqual( + {ok, [<<"ids">>]}, + list(#{}, <<"messages">>) + ), + ?assertEqual( + {ok, [<<"item2">>, <<"item1">>]}, + list(#{}, <<"messages/ids">>) + ), + ?assertEqual(read(#{}, <<"messages/ids/item1">>),{ok, <<"1">>}), + ?assertEqual(read(#{}, <<"messages/ids/item2">>), {ok, <<"2">>}) + end + } + ]}. +``` + +--- + +*Generated from [hb_store_rocksdb.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_rocksdb.erl)* diff --git a/docs/book/src/hb_structured_fields.erl.md b/docs/book/src/hb_structured_fields.erl.md new file mode 100644 index 000000000..62a699964 --- /dev/null +++ b/docs/book/src/hb_structured_fields.erl.md @@ -0,0 +1,1426 @@ +# hb_structured_fields + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_structured_fields.erl) + +A module for parsing and converting between Erlang and HTTP Structured +Fields, as described in RFC-9651. +The mapping between Erlang and structured headers types is as follow: +List: list() +Inner list: {list, [item()], params()} +Dictionary: [{binary(), item()}] + There is no distinction between empty list and empty dictionary. +Item with parameters: {item, bare_item(), params()} +Parameters: [{binary(), bare_item()}] +Bare item: one bare_item() that can be of type: +Integer: integer() +Decimal: {decimal, {integer(), integer()}} +String: {string, binary()} +Token: {token, binary()} +Byte sequence: {binary, binary()} +Boolean: boolean() + +--- + +## Exported Functions + +- `bare_item/1` +- `dictionary/1` +- `from_bare_item/1` +- `item/1` +- `list/1` +- `parse_bare_item/1` +- `parse_binary/1` +- `parse_dictionary/1` +- `parse_item/1` +- `parse_list/1` +- `to_dictionary/1` +- `to_item/1` +- `to_item/2` +- `to_list/1` + +--- + +### to_dictionary + +A module for parsing and converting between Erlang and HTTP Structured +Convert a map to a dictionary. 
+ +```erlang +to_dictionary(Map) when is_map(Map) -> + to_dictionary(maps:to_list(Map)); +``` + +### to_dictionary + +A module for parsing and converting between Erlang and HTTP Structured +Convert a map to a dictionary. + +```erlang +to_dictionary(Pairs) when is_list(Pairs) -> + to_dictionary([], Pairs). +``` + +### to_dictionary + +```erlang +to_dictionary(Dict, []) -> + {ok, Dict}; +``` + +### to_dictionary + +```erlang +to_dictionary(_Dict, [{ Name, Value } | _Rest]) when is_map(Value) -> + {too_deep, Name}; +``` + +### to_dictionary + +```erlang +to_dictionary(Dict, [{Name, Value} | Rest]) -> + case to_item_or_inner_list(Value) of + {ok, ItemOrInner} -> to_dictionary([{key_to_binary(Name), ItemOrInner} | Dict], Rest); + E -> E + end. +``` + +### to_item + +Convert an item to a dictionary. + +```erlang +to_item({item, Kind, Params}) when is_list(Params) -> + {ok, {item, to_bare_item(Kind), [to_param(Pair) || Pair <- Params] }}; +``` + +### to_item + +Convert an item to a dictionary. + +```erlang +to_item(Item) -> + to_item(Item, []). +``` + +### to_item + +```erlang +to_item(Item, Params) when is_list(Params) -> + to_item({ item, to_bare_item(Item), Params}). +``` + +### to_list + +Convert a list to an SF term. + +```erlang +to_list(List) when is_list(List) -> + to_list([], List). +``` + +### to_list + +```erlang +to_list(Acc, []) -> + {ok, lists:reverse(Acc)}; +``` + +### to_list + +```erlang +to_list(Acc, [ItemOrInner | Rest]) -> + Res = to_item_or_inner_list(ItemOrInner), + case Res of + {ok, Elem} -> to_list([Elem | Acc], Rest); + E -> E + end. +``` + +### to_inner_list + +Convert an inner list to an SF term. + +```erlang +to_inner_list({list, Inner, Params}) when is_list(Inner) andalso is_list(Params) -> + {ok, {list, [to_inner_item(I) || I <-- Inner], [to_param(Pair) || Pair <- Params]}}; +``` + +### to_inner_list + +Convert an inner list to an SF term. + +```erlang +to_inner_list(Inner) -> + to_inner_list(Inner, []). 
+``` + +### to_inner_list + +```erlang +to_inner_list(Inner, Params) when is_list(Inner) andalso is_list(Params) -> + to_inner_list([], Inner, Params). +``` + +### to_inner_list + +```erlang +to_inner_list(Inner, [], Params) when is_list(Params) -> + {ok, {list, lists:reverse(Inner), [to_param(Param) || Param <- Params]}}; +``` + +### to_inner_list + +```erlang +to_inner_list(_List, [Item | _Rest], _Params) when is_list(Item) orelse is_map(Item) -> + {too_deep, Item}; +``` + +### to_inner_list + +```erlang +to_inner_list(Inner, [Item | Rest], Params) -> + case to_item(Item) of + {ok, I} -> to_inner_list([I | Inner], Rest, Params); + E -> E + end. +``` + +### to_item_or_inner_list + +Convert an Erlang term to an SF `item` or `inner_list`. + +```erlang +to_item_or_inner_list(ItemOrInner) -> + case ItemOrInner of + Map when is_map(Map) -> {too_deep, Map}; + % pre-parsed inner list + {list, Inner, Params} -> to_inner_list(Inner, Params); + Item when not is_list(Item) -> to_item(Item); + Inner when is_list(Inner) -> to_inner_list(Inner) + end. +``` + +### to_inner_item + +Convert an Erlang term to an SF `item`. + +```erlang +to_inner_item(Item) when is_list(Item) -> + {too_deep, Item}; +``` + +### to_inner_item + +Convert an Erlang term to an SF `item`. + +```erlang +to_inner_item(Item) -> + case to_item(Item) of + {ok, I} -> I; + E -> E + end. +``` + +### to_param + +Convert an Erlang term to an SF `parameter`. + +```erlang +to_param({Name, Value}) -> + NormalizedName = key_to_binary(Name), + {NormalizedName, to_bare_item(Value)}. +``` + +### to_bare_item + +Convert an Erlang term to an SF `bare_item`. 
+ +```erlang +to_bare_item(BareItem) -> + case BareItem of + % Assume tuple is already parsed + BI when is_tuple(BI) -> BI; + % Serialize -> Parse numbers in order to ensure their lengths adhere to structured fields + B when is_boolean(B) -> B; + I when is_integer(I) -> + {Int, _} = parse_bare_item(bare_item(I)), + Int; + F when is_float(F) -> + {Dec, _} = parse_bare_item(bare_item({decimal, {F, 0}})), + Dec; + A when is_atom(A) -> {token, atom_to_binary(A)}; + S when is_binary(S) or is_list(S) -> {string, iolist_to_binary(S)} + end. +``` + +### from_bare_item + +Convert an SF `bare_item` to an Erlang term. + +```erlang +from_bare_item(BareItem) -> + case BareItem of + I when is_integer(I) -> I; + B when is_boolean(B) -> B; + D = {decimal, _} -> + list_to_float( + binary_to_list( + iolist_to_binary( + bare_item(D) + ) + ) + ); + {string, S} -> S; + {token, T} -> + try binary_to_existing_atom(T) of + Atom -> Atom + catch + error:badarg -> T + end; + {binary, B} -> B + end. +``` + +### key_to_binary + +Convert an Erlang term to a binary key. + +```erlang +key_to_binary(Key) when is_atom(Key) -> atom_to_binary(Key); +``` + +### key_to_binary + +Convert an Erlang term to a binary key. +Parse a binary SF dictionary. + +```erlang +-spec parse_dictionary(binary()) -> sh_dictionary(). +parse_dictionary(<<>>) -> + []; +parse_dictionary(<>) when ?IS_ALPHA(C) + or ?IS_DIGIT(C) or (C =:= $*) or (C =:= $%) or (C =:= $_) or (C =:= $-) + or (C =:= $.) -> + parse_dict_key(R, [], <>). +``` + +```erlang +key_to_binary(Key) -> iolist_to_binary(Key). +``` + +### parse_dict_key + +Convert an Erlang term to a binary key. +Parse a binary SF dictionary. + +```erlang +-spec parse_dictionary(binary()) -> sh_dictionary(). +parse_dictionary(<<>>) -> + []; +parse_dictionary(<>) when ?IS_ALPHA(C) + or ?IS_DIGIT(C) or (C =:= $*) or (C =:= $%) or (C =:= $_) or (C =:= $-) + or (C =:= $.) -> + parse_dict_key(R, [], <>). 
+``` + +```erlang +parse_dict_key(<<$=, $(, R0/bits>>, Acc, K) -> + {Item, R} = parse_inner_list(R0, []), + parse_dict_before_sep(R, lists:keystore(K, 1, Acc, {K, Item})); +``` + +### parse_dict_key + +Convert an Erlang term to a binary key. +Parse a binary SF dictionary. + +```erlang +-spec parse_dictionary(binary()) -> sh_dictionary(). +parse_dictionary(<<>>) -> + []; +parse_dictionary(<>) when ?IS_ALPHA(C) + or ?IS_DIGIT(C) or (C =:= $*) or (C =:= $%) or (C =:= $_) or (C =:= $-) + or (C =:= $.) -> + parse_dict_key(R, [], <>). +``` + +```erlang +parse_dict_key(<<$=, R0/bits>>, Acc, K) -> + {Item, R} = parse_item1(R0), + parse_dict_before_sep(R, lists:keystore(K, 1, Acc, {K, Item})); +``` + +### parse_dict_key + +Convert an Erlang term to a binary key. +Parse a binary SF dictionary. + +```erlang +-spec parse_dictionary(binary()) -> sh_dictionary(). +parse_dictionary(<<>>) -> + []; +parse_dictionary(<>) when ?IS_ALPHA(C) + or ?IS_DIGIT(C) or (C =:= $*) or (C =:= $%) or (C =:= $_) or (C =:= $-) + or (C =:= $.) -> + parse_dict_key(R, [], <>). +``` + +```erlang +parse_dict_key(<>, Acc, K) when + ?IS_ALPHA(C) or ?IS_DIGIT(C) or + (C =:= $_) or (C =:= $-) or (C =:= $.) or (C =:= $*) or (C =:= $%) -> + parse_dict_key(R, Acc, <>); +``` + +### parse_dict_key + +Convert an Erlang term to a binary key. +Parse a binary SF dictionary. + +```erlang +-spec parse_dictionary(binary()) -> sh_dictionary(). +parse_dictionary(<<>>) -> + []; +parse_dictionary(<>) when ?IS_ALPHA(C) + or ?IS_DIGIT(C) or (C =:= $*) or (C =:= $%) or (C =:= $_) or (C =:= $-) + or (C =:= $.) -> + parse_dict_key(R, [], <>). +``` + +```erlang +parse_dict_key(<<$;, R0/bits>>, Acc, K) -> + {Params, R} = parse_before_param(R0, []), + parse_dict_before_sep(R, lists:keystore(K, 1, Acc, {K, {item, true, Params}})); +``` + +### parse_dict_key + +Convert an Erlang term to a binary key. +Parse a binary SF dictionary. + +```erlang +-spec parse_dictionary(binary()) -> sh_dictionary(). 
+parse_dictionary(<<>>) -> + []; +parse_dictionary(<>) when ?IS_ALPHA(C) + or ?IS_DIGIT(C) or (C =:= $*) or (C =:= $%) or (C =:= $_) or (C =:= $-) + or (C =:= $.) -> + parse_dict_key(R, [], <>). +``` + +```erlang +parse_dict_key(R, Acc, K) -> + parse_dict_before_sep(R, lists:keystore(K, 1, Acc, {K, {item, true, []}})). +``` + +### parse_dict_before_sep + +Parse a binary SF dictionary before a separator. + +```erlang +parse_dict_before_sep(<<$\s, R/bits>>, Acc) -> + parse_dict_before_sep(R, Acc); +``` + +### parse_dict_before_sep + +Parse a binary SF dictionary before a separator. + +```erlang +parse_dict_before_sep(<<$\t, R/bits>>, Acc) -> + parse_dict_before_sep(R, Acc); +``` + +### parse_dict_before_sep + +Parse a binary SF dictionary before a separator. + +```erlang +parse_dict_before_sep(<>, Acc) when C =:= $, -> + parse_dict_before_member(R, Acc); +``` + +### parse_dict_before_sep + +Parse a binary SF dictionary before a separator. + +```erlang +parse_dict_before_sep(<<>>, Acc) -> + Acc. +``` + +### parse_dict_before_member + +Parse a binary SF dictionary before a member. + +```erlang +parse_dict_before_member(<<$\s, R/bits>>, Acc) -> + parse_dict_before_member(R, Acc); +``` + +### parse_dict_before_member + +Parse a binary SF dictionary before a member. + +```erlang +parse_dict_before_member(<<$\t, R/bits>>, Acc) -> + parse_dict_before_member(R, Acc); +``` + +### parse_dict_before_member + +Parse a binary SF dictionary before a member. + +```erlang +parse_dict_before_member(<>, Acc) + when ?IS_ALPHA(C) or ?IS_DIGIT(C) or (C =:= $*) + or (C =:= $%) or (C =:= $_) or (C =:= $-) -> + parse_dict_key(R, Acc, <>). +``` + +### parse_item1 + +```erlang +-spec parse_item(binary()) -> sh_item(). +parse_item(Bin) -> + {Item, <<>>} = parse_item1(Bin), + Item. 
+``` + +```erlang +parse_item1(Bin) -> + case parse_bare_item(Bin) of + {Item, <<$;, R/bits>>} -> + {Params, Rest} = parse_before_param(R, []), + {{item, Item, Params}, Rest}; + {Item, Rest} -> + {{item, Item, []}, Rest} + end. +``` + +### parse_list_member + +Parse a binary SF list before a member. + +```erlang +-spec parse_list(binary()) -> sh_list(). +parse_list(<<>>) -> + []; +parse_list(Bin) -> + parse_list_before_member(Bin, []). +``` + +```erlang +parse_list_member(<<$(, R0/bits>>, Acc) -> + {Item, R} = parse_inner_list(R0, []), + parse_list_before_sep(R, [Item | Acc]); +``` + +### parse_list_member + +Parse a binary SF list before a member. + +```erlang +-spec parse_list(binary()) -> sh_list(). +parse_list(<<>>) -> + []; +parse_list(Bin) -> + parse_list_before_member(Bin, []). +``` + +```erlang +parse_list_member(R0, Acc) -> + {Item, R} = parse_item1(R0), + parse_list_before_sep(R, [Item | Acc]). +``` + +### parse_list_before_sep + +Parse a binary SF list before a separator. + +```erlang +parse_list_before_sep(<<$\s, R/bits>>, Acc) -> + parse_list_before_sep(R, Acc); +``` + +### parse_list_before_sep + +Parse a binary SF list before a separator. + +```erlang +parse_list_before_sep(<<$\t, R/bits>>, Acc) -> + parse_list_before_sep(R, Acc); +``` + +### parse_list_before_sep + +Parse a binary SF list before a separator. + +```erlang +parse_list_before_sep(<<$,, R/bits>>, Acc) -> + parse_list_before_member(R, Acc); +``` + +### parse_list_before_sep + +Parse a binary SF list before a separator. + +```erlang +parse_list_before_sep(<<>>, Acc) -> + lists:reverse(Acc). +``` + +### parse_list_before_member + +Parse a binary SF list before a member. + +```erlang +parse_list_before_member(<<$\s, R/bits>>, Acc) -> + parse_list_before_member(R, Acc); +``` + +### parse_list_before_member + +Parse a binary SF list before a member. 
+ +```erlang +parse_list_before_member(<<$\t, R/bits>>, Acc) -> + parse_list_before_member(R, Acc); +``` + +### parse_list_before_member + +Parse a binary SF list before a member. + +```erlang +parse_list_before_member(R, Acc) -> + parse_list_member(R, Acc). +``` + +### parse_inner_list + +```erlang +parse_inner_list(<<$\s, R/bits>>, Acc) -> + parse_inner_list(R, Acc); +``` + +### parse_inner_list + +```erlang +parse_inner_list(<<$), $;, R0/bits>>, Acc) -> + {Params, R} = parse_before_param(R0, []), + {{list, lists:reverse(Acc), Params}, R}; +``` + +### parse_inner_list + +```erlang +parse_inner_list(<<$), R/bits>>, Acc) -> + {{list, lists:reverse(Acc), []}, R}; +``` + +### parse_inner_list + +```erlang +parse_inner_list(R0, Acc) -> + {Item, R = <>} = parse_item1(R0), + true = (C =:= $\s) orelse (C =:= $)), + parse_inner_list(R, [Item | Acc]). +``` + +### parse_before_param + +```erlang +parse_before_param(<<$\s, R/bits>>, Acc) -> + parse_before_param(R, Acc); +``` + +### parse_before_param + +```erlang +parse_before_param(<>, Acc) when ?IS_LC_ALPHA(C) or (C =:= $*) -> + parse_param(R, Acc, <>). +``` + +### parse_param + +```erlang +parse_param(<<$;, R/bits>>, Acc, K) -> + parse_before_param(R, lists:keystore(K, 1, Acc, {K, true})); +``` + +### parse_param + +```erlang +parse_param(<<$=, R0/bits>>, Acc, K) -> + case parse_bare_item(R0) of + {Item, <<$;, R/bits>>} -> + parse_before_param(R, lists:keystore(K, 1, Acc, {K, Item})); + {Item, R} -> + {lists:keystore(K, 1, Acc, {K, Item}), R} + end; +``` + +### parse_param + +```erlang +parse_param(<>, Acc, K) when + ?IS_LC_ALPHA(C) or ?IS_DIGIT(C) or + (C =:= $_) or (C =:= $-) or (C =:= $.) or (C =:= $*) -> + parse_param(R, Acc, <>); +``` + +### parse_param + +```erlang +parse_param(R, Acc, K) -> + {lists:keystore(K, 1, Acc, {K, true}), R}. +``` + +### parse_bare_item + +Parse an integer or decimal. 
+ +```erlang +parse_bare_item(<<$-, R/bits>>) -> parse_number(R, 0, <<$->>); +``` + +### parse_bare_item + +Parse an integer or decimal. + +```erlang +parse_bare_item(<>) when ?IS_DIGIT(C) -> parse_number(R, 1, <>); +``` + +### parse_bare_item + +Parse an integer or decimal. + +```erlang +parse_bare_item(<<$", R/bits>>) -> + % Parse a string. +``` + +### parse_bare_item + +```erlang +parse_bare_item(<>) when ?IS_ALPHA(C) or (C =:= $*) -> + % Parse a token. +``` + +### parse_bare_item + +```erlang +parse_bare_item(<<$:, R/bits>>) -> + % Parse a byte sequence. +``` + +### parse_bare_item + +```erlang +parse_bare_item(<<"?0", R/bits>>) -> + % Parse a boolean false. +``` + +### parse_bare_item + +```erlang +parse_bare_item(<<"?1", R/bits>>) -> + % Parse a boolean true. +``` + +### parse_number + +Parse an integer or decimal binary. + +```erlang +parse_number(<>, L, Acc) when ?IS_DIGIT(C) -> + parse_number(R, L + 1, <>); +``` + +### parse_number + +Parse an integer or decimal binary. + +```erlang +parse_number(<<$., R/bits>>, L, Acc) -> + parse_decimal(R, L, 0, Acc, <<>>); +``` + +### parse_number + +Parse an integer or decimal binary. + +```erlang +parse_number(R, L, Acc) when L =< 15 -> + {binary_to_integer(Acc), R}. +``` + +### parse_decimal + +Parse a decimal binary. + +```erlang +parse_decimal(<>, L1, L2, IntAcc, FracAcc) when ?IS_DIGIT(C) -> + parse_decimal(R, L1, L2 + 1, IntAcc, <>); +``` + +### parse_decimal + +Parse a decimal binary. + +```erlang +parse_decimal(R, L1, L2, IntAcc, FracAcc0) when L1 =< 12, L2 >= 1, L2 =< 3 -> + %% While not strictly required this gives a more consistent representation. +``` + +### parse_string + +Parse a string binary. + +```erlang +parse_string(<<$\\, $", R/bits>>, Acc) -> + parse_string(R, <>); +``` + +### parse_string + +Parse a string binary. + +```erlang +parse_string(<<$\\, $\\, R/bits>>, Acc) -> + parse_string(R, <>); +``` + +### parse_string + +Parse a string binary. 
+ +```erlang +parse_string(<<$", R/bits>>, Acc) -> + {{string, Acc}, R}; +``` + +### parse_string + +Parse a string binary. + +```erlang +parse_string(<>, Acc) when + C >= 16#20, C =< 16#21; + C >= 16#23, C =< 16#5b; + C >= 16#5d, C =< 16#7e -> + parse_string(R, <>). +``` + +### parse_token + +Parse a token binary. + +```erlang +parse_token(<>, Acc) when ?IS_TOKEN(C) or (C =:= $:) or (C =:= $/) -> + parse_token(R, <>); +``` + +### parse_token + +Parse a token binary. + +```erlang +parse_token(R, Acc) -> + {{token, Acc}, R}. +``` + +### parse_binary + +Parse a byte sequence binary. + +```erlang +parse_binary(Bin) when is_binary(Bin) -> + parse_binary(Bin, <<>>). +``` + +### parse_binary + +```erlang +parse_binary(<<$:, R/bits>>, Acc) -> + {{binary, base64:decode(Acc)}, R}; +``` + +### parse_binary + +```erlang +parse_binary(<>, Acc) when ?IS_ALPHANUM(C) or (C =:= $+) or (C =:= $/) or (C =:= $=) -> + parse_binary(R, <>). +``` + +### parse_struct_hd_test_ + +```erlang +parse_struct_hd_test_() -> + Files = filelib:wildcard("deps/structured-header-tests/*.json"), + lists:flatten([ + begin + {ok, JSON} = file:read_file(File), + Tests = json:decode(JSON), + [ + {iolist_to_binary(io_lib:format("~s: ~s", [filename:basename(File), Name])), fun() -> + %% The implementation is strict. We fail whenever we can. +``` + +### expected_to_term + +```erlang +expected_to_term([Bare, []]) when + is_boolean(Bare); is_number(Bare); is_binary(Bare); is_map(Bare) -> + {item, e2tb(Bare), []}; +``` + +### expected_to_term + +```erlang +expected_to_term([Bare, Params = [[<<_/bits>>, _] | _]]) when + is_boolean(Bare); is_number(Bare); is_binary(Bare); is_map(Bare) -> + {item, e2tb(Bare), e2tp(Params)}; +%% Empty list or dictionary. +``` + +### expected_to_term + +```erlang +expected_to_term([]) -> + []; +%% Dictionary. +``` + +### expected_to_term + +```erlang +expected_to_term(Dict = [[<<_/bits>>, V] | _]) when V =/= [] -> + e2t(Dict); +%% Outer list. 
+``` + +### expected_to_term + +```erlang +expected_to_term(List) when is_list(List) -> + [ e2t(E) || E <- List ]. +``` + +### e2t + +```erlang +e2t(Dict = [[<<_/bits>>, _] | _]) -> + [{K, e2t(V)} || [K, V] <- Dict]; +%% Inner list. +``` + +### e2t + +```erlang +e2t([List, Params]) when is_list(List) -> + {list, [ e2t(E) || E <- List ], e2tp(Params)}; +%% Item. +``` + +### e2t + +```erlang +e2t([Bare, Params]) -> + {item, e2tb(Bare), e2tp(Params)}. +``` + +### e2tb + +```erlang +e2tb(#{<<"__type">> := <<"token">>, <<"value">> := V}) -> + {token, V}; +``` + +### e2tb + +```erlang +e2tb(#{<<"__type">> := <<"binary">>, <<"value">> := V}) -> + {binary, base32:decode(V)}; +``` + +### e2tb + +```erlang +e2tb(V) when is_binary(V) -> + {string, V}; +``` + +### e2tb + +```erlang +e2tb(V) when is_float(V) -> + %% There should be no rounding needed for the test cases. +``` + +### e2tb + +```erlang +e2tb(V) -> + V. +``` + +### e2tp + +```erlang +e2tp([]) -> + []; +``` + +### e2tp + +```erlang +e2tp(Params) -> + [{K, e2tb(V)} || [K, V] <- Params]. +``` + +### raw_to_binary + +```erlang +raw_to_binary(RawList) -> + trim_ws(iolist_to_binary(lists:join(<<", ">>, RawList))). +``` + +### trim_ws + +```erlang +trim_ws(<<$\s, R/bits>>) -> trim_ws(R); +``` + +### trim_ws + +```erlang +trim_ws(R) -> trim_ws_end(R, byte_size(R) - 1). +``` + +### trim_ws_end + +```erlang +trim_ws_end(_, -1) -> + <<>>; +``` + +### trim_ws_end + +```erlang +trim_ws_end(Value, N) -> + case binary:at(Value, N) of + $\s -> + trim_ws_end(Value, N - 1); + _ -> + S = N + 1, + <> = Value, + Value2 + end. +``` + +### dictionary + +```erlang +-spec dictionary(#{binary() => sh_item() | sh_inner_list()} | sh_dictionary()) -> + iolist(). +``` + +```erlang +dictionary(Map) when is_map(Map) -> + dictionary(maps:to_list(Map)); +``` + +### dictionary + +```erlang +-spec dictionary(#{binary() => sh_item() | sh_inner_list()} | sh_dictionary()) -> + iolist(). 
+``` + +```erlang +dictionary(KVList) when is_list(KVList) -> + lists:join( + <<", ">>, + [ + case Value of + true -> Key; + _ -> [Key, $=, item_or_inner_list(Value)] + end + || + {Key, Value} <- KVList + ] + ). +``` + +### item_or_inner_list + +```erlang +-spec list(sh_list()) -> iolist(). +list(List) -> + lists:join(<<", ">>, [item_or_inner_list(Value) || Value <- List]). +``` + +```erlang +item_or_inner_list(Value = {list, _, _}) -> + inner_list(Value); +``` + +### item_or_inner_list + +```erlang +-spec list(sh_list()) -> iolist(). +list(List) -> + lists:join(<<", ">>, [item_or_inner_list(Value) || Value <- List]). +``` + +```erlang +item_or_inner_list(Value) -> + item(Value). +``` + +### inner_list + +```erlang +inner_list({list, List, Params}) -> + [$(, lists:join($\s, [item(Value) || Value <- List]), $), params(Params)]. +``` + +### bare_item + +```erlang +bare_item({string, String}) -> + [$", escape_string(String, <<>>), $"]; +%% @todo Must fail if Token has invalid characters. +``` + +### bare_item + +```erlang +bare_item({token, Token}) -> + Token; +``` + +### bare_item + +```erlang +bare_item({binary, Binary}) -> + [$:, base64:encode(Binary), $:]; +``` + +### bare_item + +```erlang +bare_item({decimal, {Base, Exp}}) when Exp >= 0 -> + Mul = + case Exp of + 0 -> 1; + 1 -> 10; + 2 -> 100; + 3 -> 1000; + 4 -> 10000; + 5 -> 100000; + 6 -> 1000000; + 7 -> 10000000; + 8 -> 100000000; + 9 -> 1000000000; + 10 -> 10000000000; + 11 -> 100000000000; + 12 -> 1000000000000 + end, + MaxLenWithSign = + if + Base < 0 -> 13; + true -> 12 + end, + Bin = integer_to_binary(Base * Mul), + true = byte_size(Bin) =< MaxLenWithSign, + [Bin, <<".0">>]; +``` + +### bare_item + +```erlang +bare_item({decimal, {Base, -1}}) -> + Int = Base div 10, + Frac = abs(Base) rem 10, + [integer_to_binary(Int), $., integer_to_binary(Frac)]; +``` + +### bare_item + +```erlang +bare_item({decimal, {Base, -2}}) -> + Int = Base div 100, + Frac = abs(Base) rem 100, + [integer_to_binary(Int), $., 
integer_to_binary(Frac)]; +``` + +### bare_item + +```erlang +bare_item({decimal, {Base, -3}}) -> + Int = Base div 1000, + Frac = abs(Base) rem 1000, + [integer_to_binary(Int), $., integer_to_binary(Frac)]; +``` + +### bare_item + +```erlang +bare_item({decimal, {Base, Exp}}) -> + Div = exp_div(Exp), + Int0 = Base div Div, + true = abs(Int0) < 1000000000000, + Frac0 = abs(Base) rem Div, + DivFrac = Div div 1000, + Frac1 = Frac0 div DivFrac, + {Int, Frac} = + if + (Frac0 rem DivFrac) > (DivFrac div 2) -> + case Frac1 of + 999 when Int0 < 0 -> {Int0 - 1, 0}; + 999 -> {Int0 + 1, 0}; + _ -> {Int0, Frac1 + 1} + end; + true -> + {Int0, Frac1} + end, + [ + integer_to_binary(Int), + $., + if + Frac < 10 -> [$0, $0, integer_to_binary(Frac)]; + Frac < 100 -> [$0, integer_to_binary(Frac)]; + true -> integer_to_binary(Frac) + end + ]; +``` + +### bare_item + +```erlang +bare_item(Integer) when is_integer(Integer) -> + integer_to_binary(Integer); +``` + +### bare_item + +```erlang +bare_item(true) -> + <<"?1">>; +``` + +### bare_item + +```erlang +bare_item(false) -> + <<"?0">>. +``` + +### exp_div + +```erlang +exp_div(0) -> 1; +``` + +### exp_div + +```erlang +exp_div(N) -> 10 * exp_div(N + 1). +``` + +### escape_string + +```erlang +escape_string(<<>>, Acc) -> Acc; +``` + +### escape_string + +```erlang +escape_string(<<$\\, R/bits>>, Acc) -> escape_string(R, <>); +``` + +### escape_string + +```erlang +escape_string(<<$", R/bits>>, Acc) -> escape_string(R, <>); +``` + +### escape_string + +```erlang +escape_string(<>, Acc) -> escape_string(R, <>). +``` + +### params + +```erlang +params(Params) -> + [ + case Param of + {Key, true} -> [$;, Key]; + {Key, Value} -> [$;, Key, $=, bare_item(Value)] + end + || + Param <- Params + ]. 
+``` + +### to_dictionary_test + +```erlang +to_dictionary_test() -> + {ok, SfDictionary} = to_dictionary(#{ + foo => bar, + <<"fizz">> => <<"buzz">>, + <<"item-with">> => { item, <<"params">>, [{first, param}, {another, true}] }, + <<"int-item">> => 1, + <<"int-item-with-params">> => { item, 1, [{int, <<"param">>}] }, + <<"no">> => <<"params">>, + <<"empty">> => {item, params, []}, + inner => [<<"a">>, b, true, 3], + inner_with_params => {list, [{item, 1, []}, 2], [{first, param}]}, + inner_inner_params => [{item, 1, [{heres, <<"one">>}]}, 2] + }), + ?assertEqual( + {<<"foo">>, {item, {token,<<"bar">>}, []}}, + lists:keyfind(<<"foo">>, 1, SfDictionary) + ), + ?assertEqual( + {<<"fizz">>, {item, {string,<<"buzz">>}, []}}, + lists:keyfind(<<"fizz">>, 1, SfDictionary) + ), + ?assertEqual( + {<<"item-with">>, + {item, + {string,<<"params">>}, + [{<<"first">>, {token,<<"param">>}}, {<<"another">>, true}] + } + }, + lists:keyfind(<<"item-with">>, 1, SfDictionary) + ), + ?assertEqual( + {<<"int-item">>, {item, 1, []}}, + lists:keyfind(<<"int-item">>, 1, SfDictionary) + ), + ?assertEqual( + {<<"int-item-with-params">>, {item, 1, [{<<"int">>, {string, <<"param">>}}]}}, + lists:keyfind(<<"int-item-with-params">>, 1, SfDictionary) + ), + ?assertEqual( + {<<"no">>, {item, {string, <<"params">>}, []}}, + lists:keyfind(<<"no">>, 1, SfDictionary) + ), + ?assertEqual( + {<<"empty">>, {item, {token, <<"params">>}, []}}, + lists:keyfind(<<"empty">>, 1, SfDictionary) + ), + ?assertEqual( + { + <<"inner">>, + { + list, + [ + {item, {string, <<"a">>}, []}, + {item, {token, <<"b">>}, []}, + {item, true, []}, + {item, 3, []} + ], + [] + } + }, + lists:keyfind(<<"inner">>, 1, SfDictionary) + ), + ?assertEqual( + {<<"inner_with_params">>, + {list, + [{item, 1, []}, {item, 2, []}], + [{<<"first">>, {token, <<"param">>}}] + } + }, + lists:keyfind(<<"inner_with_params">>, 1, SfDictionary) + ), + ?assertEqual( + {<<"inner_inner_params">>, + {list, + [{item, 1, [{<<"heres">>, {string, 
<<"one">>}}]}, {item, 2, []}], + [] + } + }, + lists:keyfind(<<"inner_inner_params">>, 1, SfDictionary) + ), + dictionary(SfDictionary). +``` + +### to_dictionary_depth_test + +```erlang +to_dictionary_depth_test() -> + {too_deep, _} = to_dictionary(#{ + foo => #{ bar => buzz } + }), + {too_deep, _} = to_dictionary(#{ + foo => [1, 2, [3]] + }), + ok. +``` + +### to_item_test + +```erlang +to_item_test() -> + ?assertEqual(to_item(1), {ok, {item, 1, []}}), + ?assertEqual(to_item(true), {ok, {item, true, []}}), + ?assertEqual(to_item(<<"foobar">>), {ok, {item, {string, <<"foobar">>}, []}}), + ?assertEqual(to_item("foobar"), {ok, {item, {string, <<"foobar">>}, []}}), + ?assertEqual(to_item(foobar), {ok, {item, {token, <<"foobar">>}, []}}), + ?assertEqual( + to_item({item, "foobar", [{first, param}]}), + {ok, {item, {string, <<"foobar">>}, [{<<"first">>, {token, <<"param">>}}]}} + ), + ok. +``` + +### to_list_test + +```erlang +to_list_test() -> + ?assertEqual( + to_list( + [1, 2, <<"three">>, [4, <<"five">>], + {list, [6, <<"seven">>], + [{<<"first">>, {token, <<"param">>}}] + } + ] + ), + {ok, [ + {item, 1, []}, + {item, 2, []}, + {item, {string, <<"three">>}, []}, + {list, [{ item, 4, []}, {item, {string, <<"five">>}, []}], []}, + {list, + [{ item, 6, []}, {item, {string, <<"seven">>}, []}], + [{<<"first">>, {token, <<"param">>}}] + } + ]} + ), + ok. +``` + +### to_list_depth_test + +```erlang +to_list_depth_test() -> + {too_deep, _} = to_list([1,2,3, [4, [5]]]), + {too_deep, _} = to_list([1,2,3, #{ foo => bar } ]), + {too_deep, _} = to_list([1,2,3, [#{ foo => bar }] ]), + ok. 
+``` + +--- + +*Generated from [hb_structured_fields.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_structured_fields.erl)* diff --git a/docs/book/src/hb_sup.erl.md b/docs/book/src/hb_sup.erl.md new file mode 100644 index 000000000..5d115e181 --- /dev/null +++ b/docs/book/src/hb_sup.erl.md @@ -0,0 +1,89 @@ +# hb_sup + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_sup.erl) + +## Exported Functions + +- `init/1` +- `start_link/0` +- `start_link/1` + +--- + +### start_link + +```erlang +start_link() -> + start_link(#{}). +``` + +### start_link + +```erlang +start_link(Opts) -> + supervisor:start_link({local, ?SERVER}, ?MODULE, Opts). +``` + +### init + +```erlang +init(Opts) -> + SupFlags = #{strategy => one_for_all, + intensity => 0, + period => 1}, + StoreChildren = store_children(hb_opts:get(store, [], Opts)), + GunChild = + #{ + id => hb_http_client, + start => {hb_http_client, start_link, [Opts]}, + restart => permanent, + shutdown => 5000, + type => worker, + modules => [hb_http_client] + }, + {ok, {SupFlags, [GunChild | StoreChildren]}}. +``` + +### store_children + +Generate a child spec for stores in the given Opts. + +```erlang +store_children(Store) when not is_list(Store) -> + store_children([Store]); +``` + +### store_children + +Generate a child spec for stores in the given Opts. + +```erlang +store_children([]) -> []; +``` + +### store_children + +Generate a child spec for stores in the given Opts. + +```erlang +store_children([RocksDBOpts = #{ <<"store-module">> := hb_store_rocksdb } | Rest]) -> + [ + #{ + id => hb_store_rocksdb, + start => {hb_store_rocksdb, start_link, [RocksDBOpts]} + } + ] ++ store_children(Rest); +``` + +### store_children + +Generate a child spec for stores in the given Opts. + +```erlang +store_children([_ | Rest]) -> + store_children(Rest). 
+``` + +--- + +*Generated from [hb_sup.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_sup.erl)* diff --git a/docs/book/src/hb_test_utils.erl.md b/docs/book/src/hb_test_utils.erl.md new file mode 100644 index 000000000..f5a86ce3c --- /dev/null +++ b/docs/book/src/hb_test_utils.erl.md @@ -0,0 +1,351 @@ +# hb_test_utils + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_test_utils.erl) + +Simple utilities for testing HyperBEAM. Includes functions for +generating isolated (fresh) test stores, running suites of tests with +differing options, as well as executing and reporting benchmarks. + +--- + +## Exported Functions + +- `assert_throws/4` +- `benchmark_iterations/2` +- `benchmark_print/2` +- `benchmark_print/3` +- `benchmark_print/4` +- `benchmark/1` +- `benchmark/2` +- `benchmark/3` +- `compare_events/3` +- `compare_events/4` +- `compare_events/5` +- `run/4` +- `suite_with_opts/2` +- `test_store/0` +- `test_store/1` +- `test_store/2` + +--- + +### test_store + +Simple utilities for testing HyperBEAM. Includes functions for +Generate a new, unique test store as an isolated context for an execution. + +```erlang +test_store() -> + test_store(maps:get(<<"store-module">>, hd(hb_opts:get(store)))). +``` + +### test_store + +Simple utilities for testing HyperBEAM. Includes functions for +Generate a new, unique test store as an isolated context for an execution. + +```erlang +test_store(Mod) -> + test_store(Mod, <<"default">>). +``` + +### test_store + +Simple utilities for testing HyperBEAM. Includes functions for +Generate a new, unique test store as an isolated context for an execution. + +```erlang +test_store(Mod, Tag) -> + TestDir = + << + "cache-TEST/run-", + Tag/binary, "-", + (integer_to_binary(erlang:system_time(millisecond)))/binary + >>, + % Wait a tiny interval to ensure that any further tests will get their own + % directory. +``` + +### suite_with_opts + +Run each test in a suite with each set of options. 
Start and reset + +```erlang +suite_with_opts(Suite, OptsList) -> + lists:filtermap( + fun(OptSpec = #{ name := _Name, opts := Opts, desc := ODesc}) -> + Store = hb_opts:get(store, hb_opts:get(store), Opts), + Skip = hb_maps:get(skip, OptSpec, [], Opts), + case satisfies_requirements(OptSpec) of + true -> + {true, {foreach, + fun() -> + ?event({starting, Store}), + % Create and set a random server ID for the test + % process. +``` + +### satisfies_requirements + +Determine if the environment satisfies the given test requirements. + +```erlang +satisfies_requirements(Requirements) when is_map(Requirements) -> + satisfies_requirements(hb_maps:get(requires, Requirements, [])); +``` + +### satisfies_requirements + +Determine if the environment satisfies the given test requirements. + +```erlang +satisfies_requirements(Requirements) -> + lists:all( + fun(Req) -> + case hb_features:enabled(Req) of + true -> true; + false -> + case code:is_loaded(Req) of + false -> false; + {file, _} -> + case erlang:function_exported(Req, enabled, 0) of + true -> Req:enabled(); + false -> true + end + end + end + end, + Requirements + ). +``` + +### opts_from_list + +Find the options from a list of options by name. + +```erlang +opts_from_list(OptsName, OptsList) -> + hd([ O || #{ name := OName, opts := O } <- OptsList, OName == OptsName ]). +``` + +### run + +```erlang +run(Name, OptsName, Suite, OptsList) -> + {_, _, Test} = lists:keyfind(Name, 1, Suite), + Test(opts_from_list(OptsName, OptsList)). 
+``` + +### compare_events + +Compares the events generated by executing a test/function with two + +```erlang +compare_events(Fun, Opts1, Opts2) -> + hb_store:reset(hb_opts:get(store, hb_opts:get(store), Opts1)), + hb_store:write( + hb_opts:get(store, hb_opts:get(store), Opts1), + <<"test">>, + <<"test">> + ), + {EventsSample1, _Res2} = hb_event:diff( + fun() -> + Fun(Opts1) + end + ), + hb_store:reset(hb_opts:get(store, hb_opts:get(store), Opts1)), + hb_store:reset(hb_opts:get(store, hb_opts:get(store), Opts2)), + {EventsSample2, _Res} = hb_event:diff( + fun() -> + Fun(Opts2) + end + ), + hb_store:reset(hb_opts:get(store, hb_opts:get(store), Opts2)), + EventsDiff = hb_message:diff(EventsSample1, EventsSample2, #{}), + ?event( + debug_perf, + {events, + {sample1, EventsSample1}, + {sample2, EventsSample2}, + {events_diff, EventsDiff} + } + ), + EventsDiff. +``` + +### compare_events + +```erlang +compare_events(Fun, OptsName1, OptsName2, OptsList) -> + compare_events( + Fun, + opts_from_list(OptsName1, OptsList), + opts_from_list(OptsName2, OptsList) + ). +``` + +### compare_events + +```erlang +compare_events(Name, OptsName1, OptsName2, Suite, OptsList) -> + {_, _, Test} = lists:keyfind(Name, 1, Suite), + compare_events( + Test, + opts_from_list(OptsName1, OptsList), + opts_from_list(OptsName2, OptsList) + ). +``` + +### assert_throws + +Assert that a function throws an expected exception. Needed to work around some + +```erlang +assert_throws(Fun, Args, ExpectedException, Label) -> + Error = try + apply(Fun, Args), + failed_to_throw + catch + error:ExpectedException -> expected_exception; + ExpectedException -> expected_exception; + error:Other -> {wrong_exception, Other}; + Other -> {wrong_exception, Other} + end, + ?assertEqual(expected_exception, Error, Label). +``` + +### benchmark + +Run a function as many times as possible in a given amount of time. + +```erlang +benchmark(Fun) -> + benchmark(Fun, ?DEFAULT_BENCHMARK_TIME). 
+``` + +### benchmark + +```erlang +benchmark(Fun, TLen) -> + T0 = erlang:system_time(millisecond), + hb_util:until( + fun() -> erlang:system_time(millisecond) - T0 > (TLen * 1000) end, + Fun, + 0 + ). +``` + +### benchmark_iterations + +Return the amount of time required to execute N iterations of a function + +```erlang +benchmark_iterations(Fun, N) -> + {Time, _} = timer:tc( + fun() -> + lists:foreach( + fun(I) -> Fun(I) end, + lists:seq(1, N) + ) + end + ), + Time / 1_000_000. +``` + +### benchmark + +Run multiple instances of a function in parallel for a given amount of time. + +```erlang +benchmark(Fun, TLen, Procs) -> + Parent = self(), + receive _ -> worker_synchronized end, + StartWorker = + fun(_) -> + Ref = make_ref(), + spawn_link(fun() -> + Count = benchmark(Fun, TLen), + Parent ! {work_complete, Ref, Count} + end), + Ref + end, + CollectRes = + fun(R) -> + receive + {work_complete, R, Count} -> + %?event(benchmark, {work_complete, R, Count}), + Count + end + end, + Refs = lists:map(StartWorker, lists:seq(1, Procs)), + lists:sum(lists:map(CollectRes, Refs)). +``` + +### benchmark_print + +Print benchmark results in a human-readable format that EUnit writes to + +```erlang +benchmark_print(Verb, Iterations) -> + benchmark_print(Verb, Iterations, ?DEFAULT_BENCHMARK_TIME). +``` + +### benchmark_print + +```erlang +benchmark_print(Verb, Iterations, Time) when is_integer(Iterations) -> + hb_format:eunit_print( + "~s ~s in ~s (~s/s)", + [ + Verb, + hb_util:human_int(Iterations), + format_time(Time), + hb_util:human_int(Iterations / Time) + ] + ); +``` + +### benchmark_print + +```erlang +benchmark_print(Verb, Noun, Iterations) -> + benchmark_print(Verb, Noun, Iterations, ?DEFAULT_BENCHMARK_TIME). 
+``` + +### benchmark_print + +```erlang +benchmark_print(Verb, Noun, Iterations, Time) -> + hb_format:eunit_print( + "~s ~s ~s in ~s (~s ~s/s)", + [ + Verb, + hb_util:human_int(Iterations), + Noun, + format_time(Time), + hb_util:human_int(Iterations / Time), + Noun + ] + ). +``` + +### format_time + +Format a time in human-readable format. Takes arguments in seconds. + +```erlang +format_time(Time) when is_integer(Time) -> + hb_util:human_int(Time) ++ "s"; +``` + +### format_time + +Format a time in human-readable format. Takes arguments in seconds. + +```erlang +format_time(Time) -> +``` + +--- + +*Generated from [hb_test_utils.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_test_utils.erl)* diff --git a/docs/book/src/hb_tracer.erl.md b/docs/book/src/hb_tracer.erl.md new file mode 100644 index 000000000..001567bb3 --- /dev/null +++ b/docs/book/src/hb_tracer.erl.md @@ -0,0 +1,186 @@ +# hb_tracer + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_tracer.erl) + +A module for tracing the flow of requests through the system. +This allows for tracking the lifecycle of a request from HTTP receipt through processing and response. + +--- + +## Exported Functions + +- `format_error_trace/1` +- `get_trace/1` +- `record_step/2` +- `start_trace/0` + +--- + +### start_trace + +A module for tracing the flow of requests through the system. +Start a new tracer acting as queue of events registered. + +```erlang +start_trace() -> + Trace = #{steps => queue:new()}, + TracePID = spawn(fun() -> trace_loop(Trace) end), + ?event(trace, {trace_started, TracePID}), + TracePID. 
+``` + +### trace_loop + +```erlang +trace_loop(Trace) -> + receive + {record_step, Step} -> + Steps = maps:get(steps, Trace), + NewTrace = Trace#{steps => queue:in(Step, Steps)}, + ?event(trace, {step_recorded, Step}), + trace_loop(NewTrace); + {get_trace, From} -> + % Convert queue to list for the response + TraceWithList = + Trace#{steps => + queue:to_list( + maps:get(steps, Trace))}, + From ! {trace, TraceWithList}, + trace_loop(Trace) + end. +``` + +### record_step + +Register a new step into a tracer + +```erlang +record_step(TracePID, Step) -> + TracePID ! {record_step, Step}. +``` + +### get_trace + +Exports the complete queue of events + +```erlang +get_trace(TracePID) -> + TracePID ! {get_trace, self()}, + receive + {trace, Trace} -> + Trace + after 5000 -> + ?event(trace, {trace_timeout, TracePID}), + {trace, #{}} + end. +``` + +### format_error_trace + +Format a trace for error in a user-friendly emoji oriented output + +```erlang +format_error_trace(Trace) -> + Steps = maps:get(steps, Trace, []), + TraceMap = + lists:foldl(fun(TraceItem, Acc) -> + case TraceItem of + {http, {parsed_singleton, _ReqSingleton, _}} -> + maps:put(request_parsing, true, Acc); + {ao_core, {stage, Stage, _Task}} -> + maps:put(resolve_stage, Stage, Acc); + {ao_result, + {load_device_failed, _, _, _, _, {exec_exception, Exception}, _, _}} -> + maps:put(error, Exception, Acc); + {ao_result, + {exec_failed, + _, + _, + _, + {func, Fun}, + _, + {exec_exception, Error}, + _, + _}} -> + maps:put(error, {Fun, Error}, Acc); + _ -> Acc + end + end, + #{}, + Steps), + % Build the trace message + TraceStrings = <<"Oops! Something went wrong. 
Here's the rundown:">>, + % Add parsing status + ParsingTrace = + case maps:get(request_parsing, TraceMap, false) of + false -> + Emoji = failure_emoji(), + <>; + true -> + Emoji = checkmark_emoji(), + <> + end, + % Add stage information + StageTrace = + case maps:get(resolve_stage, TraceMap, undefined) of + undefined -> + ParsingTrace; + Stage -> + StageEmoji = stage_to_emoji(Stage), + try << ParsingTrace/binary, "\n", StageEmoji/binary, + " Resolved steps of your execution" >> + catch + error:badarg -> + iolist_to_binary(io_lib:format("~p", [ParsingTrace])) + end + end, + % Add error information + case maps:get(error, TraceMap, undefined) of + undefined -> + StageTrace; + {Fun, Reason} -> + FailureEmoji = failure_emoji(), + ErrMsg = list_to_binary(io_lib:format("~p -> ~p", [Fun, Reason])), + <>; + Error -> + FailureEmoji = failure_emoji(), + <> + end. +``` + +### checkmark_emoji + +```erlang +checkmark_emoji() -> + % Unicode for checkmark + <<"\xE2\x9C\x85">>. % \xE2\x9C\x85 is the checkmark emoji in UTF-8 +``` + +### failure_emoji + +```erlang +failure_emoji() -> + % Unicode for failure emoji + <<"\xE2\x9D\x8C">>. % \xE2\x9D\x8C is the failure emoji in UTF-8 +% Helper function to convert stage number to emoji +``` + +### stage_to_emoji + +```erlang +stage_to_emoji(Stage) when Stage >= 1, Stage =< 9 -> + % Unicode for circled numbers 1-9 + StageEmoji = Stage + 48, + <>; +``` + +### stage_to_emoji + +```erlang +stage_to_emoji(_) -> + "". 
+``` + +--- + +*Generated from [hb_tracer.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_tracer.erl)* diff --git a/docs/book/src/hb_util.erl.md b/docs/book/src/hb_util.erl.md new file mode 100644 index 000000000..52245617c --- /dev/null +++ b/docs/book/src/hb_util.erl.md @@ -0,0 +1,1666 @@ +# hb_util + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_util.erl) + +Simple type coercion functions, useful for quickly turning inputs from the +HTTP API into the correct types for the HyperBEAM runtime, if they are not +annotated by the user. + +--- + +## Exported Functions + +- `all_atoms/0` +- `all_hb_modules/0` +- `atom/1` +- `bin/1` +- `binary_is_atom/1` +- `binary_to_addresses/1` +- `ceil_int/2` +- `check_size/2` +- `check_type/2` +- `check_value/2` +- `count/2` +- `decode/1` +- `deep_get/3` +- `deep_get/4` +- `deep_merge/3` +- `deep_set/4` +- `encode/1` +- `find_target_path/2` +- `find_value/2` +- `find_value/3` +- `float/1` +- `floor_int/2` +- `hd/1` +- `hd/2` +- `hd/3` +- `human_id/1` +- `human_int/1` +- `id/1` +- `id/2` +- `int/1` +- `is_hb_module/1` +- `is_hb_module/2` +- `is_ordered_list/2` +- `is_string_list/1` +- `key_to_atom/1` +- `key_to_atom/2` +- `list_replace/3` +- `list_to_numbered_message/1` +- `list_with/2` +- `list_without/2` +- `list/1` +- `lower_case_key_map/2` +- `map/1` +- `maybe_throw/2` +- `mean/1` +- `message_to_ordered_list/1` +- `message_to_ordered_list/2` +- `native_id/1` +- `number/1` +- `numbered_keys_to_list/2` +- `ok_or_throw/3` +- `ok/1` +- `ok/2` +- `remove_common/2` +- `safe_decode/1` +- `safe_encode/1` +- `split_depth_string_aware_single/2` +- `split_depth_string_aware/2` +- `split_escaped_single/2` +- `stddev/1` +- `template_matches/3` +- `to_hex/1` +- `to_lower/1` +- `to_sorted_keys/1` +- `to_sorted_keys/2` +- `to_sorted_list/1` +- `to_sorted_list/2` +- `unique/1` +- `until/1` +- `until/2` +- `until/3` +- `variance/1` +- `weighted_random/1` + +--- + +### int + +A collection of utility 
functions for building with HyperBEAM. +Coerce a string to an integer. + +```erlang +int(Str) when is_binary(Str) -> + list_to_integer(binary_to_list(Str)); +``` + +### int + +A collection of utility functions for building with HyperBEAM. +Coerce a string to an integer. + +```erlang +int(Str) when is_list(Str) -> + list_to_integer(Str); +``` + +### int + +A collection of utility functions for building with HyperBEAM. +Coerce a string to an integer. + +```erlang +int(Int) when is_integer(Int) -> + Int. +``` + +### float + +Coerce a string to a float. + +```erlang +float(Str) when is_binary(Str) -> + list_to_float(binary_to_list(Str)); +``` + +### float + +Coerce a string to a float. + +```erlang +float(Str) when is_list(Str) -> + list_to_float(Str); +``` + +### float + +Coerce a string to a float. + +```erlang +float(Float) when is_float(Float) -> + Float; +``` + +### float + +Coerce a string to a float. + +```erlang +float(Int) when is_integer(Int) -> + Int / 1. +``` + +### atom + +Coerce a string to an atom. + +```erlang +atom(Str) when is_binary(Str) -> + list_to_existing_atom(binary_to_list(Str)); +``` + +### atom + +Coerce a string to an atom. + +```erlang +atom(Str) when is_list(Str) -> + list_to_existing_atom(Str); +``` + +### atom + +Coerce a string to an atom. + +```erlang +atom(Atom) when is_atom(Atom) -> + Atom. +``` + +### bin + +Coerce a value to a binary. + +```erlang +bin(Value) when is_atom(Value) -> + atom_to_binary(Value, utf8); +``` + +### bin + +Coerce a value to a binary. + +```erlang +bin(Value) when is_integer(Value) -> + integer_to_binary(Value); +``` + +### bin + +Coerce a value to a binary. + +```erlang +bin(Value) when is_float(Value) -> + float_to_binary(Value, [{decimals, 10}, compact]); +``` + +### bin + +Coerce a value to a binary. + +```erlang +bin(Value) when is_list(Value) -> + list_to_binary(Value); +``` + +### bin + +Coerce a value to a binary. + +```erlang +bin(Value) when is_binary(Value) -> + Value. 
+``` + +### list + +Coerce a value to a string list. + +```erlang +list(Value) when is_binary(Value) -> + binary_to_list(Value); +``` + +### list + +Coerce a value to a string list. + +```erlang +list(Value) when is_list(Value) -> Value; +``` + +### list + +Coerce a value to a string list. +Ensure that a value is a map. Only supports maps and lists of key-value + +```erlang +list(Value) when is_atom(Value) -> atom_to_list(Value). +``` + +### map + +Coerce a value to a string list. +Ensure that a value is a map. Only supports maps and lists of key-value + +```erlang +map(Value) when is_list(Value) -> + maps:from_list(Value); +``` + +### map + +Coerce a value to a string list. +Ensure that a value is a map. Only supports maps and lists of key-value + +```erlang +map(Value) when is_map(Value) -> + Value. +``` + +### ceil_int + +```erlang +ceil_int(IntValue, Nearest) -> + IntValue - (IntValue rem Nearest) + Nearest. +``` + +### floor_int + +```erlang +floor_int(IntValue, Nearest) -> + IntValue - (IntValue rem Nearest). +``` + +### ok + +Unwrap a tuple of the form `{ok, Value}`, or throw/return, depending on + +```erlang +ok(Value) -> ok(Value, #{}). +``` + +### ok + +Unwrap a tuple of the form `{ok, Value}`, or throw/return, depending on + +```erlang +ok({ok, Value}, _Opts) -> Value; +``` + +### ok + +Unwrap a tuple of the form `{ok, Value}`, or throw/return, depending on + +```erlang +ok(Other, Opts) -> + case hb_opts:get(error_strategy, throw, Opts) of + throw -> throw({unexpected, Other}); + _ -> {unexpected, Other} + end. +``` + +### until + +Utility function to wait for a condition to be true. Optionally, + +```erlang +until(Condition) -> + until(Condition, 0). +``` + +### until + +```erlang +until(Condition, Count) -> + until(Condition, fun() -> receive after 100 -> 1 end end, Count). 
+``` + +### until + +```erlang +until(Condition, Fun, Count) -> + case Condition() of + false -> + case apply(Fun, hb_ao:truncate_args(Fun, [Count])) of + {count, AddToCount} -> + until(Condition, Fun, Count + AddToCount); + _ -> + until(Condition, Fun, Count + 1) + end; + true -> Count + end. +``` + +### id + +Return the human-readable form of an ID of a message when given either + +```erlang +id(Item) -> id(Item, unsigned). +``` + +### id + +Return the human-readable form of an ID of a message when given either + +```erlang +id(TX, Type) when is_record(TX, tx) -> + encode(ar_bundles:id(TX, Type)); +``` + +### id + +Return the human-readable form of an ID of a message when given either + +```erlang +id(Map, Type) when is_map(Map) -> + hb_message:id(Map, Type); +``` + +### id + +Return the human-readable form of an ID of a message when given either + +```erlang +id(Bin, _) when is_binary(Bin) andalso byte_size(Bin) == 43 -> + Bin; +``` + +### id + +Return the human-readable form of an ID of a message when given either + +```erlang +id(Bin, _) when is_binary(Bin) andalso byte_size(Bin) == 32 -> + encode(Bin); +``` + +### id + +Return the human-readable form of an ID of a message when given either + +```erlang +id(Data, Type) when is_list(Data) -> + id(list_to_binary(Data), Type). +``` + +### to_lower + +Convert a binary to a lowercase. + +```erlang +to_lower(Str) -> + string:lowercase(Str). +``` + +### is_string_list + +Is the given term a string list? + +```erlang +is_string_list(MaybeString) -> + lists:all(fun is_integer/1, MaybeString). +``` + +### to_sorted_list + +Given a map or KVList, return a deterministically sorted list of its + +```erlang +to_sorted_list(Msg) -> + to_sorted_list(Msg, #{}). 
+``` + +### to_sorted_list + +```erlang +to_sorted_list(Msg, Opts) when is_map(Msg) -> + to_sorted_list(hb_maps:to_list(Msg, Opts), Opts); +``` + +### to_sorted_list + +```erlang +to_sorted_list(Msg = [{_Key, _} | _], _Opts) when is_list(Msg) -> + lists:sort(fun({Key1, _}, {Key2, _}) -> Key1 < Key2 end, Msg); +``` + +### to_sorted_list + +```erlang +to_sorted_list(Msg, _Opts) when is_list(Msg) -> + lists:sort(fun(Key1, Key2) -> Key1 < Key2 end, Msg). +``` + +### to_sorted_keys + +Given a map or KVList, return a deterministically ordered list of its keys. + +```erlang +to_sorted_keys(Msg) -> + to_sorted_keys(Msg, #{}). +``` + +### to_sorted_keys + +```erlang +to_sorted_keys(Msg, Opts) when is_map(Msg) -> + to_sorted_keys(hb_maps:keys(Msg, Opts), Opts); +``` + +### to_sorted_keys + +```erlang +to_sorted_keys(Msg, _Opts) when is_list(Msg) -> + lists:sort(fun(Key1, Key2) -> Key1 < Key2 end, Msg). +``` + +### key_to_atom + +Convert keys in a map to atoms, lowering `-` to `_`. + +```erlang +key_to_atom(Key) -> key_to_atom(Key, existing). +``` + +### key_to_atom + +Convert keys in a map to atoms, lowering `-` to `_`. + +```erlang +key_to_atom(Key, _Mode) when is_atom(Key) -> Key; +``` + +### key_to_atom + +Convert keys in a map to atoms, lowering `-` to `_`. + +```erlang +key_to_atom(Key, Mode) -> + WithoutDashes = to_lower(binary:replace(Key, <<"-">>, <<"_">>, [global])), + case Mode of + new_atoms -> binary_to_atom(WithoutDashes, utf8); + _ -> binary_to_existing_atom(WithoutDashes, utf8) + end. +``` + +### native_id + +Convert a human readable ID to a native binary ID. If the ID is already + +```erlang +native_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 43 -> + decode(Bin); +``` + +### native_id + +Convert a human readable ID to a native binary ID. If the ID is already + +```erlang +native_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 32 -> + Bin; +``` + +### native_id + +Convert a human readable ID to a native binary ID. 
If the ID is already + +```erlang +native_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 42 -> + Bin; +``` + +### native_id + +Convert a human readable ID to a native binary ID. If the ID is already + +```erlang +native_id(Wallet = {_Priv, _Pub}) -> + native_id(ar_wallet:to_address(Wallet)). +``` + +### human_id + +Convert a native binary ID to a human readable ID. If the ID is already + +```erlang +human_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 32 -> + encode(Bin); +``` + +### human_id + +Convert a native binary ID to a human readable ID. If the ID is already + +```erlang +human_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 43 -> + Bin; +``` + +### human_id + +Convert a native binary ID to a human readable ID. If the ID is already + +```erlang +human_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 42 -> + Bin; +``` + +### human_id + +Convert a native binary ID to a human readable ID. If the ID is already + +```erlang +human_id(Wallet = {_Priv, _Pub}) -> + human_id(ar_wallet:to_address(Wallet)). +``` + +### human_int + +Add `,` characters to a number every 3 digits to make it human readable. + +```erlang +human_int(Float) when is_float(Float) -> + human_int(erlang:round(Float)); +``` + +### human_int + +Add `,` characters to a number every 3 digits to make it human readable. + +```erlang +human_int(Int) -> + lists:reverse(add_commas(lists:reverse(integer_to_list(Int)))). +``` + +### add_commas + +```erlang +add_commas([A,B,C,Z|Rest]) -> [A,B,C,$,|add_commas([Z|Rest])]; +``` + +### add_commas + +Encode a binary to URL safe base64 binary string. + +```erlang +add_commas(List) -> List. +``` + +### encode + +Encode a binary to URL safe base64 binary string. + +```erlang +encode(Bin) -> + b64fast:encode(Bin). +``` + +### decode + +Try to decode a URL safe base64 into a binary or throw an error when + +```erlang +decode(Input) -> + b64fast:decode(Input). +``` + +### safe_encode + +Safely encode a binary to URL safe base64. 
+ +```erlang +safe_encode(Bin) when is_binary(Bin) -> + encode(Bin); +``` + +### safe_encode + +Safely encode a binary to URL safe base64. + +```erlang +safe_encode(Bin) -> + Bin. +``` + +### safe_decode + +Safely decode a URL safe base64 into a binary returning an ok or error + +```erlang +safe_decode(E) -> + try + D = decode(E), + {ok, D} + catch + _:_ -> {error, invalid} + end. +``` + +### to_hex + +Convert a binary to a hex string. Do not use this for anything other than + +```erlang +to_hex(Bin) when is_binary(Bin) -> + to_lower( + iolist_to_binary( + [io_lib:format("~2.16.0B", [X]) || X <- binary_to_list(Bin)] + ) + ). +``` + +### deep_merge + +Deep merge two maps, recursively merging nested maps. + +```erlang +deep_merge(Map1, Map2, Opts) when is_map(Map1), is_map(Map2) -> + hb_maps:fold( + fun(Key, Value2, AccMap) -> + case deep_get(Key, AccMap, Opts) of + Value1 when is_map(Value1), is_map(Value2) -> + % Both values are maps, recursively merge them + deep_set(Key, deep_merge(Value1, Value2, Opts), AccMap, Opts); + _ -> + % Either the key doesn't exist in Map1 or at least one of + % the values isn't a map. Simply use the value from Map2 + deep_set(Key, Value2, AccMap, Opts) + end + end, + Map1, + Map2, + Opts + ). 
+``` + +### deep_set + +Set a deep value in a message by its path, _assuming all messages are + +```erlang +deep_set(_Path, undefined, Msg, _Opts) -> Msg; +``` + +### deep_set + +Set a deep value in a message by its path, _assuming all messages are + +```erlang +deep_set(Path, Value, Msg, Opts) when not is_list(Path) -> + deep_set(hb_path:term_to_path_parts(Path, Opts), Value, Msg, Opts); +``` + +### deep_set + +Set a deep value in a message by its path, _assuming all messages are + +```erlang +deep_set([Key], unset, Msg, Opts) -> + hb_maps:remove(Key, Msg, Opts); +``` + +### deep_set + +Set a deep value in a message by its path, _assuming all messages are + +```erlang +deep_set([Key], Value, Msg, Opts) -> + case hb_maps:get(Key, Msg, not_found, Opts) of + ExistingMap when is_map(ExistingMap) andalso is_map(Value) -> + % If both are maps, merge them + Msg#{ Key => hb_maps:merge(ExistingMap, Value, Opts) }; + _ -> + Msg#{ Key => Value } + end; +``` + +### deep_set + +Set a deep value in a message by its path, _assuming all messages are + +```erlang +deep_set([Key|Rest], Value, Map, Opts) -> + SubMap = hb_maps:get(Key, Map, #{}, Opts), + hb_maps:put(Key, deep_set(Rest, Value, SubMap, Opts), Map, Opts). +``` + +### deep_get + +Get a deep value from a message. + +```erlang +deep_get(Path, Msg, Opts) -> deep_get(Path, Msg, not_found, Opts). +``` + +### deep_get + +Get a deep value from a message. + +```erlang +deep_get(Path, Msg, Default, Opts) when not is_list(Path) -> + deep_get(hb_path:term_to_path_parts(Path, Opts), Msg, Default, Opts); +``` + +### deep_get + +Get a deep value from a message. + +```erlang +deep_get([Key], Msg, Default, Opts) -> + case hb_maps:find(Key, Msg, Opts) of + {ok, Value} -> Value; + error -> Default + end; +``` + +### deep_get + +Get a deep value from a message. 
+ +```erlang +deep_get([Key|Rest], Msg, Default, Opts) -> + case hb_maps:find(Key, Msg, Opts) of + {ok, DeepMsg} when is_map(DeepMsg) -> + deep_get(Rest, DeepMsg, Default, Opts); + error -> Default + end. +``` + +### find_target_path + +Find the target path to route for a request message. + +```erlang +find_target_path(Msg, Opts) -> + case hb_ao:get(<<"route-path">>, Msg, not_found, Opts) of + not_found -> + ?event({find_target_path, {msg, Msg}, not_found}), + hb_ao:get(<<"path">>, Msg, no_path, Opts); + RoutePath -> RoutePath + end. +``` + +### template_matches + +Check if a message matches a given template. + +```erlang +template_matches(ToMatch, Template, _Opts) when is_map(Template) -> + case hb_message:match(Template, ToMatch, primary) of + {value_mismatch, _Key, _Val1, _Val2} -> false; + Match -> Match + end; +``` + +### template_matches + +Check if a message matches a given template. + +```erlang +template_matches(ToMatch, Regex, Opts) when is_binary(Regex) -> + MsgPath = find_target_path(ToMatch, Opts), + hb_path:regex_matches(MsgPath, Regex). +``` + +### number + +Label a list of elements with a number. + +```erlang +number(List) -> + lists:map( + fun({N, Item}) -> {integer_to_binary(N), Item} end, + lists:zip(lists:seq(1, length(List)), List) + ). +``` + +### list_to_numbered_message + +Convert a list of elements to a map with numbered keys. + +```erlang +list_to_numbered_message(Msg) when is_map(Msg) -> + case is_ordered_list(Msg, #{}) of + true -> Msg; + false -> + throw({cannot_convert_to_numbered_message, Msg}) + end; +``` + +### list_to_numbered_message + +Convert a list of elements to a map with numbered keys. + +```erlang +list_to_numbered_message(List) -> + hb_maps:from_list(number(List)). +``` + +### is_ordered_list + +Determine if the message given is an ordered list, starting from 1. 
+ +```erlang +is_ordered_list(Msg, _Opts) when is_list(Msg) -> true; +``` + +### is_ordered_list + +Determine if the message given is an ordered list, starting from 1. + +```erlang +is_ordered_list(Msg, Opts) -> + is_ordered_list(1, hb_ao:normalize_keys(Msg, Opts), Opts). +``` + +### is_ordered_list + +```erlang +is_ordered_list(_, Msg, _Opts) when map_size(Msg) == 0 -> true; +``` + +### is_ordered_list + +```erlang +is_ordered_list(N, Msg, _Opts) -> + case maps:get(NormKey = hb_ao:normalize_key(N), Msg, not_found) of + not_found -> false; + _ -> + is_ordered_list( + N + 1, + maps:without([NormKey], Msg), + _Opts + ) + end. +``` + +### list_replace + +Replace a key in a list with a new value. + +```erlang +list_replace(List, Key, Value) -> + lists:foldr( + fun(Elem, Acc) -> + case Elem of + Key when is_list(Value) -> Value ++ Acc; + Key -> [Value | Acc]; + _ -> [Elem | Acc] + end + end, + [], + List + ). +``` + +### unique + +Take a list and return a list of unique elements. The function is + +```erlang +unique(List) -> + Unique = + lists:foldl( + fun(Item, Acc) -> + case lists:member(Item, Acc) of + true -> Acc; + false -> [Item | Acc] + end + end, + [], + List + ), + lists:reverse(Unique). +``` + +### list_with + +Returns the intersection of two lists, with stable ordering. + +```erlang +list_with(List1, List2) -> + lists:filter(fun(Item) -> lists:member(Item, List2) end, List1). +``` + +### list_without + +Remove all occurrences of all items in the first list from the second list. + +```erlang +list_without(List1, List2) -> + lists:filter(fun(Item) -> not lists:member(Item, List1) end, List2). +``` + +### message_to_ordered_list + +Take a message with numbered keys and convert it to a list of tuples + +```erlang +message_to_ordered_list(Message) -> + message_to_ordered_list(Message, #{}). 
+``` + +### message_to_ordered_list + +```erlang +message_to_ordered_list(Message, _Opts) when ?IS_EMPTY_MESSAGE(Message) -> + []; +``` + +### message_to_ordered_list + +```erlang +message_to_ordered_list(List, _Opts) when is_list(List) -> + List; +``` + +### message_to_ordered_list + +```erlang +message_to_ordered_list(Message, Opts) -> + NormMessage = hb_ao:normalize_keys(Message, Opts), + Keys = hb_maps:keys(NormMessage, Opts) -- [<<"priv">>, <<"commitments">>], + SortedKeys = + lists:map( + fun hb_ao:normalize_key/1, + lists:sort(lists:map(fun int/1, Keys)) + ), + message_to_ordered_list(NormMessage, SortedKeys, erlang:hd(SortedKeys), Opts). +``` + +### message_to_ordered_list + +```erlang +message_to_ordered_list(_Message, [], _Key, _Opts) -> + []; +``` + +### message_to_ordered_list + +```erlang +message_to_ordered_list(Message, [Key|Keys], Key, Opts) -> + case hb_maps:get(Key, Message, undefined, Opts#{ hashpath => ignore }) of + undefined -> + throw( + {missing_key, + {key, Key}, + {remaining_keys, Keys}, + {message, Message} + } + ); + Value -> + [ + Value + | + message_to_ordered_list( + Message, + Keys, + hb_ao:normalize_key(int(Key) + 1), + Opts + ) + ] + end; +``` + +### message_to_ordered_list + +```erlang +message_to_ordered_list(Message, [Key|_Keys], ExpectedKey, _Opts) -> + throw({missing_key, {expected, ExpectedKey, {next, Key}, {message, Message}}}). +``` + +### numbered_keys_to_list + +Convert a message with numbered keys and others to a sorted list with only + +```erlang +numbered_keys_to_list(Message, Opts) -> + OnlyNumbered = + hb_maps:filter( + fun(Key, _Value) -> + try int(hb_ao:normalize_key(Key)) of + IntKey when is_integer(IntKey) -> true; + _ -> false + catch _:_ -> false + end + end, + Message, + Opts + ), + message_to_ordered_list(OnlyNumbered, Opts). +``` + +### hd + +Get the first element (the lowest integer key >= 1) of a numbered map. + +```erlang +hd(Message) -> hd(Message, value). 
+``` + +### hd + +Get the first element (the lowest integer key >= 1) of a numbered map. + +```erlang +hd(Message, ReturnType) -> + hd(Message, ReturnType, #{ error_strategy => throw }). +``` + +### hd + +```erlang +hd(Message, ReturnType, Opts) -> + hd(Message, hb_ao:keys(Message, Opts), 1, ReturnType, Opts). +``` + +### hd + +```erlang +hd(_Map, [], _Index, _ReturnType, #{ error_strategy := throw }) -> + throw(no_integer_keys); +``` + +### hd + +```erlang +hd(_Map, [], _Index, _ReturnType, _Opts) -> undefined; +``` + +### hd + +```erlang +hd(Message, [Key|Rest], Index, ReturnType, Opts) -> + case hb_ao:normalize_key(Key, Opts#{ error_strategy => return }) of + undefined -> + hd(Message, Rest, Index + 1, ReturnType, Opts); + Key -> + case ReturnType of + key -> Key; + value -> hb_ao:resolve(Message, Key, #{}) + end + end. +``` + +### find_value + +Find the value associated with a key in parsed a JSON structure list. + +```erlang +find_value(Key, List) -> + find_value(Key, List, undefined). +``` + +### find_value + +```erlang +find_value(Key, Map, Default) -> + find_value(Key, Map, Default, #{}). +``` + +### find_value + +```erlang +find_value(Key, Map, Default, Opts) when is_map(Map) -> + case hb_maps:find(Key, Map, Opts) of + {ok, Value} -> Value; + error -> Default + end; +``` + +### find_value + +```erlang +find_value(Key, List, Default, _Opts) -> + case lists:keyfind(Key, 1, List) of + {Key, Val} -> Val; + false -> Default + end. 
+``` + +### remove_common + +Remove the common prefix from two strings, returning the remainder of the + +```erlang +remove_common(MainStr, SubStr) when is_binary(MainStr) and is_list(SubStr) -> + remove_common(MainStr, list_to_binary(SubStr)); +``` + +### remove_common + +Remove the common prefix from two strings, returning the remainder of the + +```erlang +remove_common(MainStr, SubStr) when is_list(MainStr) and is_binary(SubStr) -> + binary_to_list(remove_common(list_to_binary(MainStr), SubStr)); +``` + +### remove_common + +Remove the common prefix from two strings, returning the remainder of the + +```erlang +remove_common(<< X:8, Rest1/binary>>, << X:8, Rest2/binary>>) -> + remove_common(Rest1, Rest2); +``` + +### remove_common + +Remove the common prefix from two strings, returning the remainder of the + +```erlang +remove_common([X|Rest1], [X|Rest2]) -> + remove_common(Rest1, Rest2); +``` + +### remove_common + +Remove the common prefix from two strings, returning the remainder of the + +```erlang +remove_common([$/|Path], _) -> Path; +``` + +### remove_common + +Remove the common prefix from two strings, returning the remainder of the +Throw an exception if the Opts map has an `error_strategy` key with the + +```erlang +remove_common(Rest, _) -> Rest. +``` + +### maybe_throw + +Remove the common prefix from two strings, returning the remainder of the +Throw an exception if the Opts map has an `error_strategy` key with the + +```erlang +maybe_throw(Val, Opts) -> + case hb_ao:get(error_strategy, Opts) of + throw -> throw(Val); + _ -> Val + end. +``` + +### is_hb_module + +Is the given module part of HyperBEAM? + +```erlang +is_hb_module(Atom) -> + is_hb_module(Atom, hb_opts:get(stack_print_prefixes, [], #{})). 
+``` + +### is_hb_module + +```erlang +is_hb_module(Atom, Prefixes) when is_atom(Atom) -> + is_hb_module(atom_to_list(Atom), Prefixes); +``` + +### is_hb_module + +```erlang +is_hb_module("hb_event" ++ _, _) -> + % Explicitly exclude hb_event from the stack trace, as it is always included, + % creating noise in the output. +``` + +### is_hb_module + +```erlang +is_hb_module(Str, Prefixes) -> + case string:tokens(Str, "_") of + [Pre|_] -> + lists:member(Pre, Prefixes); + _ -> + false + end. +``` + +### all_hb_modules + +Get all loaded modules that are loaded and are part of HyperBEAM. + +```erlang +all_hb_modules() -> + lists:filter(fun(Module) -> is_hb_module(Module) end, erlang:loaded()). +``` + +### count + +```erlang +count(Item, List) -> + length(lists:filter(fun(X) -> X == Item end, List)). +``` + +### mean + +```erlang +mean(List) -> + lists:sum(List) / length(List). +``` + +### stddev + +```erlang +stddev(List) -> + math:sqrt(variance(List)). +``` + +### variance + +```erlang +variance(List) -> + Mean = mean(List), + lists:sum([ math:pow(X - Mean, 2) || X <- List ]) / length(List). +``` + +### shuffle + +Shuffle a list. + +```erlang +shuffle(List) -> + [ Y || {_, Y} <- lists:sort([ {rand:uniform(), X} || X <- List]) ]. +``` + +### weighted_random + +Return a random element from a list, weighted by the values in the list. + +```erlang +weighted_random(List) -> + TotalWeight = lists:sum([ Weight || {_, Weight} <- List ]), + Normalized = [ {Item, Weight / TotalWeight} || {Item, Weight} <- List ], + Shuffled = shuffle(Normalized), + pick_weighted(Shuffled, rand:uniform()). +``` + +### pick_weighted + +Pick a random element from a list, weighted by the values in the list. + +```erlang +pick_weighted([], _) -> + error(empty_list); +``` + +### pick_weighted + +Pick a random element from a list, weighted by the values in the list. 
+ +```erlang +pick_weighted([{Item, Weight}|_Rest], Remaining) when Remaining < Weight -> + Item; +``` + +### pick_weighted + +Pick a random element from a list, weighted by the values in the list. + +```erlang +pick_weighted([{_Item, Weight}|Rest], Remaining) -> + pick_weighted(Rest, Remaining - Weight). +``` + +### addresses_to_binary + +Serialize the given list of addresses to a binary, using the structured + +```erlang +addresses_to_binary(List) when is_list(List) -> + try + iolist_to_binary( + hb_structured_fields:list( + [ + {item, {string, hb_util:human_id(Addr)}, []} + || + Addr <- List + ] + ) + ) + catch + _:_ -> + error({cannot_parse_list, List}) + end. +``` + +### binary_to_addresses + +Parse a list from a binary. First attempts to parse the binary as a + +```erlang +binary_to_addresses(List) when is_list(List) -> + % If the argument is already a list, return it. +``` + +### binary_to_addresses + +```erlang +binary_to_addresses(List) when is_binary(List) -> + try + Res = lists:map( + fun({item, {string, Item}, []}) -> + Item + end, + hb_structured_fields:parse_list(List) + ), + Res + catch + _:_ -> + try + binary:split( + binary:replace(List, <<"\"">>, <<"">>, [global]), + <<",">>, + [global, trim_all] + ) + catch + _:_ -> + error({cannot_parse_list, List}) + end + end. +``` + +### split_depth_string_aware + +Extract all of the parts from the binary, given (a list of) separators. + +```erlang +split_depth_string_aware(_Sep, <<>>) -> []; +``` + +### split_depth_string_aware + +Extract all of the parts from the binary, given (a list of) separators. + +```erlang +split_depth_string_aware(Sep, Bin) -> + {_MatchedSep, Part, Rest} = split_depth_string_aware_single(Sep, Bin), + [Part | split_depth_string_aware(Sep, Rest)]. 
+``` + +### split_depth_string_aware_single + +Parse a binary, extracting a part until a separator is found, while + +```erlang +split_depth_string_aware_single(Sep, Bin) when not is_list(Sep) -> + split_depth_string_aware_single([Sep], Bin); +``` + +### split_depth_string_aware_single + +Parse a binary, extracting a part until a separator is found, while + +```erlang +split_depth_string_aware_single(Seps, Bin) -> + split_depth_string_aware_single(Seps, Bin, 0, <<>>). +``` + +### split_depth_string_aware_single + +```erlang +split_depth_string_aware_single(_Seps, <<>>, _Depth, CurrAcc) -> + {no_match, CurrAcc, <<>>}; +``` + +### split_depth_string_aware_single + +```erlang +split_depth_string_aware_single(Seps, << $\", Rest/binary>>, Depth, CurrAcc) -> + {QuotedStr, AfterStr} = split_escaped_single($\", Rest), + split_depth_string_aware_single( + Seps, + AfterStr, + Depth, + << CurrAcc/binary, "\"", QuotedStr/binary, "\"">> + ); +``` + +### split_depth_string_aware_single + +```erlang +split_depth_string_aware_single(Seps, << $\(, Rest/binary>>, Depth, CurrAcc) -> + %% Increase depth + split_depth_string_aware_single(Seps, Rest, Depth + 1, << CurrAcc/binary, "(" >>); +``` + +### split_depth_string_aware_single + +```erlang +split_depth_string_aware_single(Seps, << $\), Rest/binary>>, Depth, Acc) when Depth > 0 -> + %% Decrease depth + split_depth_string_aware_single(Seps, Rest, Depth - 1, << Acc/binary, ")">>); +``` + +### split_depth_string_aware_single + +```erlang +split_depth_string_aware_single(Seps, <>, Depth, CurrAcc) -> + case Depth == 0 andalso lists:member(C, Seps) of + true -> {C, CurrAcc, Rest}; + false -> + split_depth_string_aware_single( + Seps, + Rest, + Depth, + << CurrAcc/binary, C:8/integer >> + ) + end. +``` + +### split_escaped_single + +Read a binary until a separator is found without a preceding backslash. + +```erlang +split_escaped_single(Sep, Bin) -> + split_escaped_single(Sep, Bin, []). 
+```
+
+### split_escaped_single
+
+```erlang
+split_escaped_single(_Sep, <<>>, Acc) ->
+    {hb_util:bin(lists:reverse(Acc)), <<>>};
+```
+
+### split_escaped_single
+
+```erlang
+split_escaped_single(Sep, <<"\\", Char:8/integer, Rest/binary>>, Acc) ->
+    split_escaped_single(Sep, Rest, [Char, $\\ | Acc]);
+```
+
+### split_escaped_single
+
+```erlang
+split_escaped_single(Sep, << Sep:8/integer, Rest/binary >>, Acc) ->
+    {hb_util:bin(lists:reverse(Acc)), Rest};
+```
+
+### split_escaped_single
+
+```erlang
+split_escaped_single(Sep, << C:8/integer, Rest/binary >>, Acc) ->
+    split_escaped_single(Sep, Rest, [C | Acc]).
+```
+
+### check_size
+
+Force that a binary is either empty or the given number of bytes.
+
+```erlang
+check_size(Bin, {range, Start, End}) ->
+    check_type(Bin, binary)
+        andalso byte_size(Bin) >= Start
+        andalso byte_size(Bin) =< End;
+```
+
+### check_size
+
+Force that a binary is either empty or the given number of bytes.
+
+```erlang
+check_size(Bin, Sizes) ->
+    check_type(Bin, binary)
+        andalso lists:member(byte_size(Bin), Sizes).
+```
+
+### check_value
+
+```erlang
+check_value(Value, ExpectedValues) ->
+    lists:member(Value, ExpectedValues).
+```
+
+### check_type
+
+Ensure that a value is of the given type.
+
+```erlang
+check_type(Value, binary) -> is_binary(Value);
+```
+
+### check_type
+
+Ensure that a value is of the given type.
+
+```erlang
+check_type(Value, integer) -> is_integer(Value);
+```
+
+### check_type
+
+Ensure that a value is of the given type.
+
+```erlang
+check_type(Value, list) -> is_list(Value);
+```
+
+### check_type
+
+Ensure that a value is of the given type.
+
+```erlang
+check_type(Value, map) -> is_map(Value);
+```
+
+### check_type
+
+Ensure that a value is of the given type.
+
+```erlang
+check_type(Value, tx) -> is_record(Value, tx);
+```
+
+### check_type
+
+Ensure that a value is of the given type.
+
+```erlang
+check_type(Value, message) ->
+    is_record(Value, tx) or is_map(Value) or is_list(Value);
+```
+
+### check_type
+
+Ensure that a value is of the given type.
+Throw an error if the given value is not ok. + +```erlang +check_type(_Value, _) -> false. +``` + +### ok_or_throw + +Ensure that a value is of the given type. +Throw an error if the given value is not ok. + +```erlang +ok_or_throw(_, true, _) -> true; +``` + +### ok_or_throw + +Ensure that a value is of the given type. +Throw an error if the given value is not ok. + +```erlang +ok_or_throw(_TX, false, Error) -> + throw(Error). +``` + +### all_atoms + +List the loaded atoms in the Erlang VM. + +```erlang +all_atoms() -> all_atoms(0). +``` + +### all_atoms + +List the loaded atoms in the Erlang VM. + +```erlang +all_atoms(N) -> + case atom_from_int(N) of + not_found -> []; + A -> [A | all_atoms(N+1)] + end. +``` + +### atom_from_int + +Find the atom with the given integer reference. + +```erlang +atom_from_int(Int) -> + case catch binary_to_term(<<131,75,Int:24>>) of + A -> A; + _ -> not_found + end. +``` + +### binary_is_atom + +Check if a given binary is already an atom. + +```erlang +binary_is_atom(X) -> + lists:member(X, lists:map(fun hb_util:bin/1, all_atoms())). 
+``` + +### lower_case_key_map + +```erlang +lower_case_key_map(Map, Opts) -> + hb_maps:fold(fun + (K, V, Acc) when is_map(V) -> + maps:put(hb_util:to_lower(K), lower_case_key_map(V, Opts), Acc); + (K, V, Acc) -> + maps:put(hb_util:to_lower(K), V, Acc) +``` + +--- + +*Generated from [hb_util.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_util.erl)* diff --git a/docs/book/src/hb_volume.erl.md b/docs/book/src/hb_volume.erl.md new file mode 100644 index 000000000..96ae8207b --- /dev/null +++ b/docs/book/src/hb_volume.erl.md @@ -0,0 +1,1047 @@ +# hb_volume + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_volume.erl) + +Helper functions for list_partitions + +--- + +## Exported Functions + +- `change_node_store/2` +- `check_for_device/1` +- `create_partition/2` +- `format_disk/2` +- `list_partitions/0` +- `mount_disk/4` + +--- + +### process_disk_line + +```erlang +-spec list_partitions() -> {ok, map()} | {error, binary()}. +list_partitions() -> + ?event(debug_volume, {list_partitions, entry, starting}), + % Get the partition information using fdisk -l + ?event(debug_volume, {list_partitions, executing_fdisk, command}), + case os:cmd("sudo fdisk -l") of + [] -> + % Empty output indicates an error + Reason = <<"Failed to list partitions: no output">>, + ?event(debug_volume, {list_partitions, fdisk_error, no_output}), + {error, Reason}; + Output -> + ?event(debug_volume, {list_partitions, fdisk_success, parsing}), + % Split output into lines + Lines = string:split(Output, "\n", all), + % Process the output to group information by disk + {_, DiskData} = lists:foldl( + fun process_disk_line/2, + {undefined, []}, + Lines + ), + % Process each disk's data to extract all information + DiskObjects = lists:filtermap( + fun(DiskEntry) -> + Device = maps:get(<<"device">>, DiskEntry), + DiskLines = lists:reverse(maps:get(<<"data">>, DiskEntry)), + DiskInfo = parse_disk_info(Device, DiskLines), + {true, DiskInfo} + end, + DiskData + ), + 
% Return the partition information + ?event(debug_volume, + {list_partitions, success, + {disk_count, length(DiskObjects)} + } + ), + {ok, #{ + <<"status">> => 200, + <<"content-type">> => <<"application/json">>, + <<"body">> => hb_json:encode(#{<<"disks">> => DiskObjects}) + }} + end. +``` + +```erlang +process_disk_line(Line, {CurrentDisk, Acc}) -> + % Match for a new disk entry + DiskPattern = "^Disk (/dev/(?!ram)\\S+):", + case re:run(Line, DiskPattern, [{capture, [1], binary}]) of + {match, [Device]} -> + % Start a new disk entry + NewDisk = #{ + <<"device">> => Device, + <<"data">> => [Line] + }, + {NewDisk, [NewDisk | Acc]}; + _ when CurrentDisk =:= undefined -> + % Not a disk line and no current disk + {undefined, Acc}; + _ -> + % Add line to current disk's data + CurrentData = maps:get(<<"data">>, CurrentDisk), + UpdatedDisk = CurrentDisk#{ + <<"data">> => [Line | CurrentData] + }, + % Update the list with the modified disk entry + UpdatedAcc = [UpdatedDisk | lists:delete(CurrentDisk, Acc)], + {UpdatedDisk, UpdatedAcc} + end. +``` + +### parse_disk_info + +```erlang +parse_disk_info(Device, Lines) -> + % Initialize with device ID + DiskInfo = #{<<"device">> => Device}, + % Process each line to extract information + lists:foldl( + fun parse_disk_line/2, + DiskInfo, + Lines + ). +``` + +### parse_disk_line + +```erlang +parse_disk_line(Line, Info) -> + % Extract disk size and bytes + SizePattern = "^Disk .+: ([0-9.]+ [KMGT]iB), ([0-9]+) bytes, ([0-9]+) sectors", + case re:run(Line, SizePattern, [{capture, [1, 2, 3], binary}]) of + {match, [Size, Bytes, Sectors]} -> + Info#{ + <<"size">> => Size, + <<"bytes">> => binary_to_integer(Bytes), + <<"sectors">> => binary_to_integer(Sectors) + }; + _ -> + parse_disk_model_line(Line, Info) + end. 
+``` + +### parse_disk_model_line + +```erlang +parse_disk_model_line(Line, Info) -> + % Extract disk model + ModelPattern = "^Disk model: (.+)\\s*$", + case re:run(Line, ModelPattern, [{capture, [1], binary}]) of + {match, [Model]} -> + Info#{<<"model">> => string:trim(Model)}; + _ -> + parse_disk_units_line(Line, Info) + end. +``` + +### parse_disk_units_line + +```erlang +parse_disk_units_line(Line, Info) -> + % Extract units information + UnitsPattern = "^Units: (.+)$", + case re:run(Line, UnitsPattern, [{capture, [1], binary}]) of + {match, [Units]} -> + Info#{<<"units">> => Units}; + _ -> + parse_sector_size_line(Line, Info) + end. +``` + +### parse_sector_size_line + +```erlang +parse_sector_size_line(Line, Info) -> + % Extract sector size + SectorPattern = "^Sector size \\(logical/physical\\): ([^/]+)/(.+)$", + case re:run(Line, SectorPattern, [{capture, [1, 2], binary}]) of + {match, [LogicalSize, PhysicalSize]} -> + Info#{ + <<"sector_size">> => #{ + <<"logical">> => string:trim(LogicalSize), + <<"physical">> => string:trim(PhysicalSize) + } + }; + _ -> + parse_io_size_line(Line, Info) + end. +``` + +### parse_io_size_line + +```erlang +parse_io_size_line(Line, Info) -> + % Extract I/O size + IOPattern = "^I/O size \\(minimum/optimal\\): ([^/]+)/(.+)$", + case re:run(Line, IOPattern, [{capture, [1, 2], binary}]) of + {match, [MinSize, OptSize]} -> + Info#{ + <<"io_size">> => #{ + <<"minimum">> => string:trim(MinSize), + <<"optimal">> => string:trim(OptSize) + } + }; + _ -> + Info + end. +``` + +### create_partition + +```erlang +-spec create_partition(Device :: binary(), PartType :: binary()) -> + {ok, map()} | {error, binary()}. +``` + +```erlang +create_partition(undefined, _PartType) -> + ?event(debug_volume, {create_partition, error, device_undefined}), + {error, <<"Device path not specified">>}; +``` + +### create_partition + +```erlang +-spec create_partition(Device :: binary(), PartType :: binary()) -> + {ok, map()} | {error, binary()}. 
+``` + +```erlang +create_partition(Device, PartType) -> + ?event(debug_volume, + {create_partition, entry, + {device, Device, part_type, PartType} + } + ), + % Create a GPT partition table + DeviceStr = binary_to_list(Device), + MklabelCmd = "sudo parted " ++ DeviceStr ++ " mklabel gpt", + ?event(debug_volume, + {create_partition, creating_gpt_label, + {device, Device} + } + ), + ?event(debug_volume, + {create_partition, executing_mklabel, + {command, MklabelCmd} + } + ), + case safe_exec(MklabelCmd) of + {ok, Result} -> + ?event(debug_volume, + {create_partition, gpt_label_success, + {result, Result} + } + ), + create_actual_partition(Device, PartType); + {error, ErrorMsg} -> + ?event(debug_volume, + {create_partition, gpt_label_error, + {error, ErrorMsg} + } + ), + {error, ErrorMsg} + end. +``` + +### create_actual_partition + +```erlang +create_actual_partition(Device, PartType) -> + ?event(debug_volume, + {create_actual_partition, entry, + {device, Device, part_type, PartType} + } + ), + DeviceStr = binary_to_list(Device), + PartTypeStr = binary_to_list(PartType), + % Build the parted command to create the partition + MkpartCmd = + "sudo parted -a optimal " ++ DeviceStr ++ + " mkpart primary " ++ PartTypeStr ++ " 0% 100%", + ?event(debug_volume, + {create_actual_partition, executing_mkpart, + {command, MkpartCmd} + } + ), + case safe_exec(MkpartCmd) of + {ok, Result} -> + ?event(debug_volume, + {create_actual_partition, mkpart_success, + {result, Result} + } + ), + get_partition_info(Device); + {error, ErrorMsg} -> + ?event(debug_volume, + {create_actual_partition, mkpart_error, + {error, ErrorMsg} + } + ), + {error, ErrorMsg} + end. 
+``` + +### get_partition_info + +```erlang +get_partition_info(Device) -> + ?event(debug_volume, {get_partition_info, entry, {device, Device}}), + DeviceStr = binary_to_list(Device), + % Print partition information + PrintCmd = "sudo parted " ++ DeviceStr ++ " print", + ?event(debug_volume, + {get_partition_info, executing_print, {command, PrintCmd}} + ), + PartitionInfo = os:cmd(PrintCmd), + ?event(debug_volume, + {get_partition_info, success, partition_created, + {result, PartitionInfo} + } + ), + {ok, #{ + <<"status">> => 200, + <<"message">> => <<"Partition created successfully.">>, + <<"device_path">> => Device, + <<"partition_info">> => list_to_binary(PartitionInfo) + }}. +``` + +### format_disk + +```erlang +-spec format_disk(Partition :: binary(), EncKey :: binary()) -> + {ok, map()} | {error, binary()}. +``` + +```erlang +format_disk(undefined, _EncKey) -> + ?event(debug_volume, {format_disk, error, partition_undefined}), + {error, <<"Partition path not specified">>}; +``` + +### format_disk + +```erlang +-spec format_disk(Partition :: binary(), EncKey :: binary()) -> + {ok, map()} | {error, binary()}. +``` + +```erlang +format_disk(_Partition, undefined) -> + ?event(debug_volume, {format_disk, error, key_undefined}), + {error, <<"Encryption key not specified">>}; +``` + +### format_disk + +```erlang +-spec format_disk(Partition :: binary(), EncKey :: binary()) -> + {ok, map()} | {error, binary()}. 
+``` + +```erlang +format_disk(Partition, EncKey) -> + ?event(debug_volume, + {format_disk, entry, + { + partition, Partition, + key_present, true + } + } + ), + PartitionStr = binary_to_list(Partition), + ?event(debug_volume, {format_disk, creating_secure_key_file, starting}), + with_secure_key_file(EncKey, fun(KeyFile) -> + FormatCmd = + "sudo cryptsetup luksFormat --batch-mode " ++ + "--key-file " ++ KeyFile ++ " " ++ PartitionStr, + ?event(debug_volume, + {format_disk, executing_luks_format, {command, FormatCmd}} + ), + case safe_exec(FormatCmd, ["failed"]) of + {ok, Result} -> + ?event(debug_volume, + {format_disk, luks_format_success, completed, + {result, Result} + } + ), + {ok, #{ + <<"status">> => 200, + <<"message">> => + <<"Partition formatted with LUKS encryption " + "successfully.">> + }}; + {error, ErrorMsg} -> + ?event(debug_volume, + {format_disk, luks_format_error, ErrorMsg} + ), + {error, ErrorMsg} + end + end). +``` + +### mount_disk + +```erlang +-spec mount_disk( + Partition :: binary(), + EncKey :: binary(), + MountPoint :: binary(), + VolumeName :: binary() +) -> {ok, map()} | {error, binary()}. +``` + +```erlang +mount_disk(undefined, _EncKey, _MountPoint, _VolumeName) -> + ?event(debug_volume, {mount_disk, error, partition_undefined}), + {error, <<"Partition path not specified">>}; +``` + +### mount_disk + +```erlang +-spec mount_disk( + Partition :: binary(), + EncKey :: binary(), + MountPoint :: binary(), + VolumeName :: binary() +) -> {ok, map()} | {error, binary()}. +``` + +```erlang +mount_disk(_Partition, undefined, _MountPoint, _VolumeName) -> + ?event(debug_volume, {mount_disk, error, key_undefined}), + {error, <<"Encryption key not specified">>}; +``` + +### mount_disk + +```erlang +-spec mount_disk( + Partition :: binary(), + EncKey :: binary(), + MountPoint :: binary(), + VolumeName :: binary() +) -> {ok, map()} | {error, binary()}. 
+``` + +```erlang +mount_disk(_Partition, _EncKey, undefined, _VolumeName) -> + ?event(debug_volume, {mount_disk, error, mount_point_undefined}), + {error, <<"Mount point not specified">>}; +``` + +### mount_disk + +```erlang +-spec mount_disk( + Partition :: binary(), + EncKey :: binary(), + MountPoint :: binary(), + VolumeName :: binary() +) -> {ok, map()} | {error, binary()}. +``` + +```erlang +mount_disk(Partition, EncKey, MountPoint, VolumeName) -> + ?event(debug_volume, + {mount_disk, entry, + { + partition, Partition, + mount_point, MountPoint, + volume_name, VolumeName} + } + ), + PartitionStr = binary_to_list(Partition), + VolumeNameStr = binary_to_list(VolumeName), + ?event(debug_volume, {mount_disk, opening_luks_volume, starting}), + with_secure_key_file(EncKey, fun(KeyFile) -> + OpenCmd = + "sudo cryptsetup luksOpen --key-file " ++ KeyFile ++ + " " ++ PartitionStr ++ " " ++ VolumeNameStr, + ?event(debug_volume, {mount_disk, executing_luks_open, {command, OpenCmd}}), + case safe_exec(OpenCmd, ["failed"]) of + {ok, Result} -> + ?event(debug_volume, + {mount_disk, luks_open_success, proceeding_to_mount, + {result, Result} + } + ), + mount_opened_volume(Partition, MountPoint, VolumeName); + {error, ErrorMsg} -> + ?event(debug_volume, {mount_disk, luks_open_error, ErrorMsg}), + {error, ErrorMsg} + end + end). 
+``` + +### mount_opened_volume + +```erlang +mount_opened_volume(Partition, MountPoint, VolumeName) -> + ?event(debug_volume, + {mount_opened_volume, entry, + { + partition, Partition, + mount_point, MountPoint, + volume_name, VolumeName + } + } + ), + % Create mount point if it doesn't exist + MountPointStr = binary_to_list(MountPoint), + ?event(debug_volume, + {mount_opened_volume, creating_mount_point, MountPoint} + ), + os:cmd("sudo mkdir -p " ++ MountPointStr), + % Check if filesystem exists on the opened LUKS volume + VolumeNameStr = binary_to_list(VolumeName), + DeviceMapperPath = "/dev/mapper/" ++ VolumeNameStr, + % Check filesystem type + FSCheckCmd = "sudo blkid " ++ DeviceMapperPath, + ?event(debug_volume, + {mount_opened_volume, checking_filesystem, {command, FSCheckCmd}} + ), + FSCheckResult = os:cmd(FSCheckCmd), + ?event(debug_volume, + {mount_opened_volume, filesystem_check_result, FSCheckResult} + ), + % Create filesystem if none exists + case string:find(FSCheckResult, "TYPE=") of + nomatch -> + % No filesystem found, create ext4 + ?event(debug_volume, + {mount_opened_volume, creating_filesystem, ext4} + ), + MkfsCmd = "sudo mkfs.ext4 -F " ++ DeviceMapperPath, + ?event(debug_volume, + {mount_opened_volume, executing_mkfs, {command, MkfsCmd}} + ), + MkfsResult = os:cmd(MkfsCmd), + ?event(debug_volume, + {mount_opened_volume, mkfs_result, MkfsResult} + ); + _ -> + ?event(debug_volume, + {mount_opened_volume, filesystem_exists, skipping_creation} + ) + end, + % Mount the unlocked LUKS volume + MountCmd = "sudo mount " ++ DeviceMapperPath ++ " " ++ MountPointStr, + ?event(debug_volume, + {mount_opened_volume, executing_mount, + {command, MountCmd} + } + ), + case safe_exec(MountCmd, ["failed"]) of + {ok, Result} -> + ?event(debug_volume, + {mount_opened_volume, mount_success, + creating_info, {result, Result} + } + ), + create_mount_info(Partition, MountPoint, VolumeName); + {error, ErrorMsg} -> + ?event(debug_volume, + {mount_opened_volume, 
mount_error, + {error, ErrorMsg, closing_luks} + } + ), + % Close the LUKS volume if mounting failed + os:cmd("sudo cryptsetup luksClose " ++ VolumeNameStr), + {error, ErrorMsg} + end. +``` + +### create_mount_info + +```erlang +create_mount_info(Partition, MountPoint, VolumeName) -> + ?event(debug_volume, + {create_mount_info, success, + { + partition, Partition, + mount_point, MountPoint, + volume_name, VolumeName + } + } + ), + {ok, #{ + <<"status">> => 200, + <<"message">> => + <<"Encrypted partition mounted successfully.">>, + <<"mount_point">> => MountPoint, + <<"mount_info">> => #{ + partition => Partition, + mount_point => MountPoint, + volume_name => VolumeName + } + }}. +``` + +### change_node_store + +```erlang +-spec change_node_store(StorePath :: binary(), + CurrentStore :: list()) -> + {ok, map()} | {error, binary()}. +``` + +```erlang +change_node_store(undefined, _CurrentStore) -> + ?event(debug_volume, {change_node_store, error, store_path_undefined}), + {error, <<"Store path not specified">>}; +``` + +### change_node_store + +```erlang +-spec change_node_store(StorePath :: binary(), + CurrentStore :: list()) -> + {ok, map()} | {error, binary()}. 
+``` + +```erlang +change_node_store(StorePath, CurrentStore) -> + ?event(debug_volume, + {change_node_store, entry, + {store_path, StorePath, current_store, CurrentStore} + } + ), + % Create the store directory if it doesn't exist + StorePathStr = binary_to_list(StorePath), + ?event(debug_volume, {change_node_store, creating_directory, StorePath}), + os:cmd("sudo mkdir -p " ++ StorePathStr), + % Update the store configuration with the new path + ?event(debug_volume, + {change_node_store, updating_config, + {current_store, CurrentStore} + } + ), + NewStore = update_store_config(CurrentStore, StorePath), + % Return the result + ?event(debug_volume, + {change_node_store, success, {new_store_config, NewStore}} + ), + {ok, #{ + <<"status">> => 200, + <<"message">> => + <<"Node store updated to use encrypted disk.">>, + <<"store_path">> => StorePath, + <<"store">> => NewStore + }}. +``` + +### safe_exec + +```erlang +safe_exec(Command) -> + safe_exec(Command, ["Error", "failed", "bad", "error"]). +``` + +### safe_exec + +```erlang +safe_exec(Command, ErrorKeywords) -> + Result = os:cmd(Command), + case check_command_errors(Result, ErrorKeywords) of + ok -> {ok, Result}; + error -> {error, list_to_binary(Result)} + end. +``` + +### check_command_errors + +```erlang +check_command_errors(Result, Keywords) -> + case lists:any(fun(Keyword) -> + string:find(Result, Keyword) =/= nomatch + end, Keywords) of + true -> error; + false -> ok + end. 
+``` + +### with_secure_key_file + +```erlang +with_secure_key_file(EncKey, Fun) -> + ?event(debug_volume, {with_secure_key_file, entry, creating_temp_file}), + os:cmd("sudo mkdir -p /root/tmp"), + % Get process ID and create filename + PID = os:getpid(), + ?event(debug_volume, {with_secure_key_file, process_id, PID}), + KeyFile = "/root/tmp/luks_key_" ++ PID, + ?event(debug_volume, {with_secure_key_file, key_file_path, KeyFile}), + % Check if directory was created successfully + DirCheck = os:cmd("ls -la /root/tmp/"), + ?event(debug_volume, {with_secure_key_file, directory_check, DirCheck}), + try + % Convert EncKey to binary using hb_util + BinaryEncKey = case EncKey of + % Handle RSA wallet tuples - extract private key or use hash + {{rsa, _}, PrivKey, _PubKey} when is_binary(PrivKey) -> + % Use first 32 bytes of private key for AES-256 + case byte_size(PrivKey) of + Size when Size >= 32 -> + binary:part(PrivKey, 0, 32); + _ -> + % If private key is too short, hash it to get 32 bytes + crypto:hash(sha256, PrivKey) + end; + % Handle other complex terms + _ when not is_binary(EncKey) andalso not is_list(EncKey) -> + try + hb_util:bin(EncKey) + catch + _:_ -> + % Fallback to term_to_binary and hash to get consistent + % key size + crypto:hash(sha256, term_to_binary(EncKey)) + end; + % Simple cases handled by hb_util:bin + _ -> + hb_util:bin(EncKey) + end, + WriteResult = file:write_file(KeyFile, BinaryEncKey, [raw]), + ?event(debug_volume, + {with_secure_key_file, write_result, WriteResult} + ), + % Check if file was created + FileExists = filelib:is_regular(KeyFile), + ?event(debug_volume, + {with_secure_key_file, file_exists_check, FileExists} + ), + % If file exists, get its info + case FileExists of + true -> + FileInfo = file:read_file_info(KeyFile), + ?event(debug_volume, + {with_secure_key_file, file_info, FileInfo} + ); + false -> + ?event(debug_volume, + {with_secure_key_file, file_not_found, KeyFile} + ) + end, + % Execute function with key file path + 
?event(debug_volume, + {with_secure_key_file, executing_function, with_key_file} + ), + Result = Fun(KeyFile), + % Always clean up the key file + ?event(debug_volume, + {with_secure_key_file, cleanup, shredding_key_file} + ), + os:cmd("sudo shred -u " ++ KeyFile), + ?event(debug_volume, {with_secure_key_file, success, completed}), + Result + catch + Class:Reason:Stacktrace -> + ?event(debug_volume, + {with_secure_key_file, exception, + {class, Class, reason, Reason, cleanup, starting} + } + ), + % Ensure cleanup even if function fails + os:cmd("sudo shred -u " ++ KeyFile), + ?event(debug_volume, + {with_secure_key_file, exception_cleanup, completed} + ), + erlang:raise(Class, Reason, Stacktrace) + end. +``` + +### update_store_config + +```erlang +-spec update_store_config(StoreConfig :: term(), + NewPath :: binary()) -> term(). +``` + +```erlang +update_store_config(StoreConfig, NewPath) when is_list(StoreConfig) -> + % For a list, update each element + [update_store_config(Item, NewPath) || Item <- StoreConfig]; +``` + +### update_store_config + +```erlang +-spec update_store_config(StoreConfig :: term(), + NewPath :: binary()) -> term(). 
+```
+
+```erlang
+update_store_config(
+    #{<<"store-module">> := Module} = StoreConfig,
+    NewPath
+) when is_map(StoreConfig) ->
+    % Handle various store module types differently
+    case Module of
+        hb_store_fs ->
+            % For filesystem store, prefix the existing path with the new path
+            ExistingPath = maps:get(<<"name">>, StoreConfig, <<"">>),
+            NewName = <<NewPath/binary, "/", ExistingPath/binary>>,
+            ?event(debug_volume, {fs, StoreConfig, NewPath, NewName}),
+            StoreConfig#{<<"name">> => NewName};
+        hb_store_lmdb ->
+            ExistingPath = maps:get(<<"name">>, StoreConfig, <<"">>),
+            NewName = <<NewPath/binary, "/", ExistingPath/binary>>,
+            ?event(debug_volume, {migrate_start, ExistingPath, NewName}),
+            safe_stop_lmdb_store(StoreConfig),
+            ?event(debug_volume, {using_existing_store, NewName}),
+            FinalConfig = StoreConfig#{<<"name">> => NewName},
+            safe_start_lmdb_store(FinalConfig),
+            FinalConfig;
+        hb_store_rocksdb ->
+            StoreConfig;
+        hb_store_gateway ->
+            % For gateway store, recursively update nested store configs
+            NestedStore = maps:get(<<"store">>, StoreConfig, []),
+            StoreConfig#{
+                <<"store">> => update_store_config(NestedStore, NewPath)
+            };
+        _ ->
+            % For any other store type, update the prefix
+            % StoreConfig#{<<"name">> => NewPath}
+            ?event(debug_volume, {other, StoreConfig, NewPath}),
+            StoreConfig
+    end;
+```
+
+### update_store_config
+
+```erlang
+-spec update_store_config(StoreConfig :: term(),
+    NewPath :: binary()) -> term().
+```
+
+```erlang
+update_store_config({Type, _OldPath, Opts}, NewPath) ->
+    % For tuple format with options
+    {Type, NewPath, Opts};
+```
+
+### update_store_config
+
+```erlang
+-spec update_store_config(StoreConfig :: term(),
+    NewPath :: binary()) -> term().
+```
+
+```erlang
+update_store_config({Type, _OldPath}, NewPath) ->
+    % For tuple format without options
+    {Type, NewPath};
+```
+
+### update_store_config
+
+```erlang
+-spec update_store_config(StoreConfig :: term(),
+    NewPath :: binary()) -> term().
+``` + +```erlang +update_store_config(StoreConfig, _NewPath) -> + % Return unchanged for any other format + StoreConfig. +``` + +### safe_stop_lmdb_store + +```erlang +safe_stop_lmdb_store(StoreConfig) -> + ?event(debug_volume, {stopping_current_store, StoreConfig}), + try + hb_store_lmdb:stop(StoreConfig) + catch + error:StopReason -> + ?event(debug_volume, {stop_error, StopReason}) + end. +``` + +### safe_start_lmdb_store + +```erlang +safe_start_lmdb_store(StoreConfig) -> + NewName = maps:get(<<"name">>, StoreConfig), + ?event(debug_volume, {starting_new_store, NewName}), + hb_store_lmdb:start(StoreConfig). +``` + +### check_command_errors_test + +```erlang +-spec check_for_device(Device :: binary()) -> boolean(). +check_for_device(Device) -> + ?event(debug_volume, {check_for_device, entry, {device, Device}}), + Command = + io_lib:format( + "ls -l ~s 2>/dev/null || echo 'not_found'", + [binary_to_list(Device)] + ), + ?event(debug_volume, {check_for_device, executing_command, ls_check}), + Result = os:cmd(Command), + DeviceExists = string:find(Result, "not_found") =:= nomatch, + ?event(debug_volume, + {check_for_device, result, + {device, Device, exists, DeviceExists} + } + ), + DeviceExists. +``` + +```erlang +check_command_errors_test() -> + % Test successful case - no errors + ?assertEqual( + ok, + check_command_errors( + "Success: operation completed", + ["Error", "failed"] + ) + ), + % Test error detection + ?assertEqual( + error, + check_command_errors( + "Error: something went wrong", + ["Error", "failed"] + ) + ), + ?assertEqual( + error, + check_command_errors( + "Operation failed", + ["Error", "failed"] + ) + ), + % Test case sensitivity + ?assertEqual( + ok, + check_command_errors( + "error (lowercase)", + ["Error", "failed"] + ) + ), + % Test multiple keywords + ?assertEqual( + error, + check_command_errors( + "Command failed with Error", + ["Error", "failed"] + ) + ). 
+``` + +### update_store_config_test + +```erlang +update_store_config_test() -> + % Test filesystem store + FSStore = #{ + <<"store-module">> => hb_store_fs, + <<"name">> => <<"cache">> + }, + NewPath = <<"/encrypted/mount">>, + Updated = update_store_config(FSStore, NewPath), + Expected = FSStore#{<<"name">> => <<"/encrypted/mount/cache">>}, + ?assertEqual(Expected, Updated), + % Test list of stores + StoreList = [FSStore, #{<<"store-module">> => hb_store_gateway}], + UpdatedList = update_store_config(StoreList, NewPath), + ?assertEqual(2, length(UpdatedList)), + % Test tuple format + TupleStore = {fs, <<"old_path">>, []}, + UpdatedTuple = update_store_config(TupleStore, NewPath), + ?assertEqual({fs, NewPath, []}, UpdatedTuple). +``` + +### with_secure_key_file_test + +```erlang +with_secure_key_file_test() -> + TestKey = <<"test_encryption_key_123">>, + % Create a safe test version that doesn't use /root/tmp + TestWithSecureKeyFile = fun(EncKey, Fun) -> + % Use /tmp instead of /root/tmp for testing + TmpDir = "/tmp", + KeyFile = TmpDir ++ "/test_luks_key_" ++ os:getpid(), + try + % Write key to temporary file + file:write_file(KeyFile, EncKey, [raw]), + % Execute function with key file path + Result = Fun(KeyFile), + % Clean up the key file + file:delete(KeyFile), + Result + catch + Class:Reason:Stacktrace -> + % Ensure cleanup even if function fails + file:delete(KeyFile), + erlang:raise(Class, Reason, Stacktrace) + end + end, + % Test successful execution + Result = TestWithSecureKeyFile(TestKey, fun(KeyFile) -> + % Verify key file was created and contains the key + ?assert(filelib:is_regular(KeyFile)), + {ok, FileContent} = file:read_file(KeyFile), + ?assertEqual(TestKey, FileContent), + {ok, <<"success">>} + end), + ?assertEqual({ok, <<"success">>}, Result), + % Test exception handling and cleanup + TestException = fun() -> + TestWithSecureKeyFile(TestKey, fun(KeyFile) -> + ?assert(filelib:is_regular(KeyFile)), + error(test_error) + end) + end, + 
?assertError(test_error, TestException()). +``` + +### check_for_device_test + +```erlang +check_for_device_test() -> + % This test would need mocking of os:cmd to be fully testable + % For now, test with /dev/null which should always exist + ?assertEqual(true, check_for_device(<<"/dev/null">>)), + % Test non-existent device + ?assertEqual( + false, + check_for_device(<<"/dev/nonexistent_device_123">>) + ). +``` + +### safe_exec_mock_test + +```erlang +safe_exec_mock_test() -> + % We can't easily mock os:cmd, but we can test the error checking logic + % This is covered by check_command_errors_test above + % Test with default error keywords + TestResult1 = + check_command_errors( + "Operation completed successfully", + ["Error", "failed"] + ), + ?assertEqual(ok, TestResult1), + TestResult2 = + check_command_errors( + "Error: disk not found", + ["Error", "failed"] + ), +``` + +--- + +*Generated from [hb_volume.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_volume.erl)* diff --git a/docs/book/src/introduction.md b/docs/book/src/introduction.md new file mode 100644 index 000000000..c3ffa0966 --- /dev/null +++ b/docs/book/src/introduction.md @@ -0,0 +1,37 @@ +# HyperBEAM Literate Documentation + +Welcome to the comprehensive literate documentation for HyperBEAM, a reference implementation of AO-Core written in Erlang. + +## About This Documentation + +This documentation combines source code with comprehensive explanations using a literate programming approach. 
Each module page includes: + +- **Module overview** with purpose and functionality +- **Exported functions** with signatures and documentation +- **Implementation details** with annotated source code +- **Test functions** demonstrating usage patterns + +## Navigation + +Use the sidebar to browse modules organized by category: + +- **Arweave Foundation**: Core Arweave protocol implementations +- **Device Framework**: AO device implementations and utilities +- **Codec Modules**: Data encoding/decoding functionality +- **Core Services**: Essential HyperBEAM services and components +- **HyperBEAM Core**: Foundation modules and utilities + +## Copy Functionality + +Each page includes a copy button (📋) in the top-right corner that copies the original markdown content to your clipboard - perfect for sharing with LLMs or analysis tools. + +## About HyperBEAM + +HyperBEAM is a client implementation of the AO-Core protocol, providing a framework for decentralized computations. It offers: + +1. **Hashpaths**: Mechanism for referencing program state-space locations +2. **Unified data structures**: HTTP document representation of program states +3. **Commitment protocol**: Cryptographic proofs of state representations +4. **Meta-VM**: Support for multiple virtual machines and computational models + +For more information, visit the [HyperBEAM repository](https://github.com/permaweb/HyperBEAM). \ No newline at end of file diff --git a/docs/book/src/rsa_pss.erl.md b/docs/book/src/rsa_pss.erl.md new file mode 100644 index 000000000..e81d9b9ad --- /dev/null +++ b/docs/book/src/rsa_pss.erl.md @@ -0,0 +1,354 @@ +# rsa_pss + +[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/rsa_pss.erl) + +**Author:** Andrew Bennett +**Copyright:** 2014-2015, Andrew Bennett +Distributed under the Mozilla Public License v2.0. 
+Original available at: +https://github.com/potatosalad/erlang-crypto_rsassa_pss +Created : 20 Jul 2015 by Andrew Bennett +Modified: 17 Nov 2017 by The Arweave Team + +--- + +## Exported Functions + +- `sign/3` +- `sign/4` +- `verify_legacy/4` +- `verify/4` + +--- + +### sign + +```erlang +-spec sign(Message, DigestType, PrivateKey) -> Signature + when + Message :: binary() | {digest, binary()}, + DigestType :: rsa_digest_type() | atom(), + PrivateKey :: rsa_private_key(), + Signature :: binary(). +``` + +```erlang +sign(Message, DigestType, PrivateKey) when is_binary(Message) -> + sign({digest, crypto:hash(DigestType, Message)}, DigestType, PrivateKey); +``` + +### sign + +```erlang +-spec sign(Message, DigestType, PrivateKey) -> Signature + when + Message :: binary() | {digest, binary()}, + DigestType :: rsa_digest_type() | atom(), + PrivateKey :: rsa_private_key(), + Signature :: binary(). +``` + +```erlang +sign(Message={digest, _}, DigestType, PrivateKey) -> + SaltLen = byte_size(crypto:hash(DigestType, <<>>)), + Salt = crypto:strong_rand_bytes(SaltLen), + sign(Message, DigestType, Salt, PrivateKey). +``` + +### sign + +```erlang +-spec sign(Message, DigestType, Salt, PrivateKey) -> Signature + when + Message :: binary() | {digest, binary()}, + DigestType :: rsa_digest_type() | atom(), + Salt :: binary(), + PrivateKey :: rsa_private_key(), + Signature :: binary(). +``` + +```erlang +sign(Message, DigestType, Salt, PrivateKey) when is_binary(Message) -> + sign({digest, crypto:hash(DigestType, Message)}, DigestType, Salt, PrivateKey); +``` + +### sign + +```erlang +-spec sign(Message, DigestType, Salt, PrivateKey) -> Signature + when + Message :: binary() | {digest, binary()}, + DigestType :: rsa_digest_type() | atom(), + Salt :: binary(), + PrivateKey :: rsa_private_key(), + Signature :: binary(). 
+``` + +```erlang +sign({digest, Digest}, DigestType, Salt, PrivateKey=#'RSAPrivateKey'{modulus=N}) -> + DigestLen = byte_size(Digest), + SaltLen = byte_size(Salt), + PublicBitSize = int_to_bit_size(N), + PrivateByteSize = (PublicBitSize + 7) div 8, + PublicByteSize = int_to_byte_size(N), + case PublicByteSize < (DigestLen + SaltLen + 2) of + false -> + DBLen = PrivateByteSize - DigestLen - 1, + M = << 0:64, Digest/binary, Salt/binary >>, + H = crypto:hash(DigestType, M), + DB = << 0:((DBLen - SaltLen - 1) * 8), 1, Salt/binary >>, + DBMask = mgf1(DigestType, H, DBLen), + MaskedDB = normalize_to_key_size(PublicBitSize, crypto:exor(DB, DBMask)), + EM = << MaskedDB/binary, H/binary, ?PSS_TRAILER_FIELD >>, + DM = pad_to_key_size(PublicByteSize, dp(EM, PrivateKey)), + DM; + true -> + erlang:error(badarg, [{digest, Digest}, DigestType, Salt, PrivateKey]) + end. +``` + +### verify + +```erlang +-spec verify(Message, DigestType, Signature, PublicKey) -> boolean() + when + Message :: binary() | {digest, binary()}, + DigestType :: rsa_digest_type() | atom(), + Signature :: binary(), + PublicKey :: rsa_public_key(). +``` + +```erlang +verify(Message, DigestType, Signature, PublicKey) when is_binary(Message) -> + verify({digest, crypto:hash(DigestType, Message)}, DigestType, Signature, PublicKey); +``` + +### verify + +```erlang +-spec verify(Message, DigestType, Signature, PublicKey) -> boolean() + when + Message :: binary() | {digest, binary()}, + DigestType :: rsa_digest_type() | atom(), + Signature :: binary(), + PublicKey :: rsa_public_key(). 
+``` + +```erlang +verify({digest, Digest}, DigestType, Signature, PublicKey=#'RSAPublicKey'{modulus=N}) -> + DigestLen = byte_size(Digest), + PublicBitSize = int_to_bit_size(N), + PrivateByteSize = (PublicBitSize + 7) div 8, + PublicByteSize = int_to_byte_size(N), + SignatureSize = byte_size(Signature), + case PublicByteSize =:= SignatureSize of + true -> + SignatureNumber = binary:decode_unsigned(Signature, big), + case SignatureNumber >= 0 andalso SignatureNumber < N of + true -> + DBLen = PrivateByteSize - DigestLen - 1, + EM = pad_to_key_size(PrivateByteSize, ep(Signature, PublicKey)), + case binary:last(EM) of + ?PSS_TRAILER_FIELD -> + MaskedDB = binary:part(EM, 0, byte_size(EM) - DigestLen - 1), + H = binary:part(EM, byte_size(MaskedDB), DigestLen), + DBMask = mgf1(DigestType, H, DBLen), + DB = normalize_to_key_size(PublicBitSize, crypto:exor(MaskedDB, DBMask)), + case binary:match(DB, << 1 >>) of + {Pos, Len} -> + PS = binary:decode_unsigned(binary:part(DB, 0, Pos)), + case PS =:= 0 of + true -> + Salt = binary:part(DB, Pos + Len, byte_size(DB) - Pos - Len), + M = << 0:64, Digest/binary, Salt/binary >>, + HOther = crypto:hash(DigestType, M), + H =:= HOther; + false -> + false + end; + nomatch -> + false + end; + _BadTrailer -> + false + end; + _ -> + false + end; + false -> + false + end. 
+``` + +### verify_legacy + +```erlang +verify_legacy(Message, DigestType, Signature, PublicKey) when is_binary(Message) -> + verify_legacy({digest, crypto:hash(DigestType, Message)}, DigestType, Signature, PublicKey); +``` + +### verify_legacy + +```erlang +verify_legacy({digest, Digest}, DigestType, Signature, PublicKey=#'RSAPublicKey'{modulus=N}) -> + DigestLen = byte_size(Digest), + PublicBitSize = int_to_bit_size(N), + PrivateByteSize = PublicBitSize div 8, + PublicByteSize = int_to_byte_size(N), + SignatureSize = byte_size(Signature), + case PublicByteSize =:= SignatureSize of + true -> + SignatureNumber = binary:decode_unsigned(Signature, big), + case SignatureNumber >= 0 andalso SignatureNumber < N of + true -> + DBLen = PrivateByteSize - DigestLen - 1, + EM = pad_to_key_size(PrivateByteSize, ep(Signature, PublicKey)), + case binary:last(EM) of + ?PSS_TRAILER_FIELD -> + MaskedDB = binary:part(EM, 0, byte_size(EM) - DigestLen - 1), + H = binary:part(EM, byte_size(MaskedDB), DigestLen), + DBMask = mgf1(DigestType, H, DBLen), + DB = normalize_to_key_size(PublicBitSize, crypto:exor(MaskedDB, DBMask)), + case binary:match(DB, << 1 >>) of + {Pos, Len} -> + PS = binary:decode_unsigned(binary:part(DB, 0, Pos)), + case PS =:= 0 of + true -> + Salt = binary:part(DB, Pos + Len, byte_size(DB) - Pos - Len), + M = << 0:64, Digest/binary, Salt/binary >>, + HOther = crypto:hash(DigestType, M), + H =:= HOther; + false -> + false + end; + nomatch -> + false + end; + _BadTrailer -> + false + end; + _ -> + false + end; + false -> + false + end. +``` + +### dp + +```erlang +dp(B, #'RSAPrivateKey'{modulus=N, privateExponent=E}) -> + crypto:mod_pow(B, E, N). +``` + +### ep + +```erlang +ep(B, #'RSAPublicKey'{modulus=N, publicExponent=E}) -> + crypto:mod_pow(B, E, N). +``` + +### int_to_bit_size + +```erlang +int_to_bit_size(I) -> + int_to_bit_size(I, 0). 
+``` + +### int_to_bit_size + +```erlang +int_to_bit_size(0, B) -> + B; +``` + +### int_to_bit_size + +```erlang +int_to_bit_size(I, B) -> + int_to_bit_size(I bsr 1, B + 1). +``` + +### int_to_byte_size + +```erlang +int_to_byte_size(I) -> + int_to_byte_size(I, 0). +``` + +### int_to_byte_size + +```erlang +int_to_byte_size(0, B) -> + B; +``` + +### int_to_byte_size + +```erlang +int_to_byte_size(I, B) -> + int_to_byte_size(I bsr 8, B + 1). +``` + +### mgf1 + +```erlang +mgf1(DigestType, Seed, Len) -> + mgf1(DigestType, Seed, Len, <<>>, 0). +``` + +### mgf1 + +```erlang +mgf1(_DigestType, _Seed, Len, T, _Counter) when byte_size(T) >= Len -> + binary:part(T, 0, Len); +``` + +### mgf1 + +```erlang +mgf1(DigestType, Seed, Len, T, Counter) -> + CounterBin = << Counter:8/unsigned-big-integer-unit:4 >>, + NewT = << T/binary, (crypto:hash(DigestType, << Seed/binary, CounterBin/binary >>))/binary >>, + mgf1(DigestType, Seed, Len, NewT, Counter + 1). +``` + +### normalize_to_key_size + +```erlang +normalize_to_key_size(_, <<>>) -> + <<>>; +``` + +### normalize_to_key_size + +```erlang +normalize_to_key_size(Bits, _A = << C, Rest/binary >>) -> + SH = (Bits - 1) band 16#7, + Mask = case SH > 0 of + false -> + 16#FF; + true -> + 16#FF bsr (8 - SH) + end, + B = << (C band Mask), Rest/binary >>, + B. +``` + +### pad_to_key_size + +```erlang +pad_to_key_size(Bytes, Data) when byte_size(Data) < Bytes -> + pad_to_key_size(Bytes, << 0, Data/binary >>); +``` + +### pad_to_key_size + +```erlang +pad_to_key_size(_Bytes, Data) -> +``` + +--- + +*Generated from [rsa_pss.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/rsa_pss.erl)* From db801f1d47581e6dab4ce740c6a45f1f9a8b0efd Mon Sep 17 00:00:00 2001 From: Dylan Shade <63427984+dpshade@users.noreply.github.com> Date: Thu, 18 Sep 2025 13:25:52 -0400 Subject: [PATCH 02/17] docs: Remove generated .erl.md files from version control These are generated artifacts that duplicate source code and should not be committed. 
The build process now generates them on-demand. --- docs/book/src/ar_bundles.erl.md | 2070 ----------------- docs/book/src/ar_deep_hash.erl.md | 62 - docs/book/src/ar_rate_limiter.erl.md | 187 -- docs/book/src/ar_timestamp.erl.md | 97 - docs/book/src/ar_tx.erl.md | 259 --- docs/book/src/ar_wallet.erl.md | 456 ---- docs/book/src/dev_apply.erl.md | 350 --- docs/book/src/dev_arweave.erl.md | 355 --- docs/book/src/dev_arweave_block_cache.erl.md | 99 - docs/book/src/dev_auth_hook.erl.md | 431 ---- docs/book/src/dev_cache.erl.md | 310 --- docs/book/src/dev_cacheviz.erl.md | 115 - docs/book/src/dev_codec_ans104.erl.md | 511 ---- docs/book/src/dev_codec_ans104_from.erl.md | 355 --- docs/book/src/dev_codec_ans104_to.erl.md | 240 -- docs/book/src/dev_codec_cookie.erl.md | 570 ----- docs/book/src/dev_codec_cookie_auth.erl.md | 329 --- .../src/dev_codec_cookie_test_vectors.erl.md | 904 ------- docs/book/src/dev_codec_flat.erl.md | 266 --- docs/book/src/dev_codec_http_auth.erl.md | 210 -- docs/book/src/dev_codec_httpsig.erl.md | 447 ---- docs/book/src/dev_codec_httpsig_conv.erl.md | 564 ----- docs/book/src/dev_codec_httpsig_keyid.erl.md | 195 -- docs/book/src/dev_codec_httpsig_proxy.erl.md | 54 - .../book/src/dev_codec_httpsig_siginfo.erl.md | 387 --- docs/book/src/dev_codec_json.erl.md | 170 -- docs/book/src/dev_codec_structured.erl.md | 500 ---- docs/book/src/dev_copycat.erl.md | 42 - docs/book/src/dev_copycat_arweave.erl.md | 112 - docs/book/src/dev_copycat_graphql.erl.md | 219 -- docs/book/src/dev_cron.erl.md | 400 ---- docs/book/src/dev_cu.erl.md | 46 - docs/book/src/dev_dedup.erl.md | 93 - docs/book/src/dev_delegated_compute.erl.md | 242 -- docs/book/src/dev_faff.erl.md | 62 - docs/book/src/dev_genesis_wasm.erl.md | 756 ------ docs/book/src/dev_green_zone.erl.md | 342 --- docs/book/src/dev_hook.erl.md | 272 --- docs/book/src/dev_hyperbuddy.erl.md | 291 --- docs/book/src/dev_json_iface.erl.md | 685 ------ docs/book/src/dev_local_name.erl.md | 258 -- 
docs/book/src/dev_lookup.erl.md | 91 - docs/book/src/dev_lua.erl.md | 977 -------- docs/book/src/dev_lua_lib.erl.md | 199 -- docs/book/src/dev_lua_test.erl.md | 166 -- docs/book/src/dev_lua_test_ledgers.erl.md | 887 ------- docs/book/src/dev_manifest.erl.md | 188 -- docs/book/src/dev_message.erl.md | 625 ----- docs/book/src/dev_meta.erl.md | 981 -------- docs/book/src/dev_monitor.erl.md | 75 - docs/book/src/dev_multipass.erl.md | 77 - docs/book/src/dev_name.erl.md | 200 -- docs/book/src/dev_node_process.erl.md | 204 -- docs/book/src/dev_p4.erl.md | 308 --- docs/book/src/dev_patch.erl.md | 288 --- docs/book/src/dev_poda.erl.md | 298 --- docs/book/src/dev_process.erl.md | 1221 ---------- docs/book/src/dev_process_cache.erl.md | 204 -- docs/book/src/dev_process_worker.erl.md | 213 -- docs/book/src/dev_profile.erl.md | 414 ---- docs/book/src/dev_push.erl.md | 911 -------- docs/book/src/dev_query.erl.md | 359 --- docs/book/src/dev_query_arweave.erl.md | 546 ----- docs/book/src/dev_query_graphql.erl.md | 497 ---- docs/book/src/dev_query_test_vectors.erl.md | 842 ------- docs/book/src/dev_relay.erl.md | 301 --- docs/book/src/dev_router.erl.md | 1450 ------------ docs/book/src/dev_scheduler.erl.md | 1859 --------------- docs/book/src/dev_scheduler_cache.erl.md | 645 ----- docs/book/src/dev_scheduler_formats.erl.md | 309 --- docs/book/src/dev_scheduler_registry.erl.md | 186 -- docs/book/src/dev_scheduler_server.erl.md | 238 -- docs/book/src/dev_secret.erl.md | 845 ------- docs/book/src/dev_simple_pay.erl.md | 408 ---- docs/book/src/dev_snp.erl.md | 693 ------ docs/book/src/dev_snp_nif.erl.md | 134 -- docs/book/src/dev_stack.erl.md | 774 ------ docs/book/src/dev_test.erl.md | 333 --- docs/book/src/dev_volume.erl.md | 576 ----- docs/book/src/dev_wasi.erl.md | 364 --- docs/book/src/dev_wasm.erl.md | 458 ---- docs/book/src/dev_whois.erl.md | 94 - docs/book/src/hb.erl.md | 423 ---- docs/book/src/hb_ao.erl.md | 1509 ------------ docs/book/src/hb_ao_test_vectors.erl.md | 892 
------- docs/book/src/hb_app.erl.md | 37 - docs/book/src/hb_beamr.erl.md | 443 ---- docs/book/src/hb_beamr_io.erl.md | 238 -- docs/book/src/hb_cache.erl.md | 967 -------- docs/book/src/hb_cache_control.erl.md | 554 ----- docs/book/src/hb_cache_render.erl.md | 511 ---- docs/book/src/hb_client.erl.md | 244 -- docs/book/src/hb_crypto.erl.md | 143 -- docs/book/src/hb_debugger.erl.md | 242 -- docs/book/src/hb_escape.erl.md | 394 ---- docs/book/src/hb_event.erl.md | 505 ---- docs/book/src/hb_examples.erl.md | 227 -- docs/book/src/hb_features.erl.md | 132 -- docs/book/src/hb_format.erl.md | 1201 ---------- docs/book/src/hb_gateway_client.erl.md | 371 --- docs/book/src/hb_http.erl.md | 914 -------- docs/book/src/hb_http_benchmark_tests.erl.md | 8 - docs/book/src/hb_http_client.erl.md | 923 -------- docs/book/src/hb_http_client_sup.erl.md | 34 - docs/book/src/hb_http_multi.erl.md | 393 ---- docs/book/src/hb_http_server.erl.md | 607 ----- docs/book/src/hb_json.erl.md | 39 - docs/book/src/hb_keccak.erl.md | 123 - docs/book/src/hb_link.erl.md | 264 --- docs/book/src/hb_logger.erl.md | 130 -- docs/book/src/hb_maps.erl.md | 398 ---- docs/book/src/hb_message.erl.md | 1150 --------- docs/book/src/hb_message_test_vectors.erl.md | 1736 -------------- docs/book/src/hb_metrics_collector.erl.md | 76 - docs/book/src/hb_name.erl.md | 292 --- docs/book/src/hb_opts.erl.md | 683 ------ docs/book/src/hb_path.erl.md | 695 ------ docs/book/src/hb_persistent.erl.md | 585 ----- docs/book/src/hb_private.erl.md | 279 --- docs/book/src/hb_process_monitor.erl.md | 106 - docs/book/src/hb_router.erl.md | 44 - docs/book/src/hb_singleton.erl.md | 1104 --------- docs/book/src/hb_store.erl.md | 1123 --------- docs/book/src/hb_store_fs.erl.md | 305 --- docs/book/src/hb_store_gateway.erl.md | 388 --- docs/book/src/hb_store_lmdb.erl.md | 1206 ---------- docs/book/src/hb_store_lru.erl.md | 1078 --------- docs/book/src/hb_store_opts.erl.md | 316 --- docs/book/src/hb_store_remote_node.erl.md | 194 -- 
docs/book/src/hb_store_rocksdb.erl.md | 884 ------- docs/book/src/hb_structured_fields.erl.md | 1426 ------------ docs/book/src/hb_sup.erl.md | 89 - docs/book/src/hb_test_utils.erl.md | 351 --- docs/book/src/hb_tracer.erl.md | 186 -- docs/book/src/hb_util.erl.md | 1666 ------------- docs/book/src/hb_volume.erl.md | 1047 --------- docs/book/src/rsa_pss.erl.md | 354 --- 137 files changed, 64980 deletions(-) delete mode 100644 docs/book/src/ar_bundles.erl.md delete mode 100644 docs/book/src/ar_deep_hash.erl.md delete mode 100644 docs/book/src/ar_rate_limiter.erl.md delete mode 100644 docs/book/src/ar_timestamp.erl.md delete mode 100644 docs/book/src/ar_tx.erl.md delete mode 100644 docs/book/src/ar_wallet.erl.md delete mode 100644 docs/book/src/dev_apply.erl.md delete mode 100644 docs/book/src/dev_arweave.erl.md delete mode 100644 docs/book/src/dev_arweave_block_cache.erl.md delete mode 100644 docs/book/src/dev_auth_hook.erl.md delete mode 100644 docs/book/src/dev_cache.erl.md delete mode 100644 docs/book/src/dev_cacheviz.erl.md delete mode 100644 docs/book/src/dev_codec_ans104.erl.md delete mode 100644 docs/book/src/dev_codec_ans104_from.erl.md delete mode 100644 docs/book/src/dev_codec_ans104_to.erl.md delete mode 100644 docs/book/src/dev_codec_cookie.erl.md delete mode 100644 docs/book/src/dev_codec_cookie_auth.erl.md delete mode 100644 docs/book/src/dev_codec_cookie_test_vectors.erl.md delete mode 100644 docs/book/src/dev_codec_flat.erl.md delete mode 100644 docs/book/src/dev_codec_http_auth.erl.md delete mode 100644 docs/book/src/dev_codec_httpsig.erl.md delete mode 100644 docs/book/src/dev_codec_httpsig_conv.erl.md delete mode 100644 docs/book/src/dev_codec_httpsig_keyid.erl.md delete mode 100644 docs/book/src/dev_codec_httpsig_proxy.erl.md delete mode 100644 docs/book/src/dev_codec_httpsig_siginfo.erl.md delete mode 100644 docs/book/src/dev_codec_json.erl.md delete mode 100644 docs/book/src/dev_codec_structured.erl.md delete mode 100644 
docs/book/src/dev_copycat.erl.md delete mode 100644 docs/book/src/dev_copycat_arweave.erl.md delete mode 100644 docs/book/src/dev_copycat_graphql.erl.md delete mode 100644 docs/book/src/dev_cron.erl.md delete mode 100644 docs/book/src/dev_cu.erl.md delete mode 100644 docs/book/src/dev_dedup.erl.md delete mode 100644 docs/book/src/dev_delegated_compute.erl.md delete mode 100644 docs/book/src/dev_faff.erl.md delete mode 100644 docs/book/src/dev_genesis_wasm.erl.md delete mode 100644 docs/book/src/dev_green_zone.erl.md delete mode 100644 docs/book/src/dev_hook.erl.md delete mode 100644 docs/book/src/dev_hyperbuddy.erl.md delete mode 100644 docs/book/src/dev_json_iface.erl.md delete mode 100644 docs/book/src/dev_local_name.erl.md delete mode 100644 docs/book/src/dev_lookup.erl.md delete mode 100644 docs/book/src/dev_lua.erl.md delete mode 100644 docs/book/src/dev_lua_lib.erl.md delete mode 100644 docs/book/src/dev_lua_test.erl.md delete mode 100644 docs/book/src/dev_lua_test_ledgers.erl.md delete mode 100644 docs/book/src/dev_manifest.erl.md delete mode 100644 docs/book/src/dev_message.erl.md delete mode 100644 docs/book/src/dev_meta.erl.md delete mode 100644 docs/book/src/dev_monitor.erl.md delete mode 100644 docs/book/src/dev_multipass.erl.md delete mode 100644 docs/book/src/dev_name.erl.md delete mode 100644 docs/book/src/dev_node_process.erl.md delete mode 100644 docs/book/src/dev_p4.erl.md delete mode 100644 docs/book/src/dev_patch.erl.md delete mode 100644 docs/book/src/dev_poda.erl.md delete mode 100644 docs/book/src/dev_process.erl.md delete mode 100644 docs/book/src/dev_process_cache.erl.md delete mode 100644 docs/book/src/dev_process_worker.erl.md delete mode 100644 docs/book/src/dev_profile.erl.md delete mode 100644 docs/book/src/dev_push.erl.md delete mode 100644 docs/book/src/dev_query.erl.md delete mode 100644 docs/book/src/dev_query_arweave.erl.md delete mode 100644 docs/book/src/dev_query_graphql.erl.md delete mode 100644 
docs/book/src/dev_query_test_vectors.erl.md delete mode 100644 docs/book/src/dev_relay.erl.md delete mode 100644 docs/book/src/dev_router.erl.md delete mode 100644 docs/book/src/dev_scheduler.erl.md delete mode 100644 docs/book/src/dev_scheduler_cache.erl.md delete mode 100644 docs/book/src/dev_scheduler_formats.erl.md delete mode 100644 docs/book/src/dev_scheduler_registry.erl.md delete mode 100644 docs/book/src/dev_scheduler_server.erl.md delete mode 100644 docs/book/src/dev_secret.erl.md delete mode 100644 docs/book/src/dev_simple_pay.erl.md delete mode 100644 docs/book/src/dev_snp.erl.md delete mode 100644 docs/book/src/dev_snp_nif.erl.md delete mode 100644 docs/book/src/dev_stack.erl.md delete mode 100644 docs/book/src/dev_test.erl.md delete mode 100644 docs/book/src/dev_volume.erl.md delete mode 100644 docs/book/src/dev_wasi.erl.md delete mode 100644 docs/book/src/dev_wasm.erl.md delete mode 100644 docs/book/src/dev_whois.erl.md delete mode 100644 docs/book/src/hb.erl.md delete mode 100644 docs/book/src/hb_ao.erl.md delete mode 100644 docs/book/src/hb_ao_test_vectors.erl.md delete mode 100644 docs/book/src/hb_app.erl.md delete mode 100644 docs/book/src/hb_beamr.erl.md delete mode 100644 docs/book/src/hb_beamr_io.erl.md delete mode 100644 docs/book/src/hb_cache.erl.md delete mode 100644 docs/book/src/hb_cache_control.erl.md delete mode 100644 docs/book/src/hb_cache_render.erl.md delete mode 100644 docs/book/src/hb_client.erl.md delete mode 100644 docs/book/src/hb_crypto.erl.md delete mode 100644 docs/book/src/hb_debugger.erl.md delete mode 100644 docs/book/src/hb_escape.erl.md delete mode 100644 docs/book/src/hb_event.erl.md delete mode 100644 docs/book/src/hb_examples.erl.md delete mode 100644 docs/book/src/hb_features.erl.md delete mode 100644 docs/book/src/hb_format.erl.md delete mode 100644 docs/book/src/hb_gateway_client.erl.md delete mode 100644 docs/book/src/hb_http.erl.md delete mode 100644 docs/book/src/hb_http_benchmark_tests.erl.md delete mode 
100644 docs/book/src/hb_http_client.erl.md delete mode 100644 docs/book/src/hb_http_client_sup.erl.md delete mode 100644 docs/book/src/hb_http_multi.erl.md delete mode 100644 docs/book/src/hb_http_server.erl.md delete mode 100644 docs/book/src/hb_json.erl.md delete mode 100644 docs/book/src/hb_keccak.erl.md delete mode 100644 docs/book/src/hb_link.erl.md delete mode 100644 docs/book/src/hb_logger.erl.md delete mode 100644 docs/book/src/hb_maps.erl.md delete mode 100644 docs/book/src/hb_message.erl.md delete mode 100644 docs/book/src/hb_message_test_vectors.erl.md delete mode 100644 docs/book/src/hb_metrics_collector.erl.md delete mode 100644 docs/book/src/hb_name.erl.md delete mode 100644 docs/book/src/hb_opts.erl.md delete mode 100644 docs/book/src/hb_path.erl.md delete mode 100644 docs/book/src/hb_persistent.erl.md delete mode 100644 docs/book/src/hb_private.erl.md delete mode 100644 docs/book/src/hb_process_monitor.erl.md delete mode 100644 docs/book/src/hb_router.erl.md delete mode 100644 docs/book/src/hb_singleton.erl.md delete mode 100644 docs/book/src/hb_store.erl.md delete mode 100644 docs/book/src/hb_store_fs.erl.md delete mode 100644 docs/book/src/hb_store_gateway.erl.md delete mode 100644 docs/book/src/hb_store_lmdb.erl.md delete mode 100644 docs/book/src/hb_store_lru.erl.md delete mode 100644 docs/book/src/hb_store_opts.erl.md delete mode 100644 docs/book/src/hb_store_remote_node.erl.md delete mode 100644 docs/book/src/hb_store_rocksdb.erl.md delete mode 100644 docs/book/src/hb_structured_fields.erl.md delete mode 100644 docs/book/src/hb_sup.erl.md delete mode 100644 docs/book/src/hb_test_utils.erl.md delete mode 100644 docs/book/src/hb_tracer.erl.md delete mode 100644 docs/book/src/hb_util.erl.md delete mode 100644 docs/book/src/hb_volume.erl.md delete mode 100644 docs/book/src/rsa_pss.erl.md diff --git a/docs/book/src/ar_bundles.erl.md b/docs/book/src/ar_bundles.erl.md deleted file mode 100644 index 4bcdb534f..000000000 --- 
a/docs/book/src/ar_bundles.erl.md +++ /dev/null @@ -1,2070 +0,0 @@ -# ar_bundles - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_bundles.erl) - -Module for creating, signing, and verifying Arweave data items and bundles. - ---- - -## Exported Functions - -- `data_item_signature_data/1` -- `decode_tags/1` -- `deserialize/1` -- `deserialize/2` -- `encode_tags/1` -- `find/2` -- `format/1` -- `format/2` -- `format/3` -- `hd/1` -- `id/1` -- `id/2` -- `is_signed/1` -- `manifest_item/1` -- `manifest/1` -- `map/1` -- `member/2` -- `new_item/4` -- `normalize/1` -- `parse_manifest/1` -- `print/1` -- `reset_ids/1` -- `serialize/1` -- `serialize/2` -- `sign_item/2` -- `signer/1` -- `type/1` -- `verify_item/1` - ---- - -### print - -Module for creating, signing, and verifying Arweave data items and bundles. - -```erlang -print(Item) -> - io:format(standard_error, "~s", [lists:flatten(format(Item))]). -``` - -### format - -Module for creating, signing, and verifying Arweave data items and bundles. - -```erlang -format(Item) -> format(Item, 0). -``` - -### format - -Module for creating, signing, and verifying Arweave data items and bundles. - -```erlang -format(Item, Indent) -> format(Item, Indent, #{}). -``` - -### format - -Module for creating, signing, and verifying Arweave data items and bundles. - -```erlang -format(Item, Indent, Opts) when is_list(Item); is_map(Item) -> - format(normalize(Item), Indent, Opts); -``` - -### format - -Module for creating, signing, and verifying Arweave data items and bundles. 
- -```erlang -format(Item, Indent, Opts) when is_record(Item, tx) -> - MustVerify = hb_opts:get(debug_ids, true, Opts), - Valid = - if MustVerify -> verify_item(Item); - true -> true - end, - UnsignedID = - if MustVerify -> hb_util:encode(id(Item, unsigned)); - true -> <<"[SKIPPED ID]">> - end, - SignedID = - if MustVerify -> - case id(Item, signed) of - not_signed -> <<"[NOT SIGNED]">>; - ID -> hb_util:encode(ID) - end; - true -> <<"[SKIPPED ID]">> - end, - format_line( - "TX ( ~s: ~s ) {", - [ - if - MustVerify andalso Item#tx.signature =/= ?DEFAULT_SIG -> - lists:flatten( - io_lib:format( - "~s (signed) ~s (unsigned)", - [SignedID, UnsignedID] - ) - ); - true -> UnsignedID - end, - if - not MustVerify -> "[SKIPPED VERIFICATION]"; - Valid == true -> "[SIGNED+VALID]"; - true -> "[UNSIGNED/INVALID]" - end - ], - Indent - ) ++ - case MustVerify andalso (not Valid) andalso Item#tx.signature =/= ?DEFAULT_SIG of - true -> - format_line("!!! CAUTION: ITEM IS SIGNED BUT INVALID !!!", Indent + 1); - false -> [] - end ++ - case is_signed(Item) of - true -> - format_line("Signer: ~s", [hb_util:encode(signer(Item))], Indent + 1); - false -> [] - end ++ - format_line("Target: ~s", [ - case Item#tx.target of - <<>> -> "[NONE]"; - Target -> hb_util:id(Target) - end - ], Indent + 1) ++ - format_line("Last TX: ~s", [ - case Item#tx.anchor of - ?DEFAULT_LAST_TX -> "[NONE]"; - LastTX -> hb_util:encode(LastTX) - end - ], Indent + 1) ++ - format_line("Tags:", Indent + 1) ++ - lists:map( - fun({Key, Val}) -> format_line("~s -> ~s", [Key, Val], Indent + 2) end, - Item#tx.tags - ) ++ - format_line("Data:", Indent + 1) ++ format_data(Item, Indent + 2) ++ - format_line("}", Indent); -``` - -### format - -Module for creating, signing, and verifying Arweave data items and bundles. - -```erlang -format(Item, Indent, _Opts) -> - % Whatever we have, its not a tx... 
-``` - -### format_data - -```erlang -format_data(Item, Indent) when is_binary(Item#tx.data) -> - case lists:keyfind(<<"bundle-format">>, 1, Item#tx.tags) of - {_, _} -> - format_data(deserialize(serialize(Item)), Indent); - false -> - format_line( - "Binary: ~p... <~p bytes>", - [format_binary(Item#tx.data), byte_size(Item#tx.data)], - Indent - ) - end; -``` - -### format_data - -```erlang -format_data(Item, Indent) when is_map(Item#tx.data) -> - format_line("Map:", Indent) ++ - lists:map( - fun({Name, MapItem}) -> - format_line("~s ->", [Name], Indent + 1) ++ - format(MapItem, Indent + 2) - end, - maps:to_list(Item#tx.data) - ); -``` - -### format_data - -```erlang -format_data(Item, Indent) when is_list(Item#tx.data) -> - format_line("List:", Indent) ++ - lists:map( - fun(ListItem) -> - format(ListItem, Indent + 1) - end, - Item#tx.data - ). -``` - -### format_binary - -```erlang -format_binary(Bin) -> - lists:flatten( - io_lib:format( - "~p", - [ - binary:part( - Bin, - 0, - case byte_size(Bin) of - X when X < ?BIN_PRINT -> X; - _ -> ?BIN_PRINT - end - ) - ] - ) - ). -``` - -### format_line - -```erlang -format_line(Str, Indent) -> format_line(Str, "", Indent). -``` - -### format_line - -```erlang -format_line(RawStr, Fmt, Ind) -> - io_lib:format( - [$\s || _ <- lists:seq(1, Ind * ?INDENT_SPACES)] ++ - lists:flatten(RawStr) ++ "\n", - Fmt - ). -``` - -### signer - -Return the address of the signer of an item, if it is signed. - -```erlang -signer(#tx { owner = ?DEFAULT_OWNER }) -> undefined; -``` - -### signer - -Return the address of the signer of an item, if it is signed. -Check if an item is signed. - -```erlang -signer(Item) -> crypto:hash(sha256, Item#tx.owner). -``` - -### is_signed - -Return the address of the signer of an item, if it is signed. -Check if an item is signed. - -```erlang -is_signed(Item) -> - Item#tx.signature =/= ?DEFAULT_SIG. -``` - -### id - -Return the ID of an item -- either signed or unsigned as specified. 
- -```erlang -id(Item) -> id(Item, unsigned). -``` - -### id - -Return the ID of an item -- either signed or unsigned as specified. - -```erlang -id(Item, Type) when not is_record(Item, tx) -> - id(normalize(Item), Type); -``` - -### id - -Return the ID of an item -- either signed or unsigned as specified. - -```erlang -id(Item = #tx { unsigned_id = ?DEFAULT_ID }, unsigned) -> - CorrectedItem = reset_ids(Item), - CorrectedItem#tx.unsigned_id; -``` - -### id - -Return the ID of an item -- either signed or unsigned as specified. - -```erlang -id(#tx { unsigned_id = UnsignedID }, unsigned) -> - UnsignedID; -``` - -### id - -Return the ID of an item -- either signed or unsigned as specified. - -```erlang -id(#tx { id = ?DEFAULT_ID }, signed) -> - not_signed; -``` - -### id - -Return the ID of an item -- either signed or unsigned as specified. - -```erlang -id(#tx { id = ID }, signed) -> - ID. -``` - -### hd - -Return the first item in a bundle-map/list. - -```erlang -hd(#tx { data = #{ <<"1">> := Msg } }) -> Msg; -``` - -### hd - -Return the first item in a bundle-map/list. - -```erlang -hd(#tx { data = [First | _] }) -> First; -``` - -### hd - -Return the first item in a bundle-map/list. - -```erlang -hd(TX = #tx { data = Binary }) when is_binary(Binary) -> - ?MODULE:hd((deserialize(serialize(TX), binary))#tx.data); -``` - -### hd - -Return the first item in a bundle-map/list. - -```erlang -hd(#{ <<"1">> := Msg }) -> Msg; -``` - -### hd - -Return the first item in a bundle-map/list. -Convert an item containing a map or list into an Erlang map. - -```erlang -hd(_) -> undefined. -``` - -### map - -Return the first item in a bundle-map/list. -Convert an item containing a map or list into an Erlang map. - -```erlang -map(#tx { data = Map }) when is_map(Map) -> Map; -``` - -### map - -Return the first item in a bundle-map/list. -Convert an item containing a map or list into an Erlang map. 
- -```erlang -map(#tx { data = Data }) when is_list(Data) -> - maps:from_list( - lists:zipwith( - fun({Index, Item}) -> {integer_to_binary(Index), map(Item)} end, - lists:seq(1, length(Data)), - Data - ) - ); -``` - -### map - -Return the first item in a bundle-map/list. -Convert an item containing a map or list into an Erlang map. - -```erlang -map(Item = #tx { data = Data }) when is_binary(Data) -> - (maybe_unbundle(Item))#tx.data. -``` - -### member - -Check if an item exists in a bundle-map/list. - -```erlang -member(Key, Item) -> - find(Key, Item) =/= not_found. -``` - -### find - -Find an item in a bundle-map/list and return it. - -```erlang -find(Key, Map) when is_map(Map) -> - case maps:get(Key, Map, not_found) of - not_found -> find(Key, maps:values(Map)); - Item -> Item - end; -``` - -### find - -Find an item in a bundle-map/list and return it. - -```erlang -find(_Key, []) -> not_found; -``` - -### find - -Find an item in a bundle-map/list and return it. - -```erlang -find(Key, [Item|Rest]) -> - case find(Key, Item) of - not_found -> find(Key, Rest); - CorrectItem -> CorrectItem - end; -``` - -### find - -Find an item in a bundle-map/list and return it. - -```erlang -find(Key, Item = #tx { id = Key }) -> Item; -``` - -### find - -Find an item in a bundle-map/list and return it. - -```erlang -find(Key, Item = #tx { data = Data }) -> - case id(Item, unsigned) of - Key -> Item; - _ -> - case is_binary(Data) of - false -> find(Key, Data); - true -> not_found - end - end; -``` - -### find - -Find an item in a bundle-map/list and return it. - -```erlang -find(_Key, _) -> - not_found. -``` - -### manifest_item - -Return the manifest item in a bundle-map/list. - -```erlang -manifest_item(#tx { manifest = Manifest }) when is_record(Manifest, tx) -> - Manifest; -``` - -### manifest_item - -Return the manifest item in a bundle-map/list. -Create a new data item. Should only be used for testing. - -```erlang -manifest_item(_Item) -> undefined. 
-``` - -### new_item - -Return the manifest item in a bundle-map/list. -Create a new data item. Should only be used for testing. - -```erlang -new_item(Target, Anchor, Tags, Data) -> - reset_ids( - #tx{ - format = ans104, - target = Target, - anchor = Anchor, - tags = Tags, - data = Data, - data_size = byte_size(Data) - } - ). -``` - -### sign_item - -Sign a data item. - -```erlang -sign_item(_, undefined) -> throw(wallet_not_found); -``` - -### sign_item - -Sign a data item. - -```erlang -sign_item(RawItem, {PrivKey, {KeyType, Owner}}) -> - Item = (normalize_data(RawItem))#tx{format = ans104, owner = Owner, signature_type = KeyType}, - % Generate the signature from the data item's data segment in 'signed'-ready mode. -``` - -### verify_item - -Verify the validity of a data item. - -```erlang -verify_item(DataItem) -> - ValidID = verify_data_item_id(DataItem), - ValidSignature = verify_data_item_signature(DataItem), - ValidTags = verify_data_item_tags(DataItem), - ValidID andalso ValidSignature andalso ValidTags. -``` - -### type - -```erlang -type(Item) when is_record(Item, tx) -> - case lists:keyfind(<<"bundle-map">>, 1, Item#tx.tags) of - {<<"bundle-map">>, _} -> - case lists:keyfind(<<"map-format">>, 1, Item#tx.tags) of - {<<"map-format">>, <<"list">>} -> list; - _ -> map - end; - _ -> - binary - end; -``` - -### type - -```erlang -type(Data) when erlang:is_map(Data) -> - map; -``` - -### type - -```erlang -type(Data) when erlang:is_list(Data) -> - list; -``` - -### type - -```erlang -type(_) -> - binary. -``` - -### data_item_signature_data - -Generate the data segment to be signed for a data item. - -```erlang -data_item_signature_data(RawItem) -> - data_item_signature_data(RawItem, signed). 
-``` - -### data_item_signature_data - -```erlang -data_item_signature_data(RawItem, unsigned) -> - data_item_signature_data(RawItem#tx { owner = ?DEFAULT_OWNER }, signed); -``` - -### data_item_signature_data - -```erlang -data_item_signature_data(RawItem, signed) -> - true = enforce_valid_tx(RawItem), - NormItem = normalize_data(RawItem), - ar_deep_hash:hash([ - utf8_encoded("dataitem"), - utf8_encoded("1"), - %% Only SignatureType 1 is supported for now (RSA 4096) - utf8_encoded("1"), - <<(NormItem#tx.owner)/binary>>, - <<(NormItem#tx.target)/binary>>, - <<(NormItem#tx.anchor)/binary>>, - encode_tags(NormItem#tx.tags), - <<(NormItem#tx.data)/binary>> - ]). -``` - -### verify_data_item_id - -Verify the data item's ID matches the signature. - -```erlang -verify_data_item_id(DataItem) -> - ExpectedID = crypto:hash(sha256, DataItem#tx.signature), - DataItem#tx.id == ExpectedID. -``` - -### verify_data_item_signature - -Verify the data item's signature. - -```erlang -verify_data_item_signature(DataItem) -> - SignatureData = data_item_signature_data(DataItem), - %?event({unsigned_id, hb_util:encode(id(DataItem, unsigned)), hb_util:encode(SignatureData)}), - ar_wallet:verify( - {DataItem#tx.signature_type, DataItem#tx.owner}, SignatureData, DataItem#tx.signature - ). -``` - -### verify_data_item_tags - -Verify the validity of the data item's tags. - -```erlang -verify_data_item_tags(DataItem) -> - ValidCount = length(DataItem#tx.tags) =< 128, - ValidTags = lists:all( - fun({Name, Value}) -> - byte_size(Name) =< 1024 andalso byte_size(Value) =< 3072 - end, - DataItem#tx.tags - ), - ValidCount andalso ValidTags. -``` - -### normalize - -Ensure that a data item (potentially containing a map or list) has a - -```erlang -normalize(Item) -> reset_ids(normalize_data(Item)). 
-``` - -### normalize_data - -Ensure that a data item (potentially containing a map or list) has a - -```erlang -normalize_data(not_found) -> throw(not_found); -``` - -### normalize_data - -Ensure that a data item (potentially containing a map or list) has a - -```erlang -normalize_data(Item = #tx{data = Bin}) when is_binary(Bin) -> - ?event({normalize_data, binary, Item}), - normalize_data_size(Item); -``` - -### normalize_data - -Ensure that a data item (potentially containing a map or list) has a - -```erlang -normalize_data(Bundle) when is_list(Bundle); is_map(Bundle) -> - ?event({normalize_data, bundle, Bundle}), - normalize_data(#tx{ data = Bundle }); -``` - -### normalize_data - -Ensure that a data item (potentially containing a map or list) has a - -```erlang -normalize_data(Item = #tx { data = Data }) when is_list(Data) -> - ?event({normalize_data, list, Item}), - normalize_data( - Item#tx{ - tags = add_list_tags(Item#tx.tags), - data = - maps:from_list( - lists:zipwith( - fun(Index, MapItem) -> - { - integer_to_binary(Index), - update_ids(normalize_data(MapItem)) - } - end, - lists:seq(1, length(Data)), - Data - ) - ) - } - ); -``` - -### normalize_data - -Ensure that a data item (potentially containing a map or list) has a - -```erlang -normalize_data(Item = #tx{data = Data}) -> - ?event({normalize_data, map, Item}), - normalize_data_size( - case serialize_bundle_data(Data, Item#tx.manifest) of - {Manifest, Bin} -> - Item#tx{ - data = Bin, - manifest = Manifest, - tags = - add_manifest_tags( - add_bundle_tags(Item#tx.tags), - id(Manifest, unsigned) - ) - }; - DirectBin -> - Item#tx{ - data = DirectBin, - tags = add_bundle_tags(Item#tx.tags) - } - end - ). -``` - -### normalize_data_size - -Reset the data size of a data item. Assumes that the data is already normalized. 
- -```erlang -normalize_data_size(Item = #tx{data = Bin}) when is_binary(Bin) -> - Item#tx{data_size = byte_size(Bin)}; -``` - -### normalize_data_size - -Reset the data size of a data item. Assumes that the data is already normalized. -Convert a #tx record to its binary representation. - -```erlang -normalize_data_size(Item) -> Item. -``` - -### serialize - -Reset the data size of a data item. Assumes that the data is already normalized. -Convert a #tx record to its binary representation. - -```erlang -serialize(not_found) -> throw(not_found); -``` - -### serialize - -Reset the data size of a data item. Assumes that the data is already normalized. -Convert a #tx record to its binary representation. - -```erlang -serialize(TX) -> serialize(TX, binary). -``` - -### serialize - -Reset the data size of a data item. Assumes that the data is already normalized. -Convert a #tx record to its binary representation. - -```erlang -serialize(TX, binary) when is_binary(TX) -> TX; -``` - -### serialize - -Reset the data size of a data item. Assumes that the data is already normalized. -Convert a #tx record to its binary representation. - -```erlang -serialize(RawTX, binary) -> - true = enforce_valid_tx(RawTX), - TX = normalize(RawTX), - EncodedTags = encode_tags(TX#tx.tags), - << - (encode_signature_type(TX#tx.signature_type))/binary, - (TX#tx.signature)/binary, - (TX#tx.owner)/binary, - (encode_optional_field(TX#tx.target))/binary, - (encode_optional_field(TX#tx.anchor))/binary, - (encode_tags_size(TX#tx.tags, EncodedTags))/binary, - EncodedTags/binary, - (TX#tx.data)/binary - >>; -``` - -### serialize - -Reset the data size of a data item. Assumes that the data is already normalized. -Convert a #tx record to its binary representation. -Take an item and ensure that it is of valid form. Useful for ensuring - -```erlang -serialize(TX, json) -> - true = enforce_valid_tx(TX), - hb_json:encode(hb_message:convert(TX, <<"ans104@1.0">>, #{})). 
-``` - -### enforce_valid_tx - -Reset the data size of a data item. Assumes that the data is already normalized. -Convert a #tx record to its binary representation. -Take an item and ensure that it is of valid form. Useful for ensuring - -```erlang -enforce_valid_tx(List) when is_list(List) -> - lists:all(fun enforce_valid_tx/1, List); -``` - -### enforce_valid_tx - -Reset the data size of a data item. Assumes that the data is already normalized. -Convert a #tx record to its binary representation. -Take an item and ensure that it is of valid form. Useful for ensuring - -```erlang -enforce_valid_tx(Map) when is_map(Map) -> - lists:all(fun(Item) -> enforce_valid_tx(Item) end, maps:values(Map)); -``` - -### enforce_valid_tx - -Reset the data size of a data item. Assumes that the data is already normalized. -Convert a #tx record to its binary representation. -Take an item and ensure that it is of valid form. Useful for ensuring - -```erlang -enforce_valid_tx(TX) -> - ok_or_throw(TX, - check_type(TX, message), - {invalid_tx, TX} - ), - ok_or_throw(TX, - check_size(TX#tx.id, [0, 32]), - {invalid_field, id, TX#tx.id} - ), - ok_or_throw(TX, - check_size(TX#tx.unsigned_id, [0, 32]), - {invalid_field, unsigned_id, TX#tx.unsigned_id} - ), - ok_or_throw(TX, - check_size(TX#tx.anchor, [0, 32]), - {invalid_field, last_tx, TX#tx.anchor} - ), - ok_or_throw(TX, - check_size(TX#tx.owner, [0, byte_size(?DEFAULT_OWNER)]), - {invalid_field, owner, TX#tx.owner} - ), - ok_or_throw(TX, - check_size(TX#tx.target, [0, 32]), - {invalid_field, target, TX#tx.target} - ), - ok_or_throw(TX, - check_size(TX#tx.signature, [0, 65, byte_size(?DEFAULT_SIG)]), - {invalid_field, signature, TX#tx.signature} - ), - ok_or_throw(TX, - check_type(TX#tx.tags, list), - {invalid_field, tags, TX#tx.tags} - ), - lists:foreach( - fun({Name, Value}) -> - ok_or_throw(TX, - check_type(Name, binary), - {invalid_field, tag_name, Name} - ), - ok_or_throw(TX, - check_size(Name, {range, 0, ?MAX_TAG_NAME_SIZE}), - 
{invalid_field, tag_name, Name} - ), - ok_or_throw(TX, - check_type(Value, binary), - {invalid_field, tag_value, {Name, Value}} - ), - ok_or_throw(TX, - check_size(Value, {range, 0, ?MAX_TAG_VALUE_SIZE}), - {invalid_field, tag_value, {Name, Value}} - ); - (InvalidTagForm) -> - throw({invalid_field, tag, InvalidTagForm}) - end, - TX#tx.tags - ), - ok_or_throw( - TX, - check_type(TX#tx.data, binary) - orelse check_type(TX#tx.data, map) - orelse check_type(TX#tx.data, list), - {invalid_field, data, TX#tx.data} - ), - true. -``` - -### check_size - -Force that a binary is either empty or the given number of bytes. - -```erlang -check_size(Bin, {range, Start, End}) -> - check_type(Bin, binary) - andalso byte_size(Bin) >= Start - andalso byte_size(Bin) =< End; -``` - -### check_size - -Force that a binary is either empty or the given number of bytes. - -```erlang -check_size(Bin, Sizes) -> - check_type(Bin, binary) - andalso lists:member(byte_size(Bin), Sizes). -``` - -### check_type - -Ensure that a value is of the given type. - -```erlang -check_type(Value, binary) when is_binary(Value) -> true; -``` - -### check_type - -Ensure that a value is of the given type. - -```erlang -check_type(Value, _) when is_binary(Value) -> false; -``` - -### check_type - -Ensure that a value is of the given type. - -```erlang -check_type(Value, list) when is_list(Value) -> true; -``` - -### check_type - -Ensure that a value is of the given type. - -```erlang -check_type(Value, _) when is_list(Value) -> false; -``` - -### check_type - -Ensure that a value is of the given type. - -```erlang -check_type(Value, map) when is_map(Value) -> true; -``` - -### check_type - -Ensure that a value is of the given type. - -```erlang -check_type(Value, _) when is_map(Value) -> false; -``` - -### check_type - -Ensure that a value is of the given type. 
- -```erlang -check_type(Value, message) -> - is_record(Value, tx) or is_map(Value) or is_list(Value); -``` - -### check_type - -Ensure that a value is of the given type. -Throw an error if the given value is not ok. - -```erlang -check_type(_Value, _) -> false. -``` - -### ok_or_throw - -Ensure that a value is of the given type. -Throw an error if the given value is not ok. - -```erlang -ok_or_throw(_, true, _) -> true; -``` - -### ok_or_throw - -Ensure that a value is of the given type. -Throw an error if the given value is not ok. - -```erlang -ok_or_throw(_TX, false, Error) -> - throw(Error). -``` - -### update_ids - -Take an item and ensure that both the unsigned and signed IDs are - -```erlang -update_ids(Item = #tx { unsigned_id = ?DEFAULT_ID }) -> - update_ids( - Item#tx { - unsigned_id = - crypto:hash( - sha256, - data_item_signature_data(Item, unsigned) - ) - } - ); -``` - -### update_ids - -Take an item and ensure that both the unsigned and signed IDs are - -```erlang -update_ids(Item = #tx { id = ?DEFAULT_ID, signature = ?DEFAULT_SIG }) -> - Item; -``` - -### update_ids - -Take an item and ensure that both the unsigned and signed IDs are - -```erlang -update_ids(Item = #tx { signature = ?DEFAULT_SIG }) -> - Item#tx { id = ?DEFAULT_ID }; -``` - -### update_ids - -Take an item and ensure that both the unsigned and signed IDs are - -```erlang -update_ids(Item = #tx { signature = Sig }) when Sig =/= ?DEFAULT_SIG -> - Item#tx { id = crypto:hash(sha256, Sig) }; -``` - -### update_ids - -Take an item and ensure that both the unsigned and signed IDs are -Re-calculate both of the IDs for an item. This is a wrapper - -```erlang -update_ids(TX) -> TX. -``` - -### reset_ids - -Take an item and ensure that both the unsigned and signed IDs are -Re-calculate both of the IDs for an item. This is a wrapper - -```erlang -reset_ids(Item) -> - update_ids(Item#tx { unsigned_id = ?DEFAULT_ID, id = ?DEFAULT_ID }). 
-``` - -### add_bundle_tags - -```erlang -add_bundle_tags(Tags) -> ?BUNDLE_TAGS ++ (Tags -- ?BUNDLE_TAGS). -``` - -### add_list_tags - -```erlang -add_list_tags(Tags) -> - (?BUNDLE_TAGS ++ (Tags -- ?BUNDLE_TAGS)) ++ ?LIST_TAGS. -``` - -### add_manifest_tags - -```erlang -add_manifest_tags(Tags, ManifestID) -> - lists:filter( - fun - ({<<"bundle-map">>, _}) -> false; - (_) -> true - end, - Tags - ) ++ [{<<"bundle-map">>, hb_util:encode(ManifestID)}]. -``` - -### finalize_bundle_data - -```erlang -finalize_bundle_data(Processed) -> - Length = <<(length(Processed)):256/integer>>, - Index = <<<<(byte_size(Data)):256/integer, ID/binary>> || {ID, Data} <- Processed>>, - Items = <<<> || {_, Data} <- Processed>>, - <>. -``` - -### to_serialized_pair - -```erlang -to_serialized_pair(Item) when is_binary(Item) -> - % Support bundling of bare binary payloads by wrapping them in a TX that - % is explicitly marked as a binary data item. -``` - -### to_serialized_pair - -```erlang -to_serialized_pair(Item) -> - % TODO: This is a hack to get the ID of the item. We need to do this because we may not - % have the ID in 'item' if it is just a map/list. We need to make this more efficient. -``` - -### serialize_bundle_data - -```erlang -serialize_bundle_data(Map, _Manifest) when is_map(Map) -> - % TODO: Make this compatible with the normal manifest spec. -``` - -### serialize_bundle_data - -```erlang -serialize_bundle_data(List, _Manifest) when is_list(List) -> - finalize_bundle_data(lists:map(fun to_serialized_pair/1, List)); -``` - -### serialize_bundle_data - -```erlang -serialize_bundle_data(Data, _Manifest) -> - throw({cannot_serialize_tx_data, must_be_map_or_list, Data}). -``` - -### new_manifest - -```erlang -new_manifest(Index) -> - TX = normalize(#tx{ - format = ans104, - tags = [ - {<<"data-protocol">>, <<"bundle-map">>}, - {<<"variant">>, <<"0.0.1">>} - ], - data = hb_json:encode(Index) - }), - TX. 
-``` - -### manifest - -```erlang -manifest(Map) when is_map(Map) -> Map; -``` - -### manifest - -```erlang -manifest(#tx { manifest = undefined }) -> undefined; -``` - -### manifest - -```erlang -manifest(#tx { manifest = ManifestTX }) -> - hb_json:decode(ManifestTX#tx.data). -``` - -### parse_manifest - -```erlang -parse_manifest(Item) when is_record(Item, tx) -> - parse_manifest(Item#tx.data); -``` - -### parse_manifest - -```erlang -parse_manifest(Bin) -> - hb_json:decode(Bin). -``` - -### encode_signature_type - -Only RSA 4096 is currently supported. - -```erlang -encode_signature_type({rsa, 65537}) -> - <<1, 0>>; -``` - -### encode_signature_type - -Only RSA 4096 is currently supported. - -```erlang -encode_signature_type(_) -> - unsupported_tx_format. -``` - -### encode_optional_field - -Encode an optional field (target, anchor) with a presence byte. - -```erlang -encode_optional_field(<<>>) -> - <<0>>; -``` - -### encode_optional_field - -Encode an optional field (target, anchor) with a presence byte. - -```erlang -encode_optional_field(Field) -> - <<1:8/integer, Field/binary>>. -``` - -### utf8_encoded - -Encode a UTF-8 string to binary. - -```erlang -utf8_encoded(String) -> - unicode:characters_to_binary(String, utf8). -``` - -### encode_tags_size - -```erlang -encode_tags_size([], <<>>) -> - <<0:64/little-integer, 0:64/little-integer>>; -``` - -### encode_tags_size - -```erlang -encode_tags_size(Tags, EncodedTags) -> - <<(length(Tags)):64/little-integer, (byte_size(EncodedTags)):64/little-integer>>. -``` - -### encode_tags - -Encode tags into a binary format using Apache Avro. - -```erlang -encode_tags([]) -> - <<>>; -``` - -### encode_tags - -Encode tags into a binary format using Apache Avro. 
- -```erlang -encode_tags(Tags) -> - EncodedBlocks = lists:flatmap( - fun({Name, Value}) -> - Res = [encode_avro_name(Name), encode_avro_value(Value)], - case lists:member(error, Res) of - true -> - throw({cannot_encode_empty_string, Name, Value}); - false -> - Res - end - end, - Tags - ), - TagCount = length(Tags), - ZigZagCount = encode_zigzag(TagCount), - <>. -``` - -### encode_avro_name - -Encode a string for Avro using ZigZag and VInt encoding. - -```erlang -encode_avro_name(<<>>) -> - % Zero length names are treated as a special case, due to the Avro encoder. -``` - -### encode_avro_name - -```erlang -encode_avro_name(String) -> - StringBytes = utf8_encoded(String), - Length = byte_size(StringBytes), - <<(encode_zigzag(Length))/binary, StringBytes/binary>>. -``` - -### encode_avro_value - -```erlang -encode_avro_value(<<>>) -> - % Zero length values are treated as a special case, due to the Avro encoder. -``` - -### encode_avro_value - -```erlang -encode_avro_value(Value) when is_binary(Value) -> - % Tag values can be raw binaries - Length = byte_size(Value), - <<(encode_zigzag(Length))/binary, Value/binary>>. -``` - -### encode_zigzag - -Encode an integer using ZigZag encoding. - -```erlang -encode_zigzag(Int) when Int >= 0 -> - encode_vint(Int bsl 1); -``` - -### encode_zigzag - -Encode an integer using ZigZag encoding. - -```erlang -encode_zigzag(Int) -> - encode_vint(Int bsl 1, -1). -``` - -### encode_vint - -Encode a ZigZag integer to VInt binary format. - -```erlang -encode_vint(ZigZag) -> - encode_vint(ZigZag, []). -``` - -### encode_vint - -```erlang -encode_vint(0, Acc) -> - list_to_binary(lists:reverse(Acc)); -``` - -### encode_vint - -```erlang -encode_vint(ZigZag, Acc) -> - VIntByte = ZigZag band 16#7F, - ZigZagShifted = ZigZag bsr 7, - case ZigZagShifted of - 0 -> encode_vint(0, [VIntByte | Acc]); - _ -> encode_vint(ZigZagShifted, [VIntByte bor 16#80 | Acc]) - end. -``` - -### deserialize - -Convert binary data back to a #tx record. 
- -```erlang -deserialize(not_found) -> throw(not_found); -``` - -### deserialize - -Convert binary data back to a #tx record. - -```erlang -deserialize(Binary) -> deserialize(Binary, binary). -``` - -### deserialize - -Convert binary data back to a #tx record. - -```erlang -deserialize(Item, binary) when is_record(Item, tx) -> - maybe_unbundle(Item); -``` - -### deserialize - -Convert binary data back to a #tx record. - -```erlang -deserialize(Binary, binary) -> - %try - {SignatureType, Signature, Owner, Rest} = decode_signature(Binary), - {Target, Rest2} = decode_optional_field(Rest), - {Anchor, Rest3} = decode_optional_field(Rest2), - {Tags, Data} = decode_tags(Rest3), - maybe_unbundle( - reset_ids(#tx{ - format = ans104, - signature_type = SignatureType, - signature = Signature, - owner = Owner, - target = Target, - anchor = Anchor, - tags = Tags, - data = Data, - data_size = byte_size(Data) - }) - ); -%catch -% _:_:_Stack -> -% {error, invalid_item} -%end; -``` - -### deserialize - -Convert binary data back to a #tx record. - -```erlang -deserialize(Bin, json) -> - try - Map = hb_json:decode(Bin), - hb_message:convert(Map, <<"ans104@1.0">>, #{}) - catch - _:_:_Stack -> - {error, invalid_item} - end. -``` - -### maybe_unbundle - -```erlang -maybe_unbundle(Item) -> - Format = lists:keyfind(<<"bundle-format">>, 1, Item#tx.tags), - Version = lists:keyfind(<<"bundle-version">>, 1, Item#tx.tags), - case {Format, Version} of - {{<<"bundle-format">>, <<"binary">>}, {<<"bundle-version">>, <<"2.0.0">>}} -> - maybe_map_to_list(maybe_unbundle_map(Item)); - _ -> - Item - end. -``` - -### maybe_map_to_list - -```erlang -maybe_map_to_list(Item) -> - case lists:keyfind(<<"map-format">>, 1, Item#tx.tags) of - {<<"map-format">>, <<"List">>} -> - unbundle_list(Item); - _ -> - Item - end. 
-``` - -### unbundle_list - -```erlang -unbundle_list(Item) -> - Item#tx{ - data = - lists:map( - fun(Index) -> - maps:get(list_to_binary(integer_to_list(Index)), Item#tx.data) - end, - lists:seq(1, maps:size(Item#tx.data)) - ) - }. -``` - -### maybe_unbundle_map - -```erlang -maybe_unbundle_map(Bundle) -> - case lists:keyfind(<<"bundle-map">>, 1, Bundle#tx.tags) of - {<<"bundle-map">>, MapTXID} -> - case unbundle(Bundle) of - detached -> Bundle#tx { data = detached }; - Items -> - MapItem = find_single_layer(hb_util:decode(MapTXID), Items), - Map = hb_json:decode(MapItem#tx.data), - Bundle#tx{ - manifest = MapItem, - data = - maps:map( - fun(_K, TXID) -> - find_single_layer(hb_util:decode(TXID), Items) - end, - Map - ) - } - end; - _ -> - unbundle(Bundle) - end. -``` - -### find_single_layer - -An internal helper for finding an item in a single-layer of a bundle. - -```erlang -find_single_layer(UnsignedID, TX) when is_record(TX, tx) -> - find_single_layer(UnsignedID, TX#tx.data); -``` - -### find_single_layer - -An internal helper for finding an item in a single-layer of a bundle. - -```erlang -find_single_layer(UnsignedID, Items) -> - TX = lists:keyfind(UnsignedID, #tx.unsigned_id, Items), - case is_record(TX, tx) of - true -> TX; - false -> - throw({cannot_find_item, hb_util:encode(UnsignedID)}) - end. -``` - -### unbundle - -```erlang -unbundle(Item = #tx{data = <>}) -> - {ItemsBin, Items} = decode_bundle_header(Count, Content), - Item#tx{data = decode_bundle_items(Items, ItemsBin)}; -``` - -### unbundle - -```erlang -unbundle(#tx{data = <<>>}) -> detached. -``` - -### decode_bundle_items - -```erlang -decode_bundle_items([], <<>>) -> - []; -``` - -### decode_bundle_items - -```erlang -decode_bundle_items([{_ID, Size} | RestItems], ItemsBin) -> - [ - deserialize(binary:part(ItemsBin, 0, Size)) - | - decode_bundle_items( - RestItems, - binary:part( - ItemsBin, - Size, - byte_size(ItemsBin) - Size - ) - ) - ]. 
-``` - -### decode_bundle_header - -```erlang -decode_bundle_header(Count, Bin) -> decode_bundle_header(Count, Bin, []). -``` - -### decode_bundle_header - -```erlang -decode_bundle_header(0, ItemsBin, Header) -> - {ItemsBin, lists:reverse(Header)}; -``` - -### decode_bundle_header - -```erlang -decode_bundle_header(Count, <>, Header) -> - decode_bundle_header(Count - 1, Rest, [{ID, Size} | Header]). -``` - -### decode_signature - -Decode the signature from a binary format. Only RSA 4096 is currently supported. - -```erlang -decode_signature(<<1, 0, Signature:512/binary, Owner:512/binary, Rest/binary>>) -> - {{rsa, 65537}, Signature, Owner, Rest}; -``` - -### decode_signature - -Decode the signature from a binary format. Only RSA 4096 is currently supported. - -```erlang -decode_signature(Other) -> - ?event({error_decoding_signature, Other}), - unsupported_tx_format. -``` - -### decode_tags - -Decode tags from a binary format using Apache Avro. - -```erlang -decode_tags(<<0:64/little-integer, 0:64/little-integer, Rest/binary>>) -> - {[], Rest}; -``` - -### decode_tags - -Decode tags from a binary format using Apache Avro. - -```erlang -decode_tags(<<_TagCount:64/little-integer, _TagSize:64/little-integer, Binary/binary>>) -> - {Count, BlocksBinary} = decode_zigzag(Binary), - {Tags, Rest} = decode_avro_tags(BlocksBinary, Count), - %% Pull out the terminating zero - {0, Rest2} = decode_zigzag(Rest), - {Tags, Rest2}. -``` - -### decode_optional_field - -```erlang -decode_optional_field(<<0, Rest/binary>>) -> - {<<>>, Rest}; -``` - -### decode_optional_field - -```erlang -decode_optional_field(<<1:8/integer, Field:32/binary, Rest/binary>>) -> - {Field, Rest}. -``` - -### decode_avro_tags - -Decode Avro blocks (for tags) from binary. - -```erlang -decode_avro_tags(<<>>, _) -> - {[], <<>>}; -``` - -### decode_avro_tags - -Decode Avro blocks (for tags) from binary. 
- -```erlang -decode_avro_tags(Binary, Count) when Count =:= 0 -> - {[], Binary}; -``` - -### decode_avro_tags - -Decode Avro blocks (for tags) from binary. - -```erlang -decode_avro_tags(Binary, Count) -> - {NameSize, Rest} = decode_zigzag(Binary), - decode_avro_name(NameSize, Rest, Count). -``` - -### decode_avro_name - -```erlang -decode_avro_name(0, Rest, _) -> - {[], Rest}; -``` - -### decode_avro_name - -```erlang -decode_avro_name(NameSize, Rest, Count) -> - <> = Rest, - {ValueSize, Rest3} = decode_zigzag(Rest2), - decode_avro_value(ValueSize, Name, Rest3, Count). -``` - -### decode_avro_value - -```erlang -decode_avro_value(0, Name, Rest, Count) -> - {DecodedTags, NonAvroRest} = decode_avro_tags(Rest, Count - 1), - {[{Name, <<>>} | DecodedTags], NonAvroRest}; -``` - -### decode_avro_value - -```erlang -decode_avro_value(ValueSize, Name, Rest, Count) -> - <> = Rest, - {DecodedTags, NonAvroRest} = decode_avro_tags(Rest2, Count - 1), - {[{Name, Value} | DecodedTags], NonAvroRest}. -``` - -### decode_zigzag - -Decode a VInt encoded ZigZag integer from binary. - -```erlang -decode_zigzag(Binary) -> - {ZigZag, Rest} = decode_vint(Binary, 0, 0), - case ZigZag band 1 of - 1 -> {-(ZigZag bsr 1) - 1, Rest}; - 0 -> {ZigZag bsr 1, Rest} - end. -``` - -### decode_vint - -```erlang -decode_vint(<<>>, Result, _Shift) -> - {Result, <<>>}; -``` - -### decode_vint - -```erlang -decode_vint(<>, Result, Shift) -> - VIntPart = Byte band 16#7F, - NewResult = Result bor (VIntPart bsl Shift), - case Byte band 16#80 of - 0 -> {NewResult, Rest}; - _ -> decode_vint(Rest, NewResult, Shift + 7) - end. 
-``` - -### ar_bundles_test_ - -```erlang -ar_bundles_test_() -> - [ - {timeout, 30, fun test_no_tags/0}, - {timeout, 30, fun test_with_tags/0}, - {timeout, 30, fun test_with_zero_length_tag/0}, - {timeout, 30, fun test_unsigned_data_item_id/0}, - {timeout, 30, fun test_unsigned_data_item_normalization/0}, - {timeout, 30, fun test_empty_bundle/0}, - {timeout, 30, fun test_bundle_with_one_item/0}, - {timeout, 30, fun test_bundle_with_two_items/0}, - {timeout, 30, fun test_recursive_bundle/0}, - {timeout, 30, fun test_bundle_map/0}, - {timeout, 30, fun test_basic_member_id/0}, - {timeout, 30, fun test_deep_member/0}, - {timeout, 30, fun test_extremely_large_bundle/0}, - {timeout, 30, fun test_serialize_deserialize_deep_signed_bundle/0}, - {timeout, 30, fun test_encode_tags/0} - ]. -``` - -### test_encode_tags - -```erlang -test_encode_tags() -> - BinValue = <<1, 2, 3, 255, 254>>, - TestCases = [ - {simple_string_tags, [{<<"tag1">>, <<"value1">>}]}, - {binary_value_tag, [{<<"binary-tag">>, BinValue}]}, - {mixed_tags, - [ - {<<"string-tag">>, <<"string-value">>}, - {<<"binary-tag">>, BinValue} - ] - }, - {empty_value_tag, [{<<"empty-value-tag">>, <<>>}]}, - {unicode_tag, [{<<"unicode-tag">>, <<"你好世界">>}]} - ], - lists:foreach( - fun({Label, InputTags}) -> - Encoded = encode_tags(InputTags), - Wrapped = - << - (length(InputTags)):64/little, - (byte_size(Encoded)):64/little, - Encoded/binary - >>, - {DecodedTags, <<>>} = decode_tags(Wrapped), - ?assertEqual(InputTags, DecodedTags, Label) - end, - TestCases - ), - % Test case: Empty tags list - EmptyTags = [], - EncodedEmpty = encode_tags(EmptyTags), - ?assertEqual(<<>>, EncodedEmpty), - WrappedEmpty = <<0:64/little, 0:64/little>>, - {[], <<>>} = decode_tags(WrappedEmpty). -``` - -### run_test - -```erlang -run_test() -> - test_with_zero_length_tag(). 
-``` - -### test_no_tags - -```erlang -test_no_tags() -> - {Priv, Pub} = ar_wallet:new(), - {KeyType, Owner} = Pub, - Target = crypto:strong_rand_bytes(32), - Anchor = crypto:strong_rand_bytes(32), - DataItem = new_item(Target, Anchor, [], <<"data">>), - SignedDataItem = sign_item(DataItem, {Priv, Pub}), - ?assertEqual(true, verify_item(SignedDataItem)), - assert_data_item(KeyType, Owner, Target, Anchor, [], <<"data">>, SignedDataItem), - SignedDataItem2 = deserialize(serialize(SignedDataItem)), - ?assertEqual(SignedDataItem, SignedDataItem2), - ?assertEqual(true, verify_item(SignedDataItem2)), - assert_data_item(KeyType, Owner, Target, Anchor, [], <<"data">>, SignedDataItem2). -``` - -### test_with_tags - -```erlang -test_with_tags() -> - {Priv, Pub} = ar_wallet:new(), - {KeyType, Owner} = Pub, - Target = crypto:strong_rand_bytes(32), - Anchor = crypto:strong_rand_bytes(32), - Tags = [{<<"tag1">>, <<"value1">>}, {<<"tag2">>, <<"value2">>}], - DataItem = new_item(Target, Anchor, Tags, <<"taggeddata">>), - SignedDataItem = sign_item(DataItem, {Priv, Pub}), - ?assertEqual(true, verify_item(SignedDataItem)), - assert_data_item(KeyType, Owner, Target, Anchor, Tags, <<"taggeddata">>, SignedDataItem), - SignedDataItem2 = deserialize(serialize(SignedDataItem)), - ?assertEqual(SignedDataItem, SignedDataItem2), - ?assertEqual(true, verify_item(SignedDataItem2)), - assert_data_item(KeyType, Owner, Target, Anchor, Tags, <<"taggeddata">>, SignedDataItem2). -``` - -### test_with_zero_length_tag - -```erlang -test_with_zero_length_tag() -> - Item = normalize(#tx{ - format = ans104, - tags = [ - {<<"normal-tag-1">>, <<"tag1">>}, - {<<"empty-tag">>, <<>>}, - {<<"normal-tag-2">>, <<"tag2">>} - ], - data = <<"Typical data field.">> - }), - Serialized = serialize(Item), - Deserialized = deserialize(Serialized), - ?assertEqual(Item, Deserialized). 
-``` - -### test_unsigned_data_item_id - -```erlang -test_unsigned_data_item_id() -> - Item1 = deserialize( - serialize(reset_ids(#tx{format = ans104, data = <<"data1">>})) - ), - Item2 = deserialize( - serialize(reset_ids(#tx{format = ans104, data = <<"data2">>}))), - ?assertNotEqual(Item1#tx.unsigned_id, Item2#tx.unsigned_id). -``` - -### test_unsigned_data_item_normalization - -```erlang -test_unsigned_data_item_normalization() -> - NewItem = normalize(#tx{ format = ans104, data = <<"Unsigned data">> }), - ReNormItem = deserialize(serialize(NewItem)), - ?assertEqual(NewItem, ReNormItem). -``` - -### assert_data_item - -```erlang -assert_data_item(KeyType, Owner, Target, Anchor, Tags, Data, DataItem) -> - ?assertEqual(KeyType, DataItem#tx.signature_type), - ?assertEqual(Owner, DataItem#tx.owner), - ?assertEqual(Target, DataItem#tx.target), - ?assertEqual(Anchor, DataItem#tx.anchor), - ?assertEqual(Tags, DataItem#tx.tags), - ?assertEqual(Data, DataItem#tx.data), - ?assertEqual(byte_size(Data), DataItem#tx.data_size). -``` - -### test_empty_bundle - -```erlang -test_empty_bundle() -> - Bundle = serialize([]), - BundleItem = deserialize(Bundle), - ?assertEqual(#{}, BundleItem#tx.data). -``` - -### test_bundle_with_one_item - -```erlang -test_bundle_with_one_item() -> - Item = new_item( - crypto:strong_rand_bytes(32), - crypto:strong_rand_bytes(32), - [], - ItemData = crypto:strong_rand_bytes(1000) - ), - ?event({item, Item}), - Bundle = serialize([Item]), - ?event({bundle, Bundle}), - BundleItem = deserialize(Bundle), - ?event({bundle_item, BundleItem}), - ?assertEqual(ItemData, (maps:get(<<"1">>, BundleItem#tx.data))#tx.data). 
-``` - -### test_bundle_with_two_items - -```erlang -test_bundle_with_two_items() -> - Item1 = new_item( - crypto:strong_rand_bytes(32), - crypto:strong_rand_bytes(32), - [], - ItemData1 = crypto:strong_rand_bytes(32) - ), - Item2 = new_item( - crypto:strong_rand_bytes(32), - crypto:strong_rand_bytes(32), - [{<<"tag1">>, <<"value1">>}, {<<"tag2">>, <<"value2">>}], - ItemData2 = crypto:strong_rand_bytes(32) - ), - Bundle = serialize([Item1, Item2]), - BundleItem = deserialize(Bundle), - ?assertEqual(ItemData1, (maps:get(<<"1">>, BundleItem#tx.data))#tx.data), - ?assertEqual(ItemData2, (maps:get(<<"2">>, BundleItem#tx.data))#tx.data). -``` - -### test_recursive_bundle - -```erlang -test_recursive_bundle() -> - W = ar_wallet:new(), - Item1 = sign_item(#tx{ - id = crypto:strong_rand_bytes(32), - anchor = crypto:strong_rand_bytes(32), - data = <<1:256/integer>> - }, W), - Item2 = sign_item(#tx{ - id = crypto:strong_rand_bytes(32), - anchor = crypto:strong_rand_bytes(32), - data = [Item1] - }, W), - Item3 = sign_item(#tx{ - id = crypto:strong_rand_bytes(32), - anchor = crypto:strong_rand_bytes(32), - data = [Item2] - }, W), - Bundle = serialize([Item3]), - BundleItem = deserialize(Bundle), - #{<<"1">> := UnbundledItem3} = BundleItem#tx.data, - #{<<"1">> := UnbundledItem2} = UnbundledItem3#tx.data, - #{<<"1">> := UnbundledItem1} = UnbundledItem2#tx.data, - ?assert(verify_item(UnbundledItem1)), - % TODO: Verify bundled lists... -``` - -### test_bundle_map - -```erlang -test_bundle_map() -> - W = ar_wallet:new(), - Item1 = sign_item(#tx{ - format = ans104, - data = <<"item1_data">> - }, W), - Item2 = sign_item(#tx{ - format = ans104, - anchor = crypto:strong_rand_bytes(32), - data = #{<<"key1">> => Item1} - }, W), - Bundle = serialize(Item2), - BundleItem = deserialize(Bundle), - ?assertEqual(Item1#tx.data, (maps:get(<<"key1">>, BundleItem#tx.data))#tx.data), - ?assert(verify_item(BundleItem)). 
-``` - -### test_extremely_large_bundle - -```erlang -test_extremely_large_bundle() -> - W = ar_wallet:new(), - Data = crypto:strong_rand_bytes(100_000_000), - Norm = normalize(#tx { data = #{ <<"key">> => #tx { data = Data } } }), - Signed = sign_item(Norm, W), - Serialized = serialize(Signed), - Deserialized = deserialize(Serialized), - ?assert(verify_item(Deserialized)). -``` - -### test_basic_member_id - -```erlang -test_basic_member_id() -> - W = ar_wallet:new(), - Item = sign_item( - #tx{ - data = <<"data">> - }, - W - ), - ?assertEqual(true, member(Item#tx.id, Item)), - ?assertEqual(true, member(id(Item, unsigned), Item)), - ?assertEqual(false, member(crypto:strong_rand_bytes(32), Item)). -``` - -### test_deep_member - -```erlang -test_deep_member() -> - W = ar_wallet:new(), - Item = sign_item( - #tx{ - data = - #{<<"key1">> => - sign_item(#tx{ - data = <<"data">> - }, W) - } - }, - W - ), - Item2 = deserialize(serialize(sign_item( - #tx{ - data = #{ <<"key2">> => Item } - }, - W - ))), - ?assertEqual(true, member(<<"key1">>, Item2)), - ?assertEqual(true, member(<<"key2">>, Item2)), - ?assertEqual(true, member(Item#tx.id, Item2)), - ?assertEqual(true, member(Item2#tx.id, Item2)), - ?assertEqual(true, member(id(Item, unsigned), Item2)), - ?assertEqual(true, member(id(Item2, unsigned), Item2)), - ?assertEqual(false, member(crypto:strong_rand_bytes(32), Item2)). -``` - -### test_serialize_deserialize_deep_signed_bundle - -```erlang -test_serialize_deserialize_deep_signed_bundle() -> - W = ar_wallet:new(), - % Test that we can serialize, deserialize, and get the same IDs back. 
-``` - ---- - -*Generated from [ar_bundles.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_bundles.erl)* diff --git a/docs/book/src/ar_deep_hash.erl.md b/docs/book/src/ar_deep_hash.erl.md deleted file mode 100644 index 3cf098728..000000000 --- a/docs/book/src/ar_deep_hash.erl.md +++ /dev/null @@ -1,62 +0,0 @@ -# ar_deep_hash - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_deep_hash.erl) - -INTERNAL - ---- - -## Exported Functions - -- `hash/1` - ---- - -### hash - -```erlang -hash(List) when is_list(List) -> hash_bin_or_list(List). -%%% INTERNAL -``` - -### hash_bin_or_list - -```erlang -hash_bin_or_list(Bin) when is_binary(Bin) -> - Tag = <<"blob", (integer_to_binary(byte_size(Bin)))/binary>>, - hash_bin(<<(hash_bin(Tag))/binary, (hash_bin(Bin))/binary>>); -``` - -### hash_bin_or_list - -```erlang -hash_bin_or_list(List) when is_list(List) -> - Tag = <<"list", (integer_to_binary(length(List)))/binary>>, - hash_list(List, hash_bin(Tag)). -``` - -### hash_list - -```erlang -hash_list([], Acc) -> - Acc; -``` - -### hash_list - -```erlang -hash_list([Head | List], Acc) -> - HashPair = <>, - NewAcc = hash_bin(HashPair), - hash_list(List, NewAcc). -``` - -### hash_bin - -```erlang -hash_bin(Bin) when is_binary(Bin) -> -``` - ---- - -*Generated from [ar_deep_hash.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_deep_hash.erl)* diff --git a/docs/book/src/ar_rate_limiter.erl.md b/docs/book/src/ar_rate_limiter.erl.md deleted file mode 100644 index 70f86b8c9..000000000 --- a/docs/book/src/ar_rate_limiter.erl.md +++ /dev/null @@ -1,187 +0,0 @@ -# ar_rate_limiter - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_rate_limiter.erl) - -=================================================================== -Public interface. 
-=================================================================== - ---- - -## Exported Functions - -- `handle_call/3` -- `handle_cast/2` -- `handle_info/2` -- `init/1` -- `off/0` -- `on/0` -- `start_link/1` -- `terminate/2` -- `throttle/3` - ---- - -### start_link - -```erlang -start_link(Opts) -> - gen_server:start_link({local, ?MODULE}, ?MODULE, Opts, []). -``` - -### throttle - -Hang until it is safe to make another request to the given Peer with the - -```erlang -throttle(Peer, Path, Opts) -> - case lists:member(Peer, hb_opts:get(throttle_exempt_peers, [], Opts)) of - true -> - ok; - false -> - throttle2(Peer, Path, Opts) - end. -``` - -### throttle2 - -```erlang -throttle2(Peer, Path, Opts) -> - Routes = hb_opts:get(throttle_exempt_paths, [], Opts), - IsExempt = - lists:any(fun(Route) -> hb_path:regex_matches(Path, Route) end, Routes), - case IsExempt of - true -> ok; - false -> - Res = catch gen_server:call(?MODULE, {throttle, Peer, Path}, infinity), - case Res of - {'EXIT', {noproc, {gen_server, call, _}}} -> - ok; - {'EXIT', Reason} -> - exit(Reason); - _ -> - ok - end - end. -``` - -### off - -Turn rate limiting off. - -```erlang -off() -> - gen_server:cast(?MODULE, turn_off). -``` - -### on - -Turn rate limiting on. - -```erlang -on() -> - gen_server:cast(?MODULE, turn_on). -``` - -### init - -```erlang -init(Opts) -> - process_flag(trap_exit, true), - {ok, #state{ traces = #{}, off = false, opts = Opts }}. -``` - -### handle_call - -```erlang -handle_call({throttle, _Peer, _Path}, _From, #state{ off = true } = State) -> - {reply, ok, State}; -``` - -### handle_call - -```erlang -handle_call({throttle, Peer, Path}, From, State) -> - gen_server:cast(?MODULE, {throttle, Peer, Path, From}), - {noreply, State}; -``` - -### handle_call - -```erlang -handle_call(Request, _From, State) -> - ?event(warning, {unhandled_call, {module, ?MODULE}, {request, Request}}), - {reply, ok, State}. 
-``` - -### handle_cast - -```erlang -handle_cast({throttle, Peer, Path, From}, State) -> - #state{ traces = Traces, opts = Opts } = State, - {Type, Limit} = hb_opts:get(throttle_rpm_by_path, Path, Opts), - Now = os:system_time(millisecond), - case hb_maps:get({Peer, Type}, Traces, not_found, Opts) of - not_found -> - gen_server:reply(From, ok), - Traces2 = hb_maps:put({Peer, Type}, {1, queue:from_list([Now])}, Traces, Opts), - {noreply, State#state{ traces = Traces2 }}; - {N, Trace} -> - {N2, Trace2} = cut_trace(N, queue:in(Now, Trace), Now, Opts), - %% The macro specifies requests per minute while the throttling window - %% is 30 seconds. -``` - -### handle_cast - -```erlang -handle_cast(turn_off, State) -> - {noreply, State#state{ off = true }}; -``` - -### handle_cast - -```erlang -handle_cast(turn_on, State) -> - {noreply, State#state{ off = false }}; -``` - -### handle_cast - -```erlang -handle_cast(Cast, State) -> - ?event(warning, {unhandled_cast, {module, ?MODULE}, {cast, Cast}}), - {noreply, State}. -``` - -### handle_info - -```erlang -handle_info(Message, State) -> - ?event(warning, {unhandled_info, {module, ?MODULE}, {message, Message}}), - {noreply, State}. -``` - -### terminate - -```erlang -terminate(_Reason, _State) -> - ok. -``` - -### cut_trace - -```erlang -cut_trace(N, Trace, Now, Opts) -> - {{value, Timestamp}, Trace2} = queue:out(Trace), - case Timestamp < Now - hb_opts:get(throttle_period, 30000, Opts) of - true -> - cut_trace(N - 1, Trace2, Now, Opts); - false -> - {N, Trace} - end. 
-``` - ---- - -*Generated from [ar_rate_limiter.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_rate_limiter.erl)* diff --git a/docs/book/src/ar_timestamp.erl.md b/docs/book/src/ar_timestamp.erl.md deleted file mode 100644 index ddf4894c6..000000000 --- a/docs/book/src/ar_timestamp.erl.md +++ /dev/null @@ -1,97 +0,0 @@ -# ar_timestamp - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_timestamp.erl) - -A simple Erlang server that caches the current Arweave timestamp and -refreshes it periodically. - ---- - -## Exported Functions - -- `get/0` -- `start/0` - ---- - -### start - -Check if the server is already running, and if not, start it. - -```erlang -start() -> - ?event(starting_ar_timestamp_server), - case whereis(?MODULE) of - undefined -> spawn_server(); - PID -> - case is_process_alive(PID) of - true -> PID; - false -> spawn_server() - end - end. -``` - -### spawn_server - -Spawn a new server and its refresher. - -```erlang -spawn_server() -> - TSServer = - spawn(fun() -> cache(hb_client:arweave_timestamp()) end), - spawn(fun() -> refresher(TSServer) end), - register(?MODULE, TSServer), - TSServer. -``` - -### get - -Get the current timestamp from the server, starting the server if it - -```erlang -get() -> - ?event(getting_ar_timestamp), - PID = start(), - ?event({got_ar_timestamp_pid, PID}), - PID ! {get, self()}, - ?event(waiting_for_ar_timestamp), - receive - {timestamp, Timestamp} -> - ?event({got_ar_timestamp, Timestamp}), - Timestamp - end. -``` - -### cache - -Cache the current timestamp from Arweave. - -```erlang -cache(Current) -> - ?event(cache_waiting), - receive - {get, Pid} -> - ?event({got_get_request, Pid}), - Pid ! {timestamp, Current}, - ?event({sent_timestamp, Current}), - cache(Current); - {refresh, New} -> - ?event({refreshed_ar_timestamp, New}), - cache(New) - end. -``` - -### refresher - -Refresh the timestamp cache periodically. 
- -```erlang -refresher(TSServer) -> - timer:sleep(?TIMEOUT), - TS = hb_client:arweave_timestamp(), - TSServer ! {refresh, TS}, -``` - ---- - -*Generated from [ar_timestamp.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_timestamp.erl)* diff --git a/docs/book/src/ar_tx.erl.md b/docs/book/src/ar_tx.erl.md deleted file mode 100644 index 72b14d7cb..000000000 --- a/docs/book/src/ar_tx.erl.md +++ /dev/null @@ -1,259 +0,0 @@ -# ar_tx - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_tx.erl) - -The module with utilities for transaction creation, signing, and verification. - ---- - -## Exported Functions - -- `json_struct_to_tx/1` -- `new/4` -- `new/5` -- `sign/2` -- `tx_to_json_struct/1` -- `verify_tx_id/2` -- `verify/1` - ---- - -### new - -The module with utilities for transaction creation, signing, and verification. -Create a new transaction. - -```erlang -new(Dest, Reward, Qty, Last) -> - #tx{ - id = crypto:strong_rand_bytes(32), - anchor = Last, - quantity = Qty, - target = Dest, - data = <<>>, - data_size = 0, - reward = Reward - }. -``` - -### new - -```erlang -new(Dest, Reward, Qty, Last, SigType) -> - #tx{ - id = crypto:strong_rand_bytes(32), - anchor = Last, - quantity = Qty, - target = Dest, - data = <<>>, - data_size = 0, - reward = Reward, - signature_type = SigType - }. -``` - -### sign - -Cryptographically sign (claim ownership of) a transaction. - -```erlang -sign(TX, {PrivKey, {KeyType, Owner}}) -> - NewTX = TX#tx{ owner = Owner, signature_type = KeyType }, - Sig = ar_wallet:sign(PrivKey, signature_data_segment(NewTX)), - ID = crypto:hash(sha256, <>), - NewTX#tx{ id = ID, signature = Sig }. -``` - -### verify - -Verify whether a transaction is valid. - -```erlang -verify(TX) -> - do_verify(TX, verify_signature). -``` - -### verify_tx_id - -Verify the given transaction actually has the given identifier. 
- -```erlang -verify_tx_id(ExpectedID, #tx{ id = ID } = TX) -> - ExpectedID == ID andalso verify_signature(TX, verify_signature) andalso verify_hash(TX). -``` - -### signature_data_segment - -Generate the data segment to be signed for a given TX. - -```erlang -signature_data_segment(TX) -> - List = [ - << (integer_to_binary(TX#tx.format))/binary >>, - << (TX#tx.owner)/binary >>, - << (TX#tx.target)/binary >>, - << (list_to_binary(integer_to_list(TX#tx.quantity)))/binary >>, - << (list_to_binary(integer_to_list(TX#tx.reward)))/binary >>, - << (TX#tx.anchor)/binary >>, - << (integer_to_binary(TX#tx.data_size))/binary >>, - << (TX#tx.data_root)/binary >> - ], - ar_deep_hash:hash(List). -``` - -### verify_signature - -Verify the transaction's signature. - -```erlang -verify_signature(TX = #tx{ signature_type = SigType }, verify_signature) -> - SignatureDataSegment = signature_data_segment(TX), - ar_wallet:verify({SigType, TX#tx.owner}, SignatureDataSegment, TX#tx.signature). -``` - -### verify_hash - -Verify that the transaction's ID is a hash of its signature. - -```erlang -verify_hash(#tx{ signature = Sig, id = ID }) -> - ID == crypto:hash(sha256, << Sig/binary >>). -``` - -### do_verify - -Verify transaction. - -```erlang -do_verify(TX, VerifySignature) -> - From = ar_wallet:to_address(TX#tx.owner, TX#tx.signature_type), - Checks = [ - {"quantity_negative", TX#tx.quantity >= 0}, - {"same_owner_as_target", (From =/= TX#tx.target)}, - {"tx_id_not_valid", verify_hash(TX)}, - {"tx_signature_not_valid", verify_signature(TX, VerifySignature)}, - {"tx_data_size_negative", TX#tx.data_size >= 0}, - {"tx_data_size_data_root_mismatch", (TX#tx.data_size == 0) == (TX#tx.data_root == <<>>)} - ], - collect_validation_results(TX#tx.id, Checks). 
-``` - -### collect_validation_results - -```erlang -collect_validation_results(_TXID, Checks) -> - KeepFailed = fun - ({_, true}) -> false; - ({ErrorCode, false}) -> {true, ErrorCode} - end, - case lists:filtermap(KeepFailed, Checks) of - [] -> true; - _ -> false - end. -``` - -### json_struct_to_tx - -```erlang -json_struct_to_tx(TXStruct) -> - Tags = - case hb_util:find_value(<<"tags">>, TXStruct) of - undefined -> - []; - Xs -> - Xs - end, - Data = hb_util:decode(hb_util:find_value(<<"data">>, TXStruct)), - Format = - case hb_util:find_value(<<"format">>, TXStruct) of - undefined -> - 1; - N when is_integer(N) -> - N; - N when is_binary(N) -> - binary_to_integer(N) - end, - Denomination = - case hb_util:find_value(<<"denomination">>, TXStruct) of - undefined -> - 0; - EncodedDenomination -> - MaybeDenomination = binary_to_integer(EncodedDenomination), - true = MaybeDenomination > 0, - MaybeDenomination - end, - TXID = hb_util:decode(hb_util:find_value(<<"id">>, TXStruct)), - 32 = byte_size(TXID), - #tx{ - format = Format, - id = TXID, - anchor = hb_util:decode(hb_util:find_value(<<"anchor">>, TXStruct)), - owner = hb_util:decode(hb_util:find_value(<<"owner">>, TXStruct)), - tags = [{hb_util:decode(Name), hb_util:decode(Value)} - %% Only the elements matching this pattern are included in the list. 
-``` - -### tx_to_json_struct - -```erlang -tx_to_json_struct( - #tx{ - id = ID, - format = Format, - anchor = Anchor, - owner = Owner, - tags = Tags, - target = Target, - quantity = Quantity, - data = Data, - reward = Reward, - signature = Sig, - data_size = DataSize, - data_root = DataRoot, - denomination = Denomination - }) -> - Fields = [ - {format, - case Format of - undefined -> - 1; - _ -> - Format - end}, - {id, hb_util:encode(ID)}, - {anchor, hb_util:encode(Anchor)}, - {owner, hb_util:encode(Owner)}, - {tags, - lists:map( - fun({Name, Value}) -> - { - [ - {name, hb_util:encode(Name)}, - {value, hb_util:encode(Value)} - ] - } - end, - Tags - ) - }, - {target, hb_util:encode(Target)}, - {quantity, integer_to_binary(Quantity)}, - {data, hb_util:encode(Data)}, - {data_size, integer_to_binary(DataSize)}, - {data_tree, []}, - {data_root, hb_util:encode(DataRoot)}, - {reward, integer_to_binary(Reward)}, - {signature, hb_util:encode(Sig)} - ], - Fields2 = - case Denomination > 0 of - true -> - Fields ++ [{denomination, integer_to_binary(Denomination)}]; - false -> - Fields - end, -``` - ---- - -*Generated from [ar_tx.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_tx.erl)* diff --git a/docs/book/src/ar_wallet.erl.md b/docs/book/src/ar_wallet.erl.md deleted file mode 100644 index 390076def..000000000 --- a/docs/book/src/ar_wallet.erl.md +++ /dev/null @@ -1,456 +0,0 @@ -# ar_wallet - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_wallet.erl) - -Utilities for manipulating wallets. - ---- - -## Exported Functions - -- `from_json/1` -- `from_json/2` -- `hmac/1` -- `hmac/2` -- `load_key/1` -- `load_key/2` -- `load_keyfile/1` -- `load_keyfile/2` -- `new_keyfile/2` -- `new/0` -- `new/1` -- `sign/2` -- `sign/3` -- `to_address/1` -- `to_address/2` -- `to_json/1` -- `to_pubkey/1` -- `to_pubkey/2` -- `verify/3` -- `verify/4` - ---- - -### new - -Utilities for manipulating wallets. - -```erlang -new() -> - new({rsa, 65537}). 
-``` - -### new - -```erlang -new(KeyType = {KeyAlg, PublicExpnt}) when KeyType =:= {rsa, 65537} -> - {[_, Pub], [_, Pub, Priv|_]} = {[_, Pub], [_, Pub, Priv|_]} - = crypto:generate_key(KeyAlg, {4096, PublicExpnt}), - {{KeyType, Priv, Pub}, {KeyType, Pub}}. -``` - -### sign - -Sign some data with a private key. - -```erlang -sign(Key, Data) -> - sign(Key, Data, sha256). -``` - -### sign - -sign some data, hashed using the provided DigestType. - -```erlang -sign({{rsa, PublicExpnt}, Priv, Pub}, Data, DigestType) when PublicExpnt =:= 65537 -> - rsa_pss:sign( - Data, - DigestType, - #'RSAPrivateKey'{ - publicExponent = PublicExpnt, - modulus = binary:decode_unsigned(Pub), - privateExponent = binary:decode_unsigned(Priv) - } - ); -``` - -### sign - -sign some data, hashed using the provided DigestType. - -```erlang -sign({{KeyType, Priv, Pub}, {KeyType, Pub}}, Data, DigestType) -> - sign({KeyType, Priv, Pub}, Data, DigestType). -``` - -### hmac - -```erlang -hmac(Data) -> - hmac(Data, sha256). -``` - -### hmac - -Verify that a signature is correct. - -```erlang -hmac(Data, DigestType) -> crypto:mac(hmac, DigestType, <<"ar">>, Data). -``` - -### verify - -Verify that a signature is correct. - -```erlang -verify(Key, Data, Sig) -> - verify(Key, Data, Sig, sha256). -``` - -### verify - -```erlang -verify({{rsa, PublicExpnt}, Pub}, Data, Sig, DigestType) when PublicExpnt =:= 65537 -> - rsa_pss:verify( - Data, - DigestType, - Sig, - #'RSAPublicKey'{ - publicExponent = PublicExpnt, - modulus = binary:decode_unsigned(Pub) - } - ). -``` - -### to_pubkey - -Find a public key from a wallet. - -```erlang -to_pubkey(Pubkey) -> - to_pubkey(Pubkey, ?DEFAULT_KEY_TYPE). -``` - -### to_pubkey - -```erlang -to_pubkey(PubKey, {rsa, 65537}) when bit_size(PubKey) == 256 -> - % Small keys are not secure, nobody is using them, the clause - % is for backwards-compatibility. 
-``` - -### to_pubkey - -```erlang -to_pubkey({{_, _, PubKey}, {_, PubKey}}, {rsa, 65537}) -> - PubKey; -``` - -### to_pubkey - -```erlang -to_pubkey(PubKey, {rsa, 65537}) -> - PubKey. -``` - -### to_address - -Generate an address from a public key. - -```erlang -to_address(Pubkey) -> - to_address(Pubkey, ?DEFAULT_KEY_TYPE). -``` - -### to_address - -```erlang -to_address(PubKey, {rsa, 65537}) when bit_size(PubKey) == 256 -> - PubKey; -``` - -### to_address - -```erlang -to_address({{_, _, PubKey}, {_, PubKey}}, _) -> - to_address(PubKey); -``` - -### to_address - -```erlang -to_address(PubKey, {rsa, 65537}) -> - to_rsa_address(PubKey); -``` - -### to_address - -```erlang -to_address(PubKey, {ecdsa, 256}) -> - to_ecdsa_address(PubKey). -``` - -### new_keyfile - -Generate a new wallet public and private key, with a corresponding keyfile. - -```erlang -new_keyfile(KeyType, WalletName) when is_list(WalletName) -> - new_keyfile(KeyType, list_to_binary(WalletName)); -``` - -### new_keyfile - -Generate a new wallet public and private key, with a corresponding keyfile. - -```erlang -new_keyfile(KeyType, WalletName) -> - {Pub, Priv, Key} = - case KeyType of - {?RSA_SIGN_ALG, PublicExpnt} -> - {[Expnt, Pb], [Expnt, Pb, Prv, P1, P2, E1, E2, C]} = - crypto:generate_key(rsa, {?RSA_PRIV_KEY_SZ, PublicExpnt}), - PrivKey = {KeyType, Prv, Pb}, - Ky = to_json(PrivKey), - {Pb, Prv, Ky}; - {?ECDSA_SIGN_ALG, secp256k1} -> - {OrigPub, Prv} = crypto:generate_key(ecdh, secp256k1), - CompressedPub = compress_ecdsa_pubkey(OrigPub), - PrivKey = {KeyType, Prv, CompressedPub}, - Ky = to_json(PrivKey), - {CompressedPub, Prv, Ky}; - {?EDDSA_SIGN_ALG, ed25519} -> - {{_, Prv, Pb}, _} = new(KeyType), - PrivKey = {KeyType, Prv, Pb}, - Ky = to_json(PrivKey), - {Pb, Prv, Ky} - end, - Filename = wallet_filepath(WalletName, Pub, KeyType), - filelib:ensure_dir(Filename), - file:write_file(Filename, Key), - {{KeyType, Priv, Pub}, {KeyType, Pub}}. 
-``` - -### wallet_filepath - -```erlang -wallet_filepath(Wallet) -> - filename:join([?WALLET_DIR, binary_to_list(Wallet)]). -``` - -### wallet_filepath2 - -```erlang -wallet_filepath2(Wallet) -> - filename:join([?WALLET_DIR, binary_to_list(Wallet)]). -``` - -### load_key - -Read the keyfile for the key with the given address from disk. - -```erlang -load_key(Addr) -> - load_key(Addr, #{}). -``` - -### load_key - -Read the keyfile for the key with the given address from disk. - -```erlang -load_key(Addr, Opts) -> - Path = hb_util:encode(Addr), - case filelib:is_file(Path) of - false -> - Path2 = wallet_filepath2(hb_util:encode(Addr)), - case filelib:is_file(Path2) of - false -> - not_found; - true -> - load_keyfile(Path2, Opts) - end; - true -> - load_keyfile(Path, Opts) - end. -``` - -### load_keyfile - -Extract the public and private key from a keyfile. - -```erlang -load_keyfile(File) -> - load_keyfile(File, #{}). -``` - -### load_keyfile - -Extract the public and private key from a keyfile. - -```erlang -load_keyfile(File, Opts) -> - {ok, Body} = file:read_file(File), - from_json(Body, Opts). 
-``` - -### to_json - -Convert a wallet private key to JSON (JWK) format - -```erlang -to_json({PrivKey, _PubKey}) -> - to_json(PrivKey); -``` - -### to_json - -Convert a wallet private key to JSON (JWK) format - -```erlang -to_json({{?RSA_SIGN_ALG, PublicExpnt}, Priv, Pub}) when PublicExpnt =:= 65537 -> - hb_json:encode(#{ - kty => <<"RSA">>, - ext => true, - e => hb_util:encode(<>), - n => hb_util:encode(Pub), - d => hb_util:encode(Priv) - }); -``` - -### to_json - -Convert a wallet private key to JSON (JWK) format - -```erlang -to_json({{?ECDSA_SIGN_ALG, secp256k1}, Priv, CompressedPub}) -> - % For ECDSA, we need to expand the compressed pubkey to get X,Y coordinates - % This is a simplified version - ideally we'd implement pubkey expansion - hb_json:encode(#{ - kty => <<"EC">>, - crv => <<"secp256k1">>, - d => hb_util:encode(Priv) - % TODO: Add x and y coordinates from expanded pubkey - }); -``` - -### to_json - -Convert a wallet private key to JSON (JWK) format - -```erlang -to_json({{?EDDSA_SIGN_ALG, ed25519}, Priv, Pub}) -> - hb_json:encode(#{ - kty => <<"OKP">>, - alg => <<"EdDSA">>, - crv => <<"Ed25519">>, - x => hb_util:encode(Pub), - d => hb_util:encode(Priv) - }). -``` - -### from_json - -Parse a wallet from JSON (JWK) format - -```erlang -from_json(JsonBinary) -> - from_json(JsonBinary, #{}). 
-``` - -### from_json - -Parse a wallet from JSON (JWK) format with options - -```erlang -from_json(JsonBinary, Opts) -> - Key = hb_json:decode(JsonBinary), - {Pub, Priv, KeyType} = - case hb_maps:get(<<"kty">>, Key, undefined, Opts) of - <<"EC">> -> - XEncoded = hb_maps:get(<<"x">>, Key, undefined, Opts), - YEncoded = hb_maps:get(<<"y">>, Key, undefined, Opts), - PrivEncoded = hb_maps:get(<<"d">>, Key, undefined, Opts), - OrigPub = iolist_to_binary([<<4:8>>, hb_util:decode(XEncoded), - hb_util:decode(YEncoded)]), - Pb = compress_ecdsa_pubkey(OrigPub), - Prv = hb_util:decode(PrivEncoded), - KyType = {?ECDSA_SIGN_ALG, secp256k1}, - {Pb, Prv, KyType}; - <<"OKP">> -> - PubEncoded = hb_maps:get(<<"x">>, Key, undefined, Opts), - PrivEncoded = hb_maps:get(<<"d">>, Key, undefined, Opts), - Pb = hb_util:decode(PubEncoded), - Prv = hb_util:decode(PrivEncoded), - KyType = {?EDDSA_SIGN_ALG, ed25519}, - {Pb, Prv, KyType}; - _ -> - PubEncoded = hb_maps:get(<<"n">>, Key, undefined, Opts), - PrivEncoded = hb_maps:get(<<"d">>, Key, undefined, Opts), - Pb = hb_util:decode(PubEncoded), - Prv = hb_util:decode(PrivEncoded), - KyType = {?RSA_SIGN_ALG, 65537}, - {Pb, Prv, KyType} - end, - {{KeyType, Priv, Pub}, {KeyType, Pub}}. -``` - -### to_rsa_address - -```erlang -to_rsa_address(PubKey) -> - hash_address(PubKey). -``` - -### hash_address - -```erlang -hash_address(PubKey) -> - crypto:hash(sha256, PubKey). -``` - -### to_ecdsa_address - -```erlang -to_ecdsa_address(PubKey) -> - hb_keccak:key_to_ethereum_address(PubKey). -``` - -### wallet_filepath - -```erlang -wallet_filepath(WalletName, PubKey, KeyType) -> - wallet_filepath(wallet_name(WalletName, PubKey, KeyType)). -``` - -### wallet_name - -```erlang -wallet_name(wallet_address, PubKey, KeyType) -> - hb_util:encode(to_address(PubKey, KeyType)); -``` - -### wallet_name - -```erlang -wallet_name(WalletName, _, _) -> - WalletName. 
-``` - -### compress_ecdsa_pubkey - -```erlang -compress_ecdsa_pubkey(<<4:8, PubPoint/binary>>) -> - PubPointMid = byte_size(PubPoint) div 2, - <> = PubPoint, - PubKeyHeader = - case Y rem 2 of - 0 -> <<2:8>>; - 1 -> <<3:8>> - end, -``` - ---- - -*Generated from [ar_wallet.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/ar_wallet.erl)* diff --git a/docs/book/src/dev_apply.erl.md b/docs/book/src/dev_apply.erl.md deleted file mode 100644 index 7b838406c..000000000 --- a/docs/book/src/dev_apply.erl.md +++ /dev/null @@ -1,350 +0,0 @@ -# dev_apply - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_apply.erl) - -A device that executes AO resolutions. It can be passed a key that -refers to a path stored in the base message to execute upon the base or -message referenced by the `source` key. -Alternatively, a `base` and `request` pair can be passed to execute -together via invoking the `pair` key. -When given a message with a `base` and `request` key, the default handler -will invoke `pair` upon it, setting the `path` in the resulting request to -the key that `apply` was invoked with. -Paths found in keys interpreted by this device can contain a `base:` or -`request:` prefix to indicate the message from which the path should be -retrieved. If no such prefix is present, the `Request` message is checked -first, and the `Base` message is checked second. - ---- - -## Exported Functions - -- `default/4` -- `info/1` -- `pair/3` - ---- - -### info - -A device that executes AO resolutions. It can be passed a key that -The device info. Forwards all keys aside `pair`, `keys` and `set` are - -```erlang -info(_) -> - #{ - excludes => [<<"keys">>, <<"set">>, <<"set_path">>, <<"remove">>], - default => fun default/4 - }. -``` - -### default - -The default handler. 
If the `base` and `request` keys are present in - -```erlang -default(Key, Base, Request, Opts) -> - ?event(debug_apply, {req, {key, Key}, {base, Base}, {request, Request}}), - FoundBase = hb_maps:get(<<"base">>, Request, not_found, Opts), - FoundRequest = hb_maps:get(<<"request">>, Request, not_found, Opts), - case {FoundBase, FoundRequest} of - {B, R} when B =/= not_found andalso R =/= not_found -> - pair(Key, Base, Request, Opts); - _ -> - eval(Base, Request#{ <<"apply-path">> => Key }, Opts) - end. -``` - -### eval - -Apply a request. We source the `base` message for the request either - -```erlang -eval(Base, Request, Opts) -> - maybe - ?event({eval, {base, Base}, {request, Request}}), - {ok, ApplyBase} ?= - case find_path(<<"source">>, Base, Request, Opts) of - {ok, SourcePath} -> - find_key(SourcePath, Base, Request, Opts); - {error, path_not_found, _} -> - % If the base is not found, we return the base for this - % request, minus the device (which will, inherently, be - % `apply@1.0' and cause recursion). -``` - -### pair - -Apply the message found at `request` to the message found at `base`. - -```erlang -pair(Base, Request, Opts) -> - pair(<<"undefined">>, Base, Request, Opts). -``` - -### pair - -Apply the message found at `request` to the message found at `base`. 
- -```erlang -pair(PathToSet, Base, Request, Opts) -> - maybe - {ok, RequestPath} ?= find_path(<<"request">>, Base, Request, Opts), - {ok, BasePath} ?= find_path(<<"base">>, Base, Request, Opts), - ?event({eval_pair, {base_source, BasePath}, {request_source, RequestPath}}), - {ok, RequestSource} ?= find_key(RequestPath, Base, Request, Opts), - {ok, BaseSource} ?= find_key(BasePath, Base, Request, Opts), - PreparedRequest = - case PathToSet of - <<"undefined">> -> RequestSource; - _ -> RequestSource#{ <<"path">> => PathToSet } - end, - ?event({eval_pair, {base, BaseSource}, {request, PreparedRequest}}), - hb_ao:resolve(BaseSource, PreparedRequest, Opts) - else - Error -> error_to_message(Error) - end. -``` - -### find_path - -Resolve the given path on the message as `message@1.0`. - -```erlang -find_path(Path, Base, Request, Opts) -> - Res = - hb_ao:get_first( - [ - {{as, <<"message@1.0">>, Request}, Path}, - {{as, <<"message@1.0">>, Base}, Path} - ], - path_not_found, - Opts - ), - case Res of - path_not_found -> {error, path_not_found, Path}; - Value -> {ok, Value} - end. 
-``` - -### find_key - -Find the value of the source key, supporting `base:` and `request:` - -```erlang -find_key(Path, Base, Request, Opts) -> - BaseAs = {as, <<"message@1.0">>, Base}, - RequestAs = {as, <<"message@1.0">>, Request}, - MaybeResolve = - case hb_path:term_to_path_parts(Path) of - [BinKey|RestKeys] -> - case binary:split(BinKey, <<":">>) of - [<<"base">>, <<"">>] -> - {message, Base}; - [<<"request">>, <<"">>] -> - {message, Request}; - [<<"base">>, Key] -> - {resolve, [{BaseAs, normalize_path([Key|RestKeys])}]}; - [Req, Key] when Req == <<"request">> orelse Req == <<"req">> -> - {resolve, [{RequestAs, normalize_path([Key|RestKeys])}]}; - [_] -> - {resolve, [ - {RequestAs, normalize_path(Path)}, - {BaseAs, normalize_path(Path)} - ]} - end; - _ -> {error, invalid_path, Path} - end, - case MaybeResolve of - Err = {error, _, _} -> Err; - {message, Message} -> {ok, Message}; - {resolve, Sources} -> - ?event( - {resolving_from_sources, - {path, Path}, - {sources, Sources} - } - ), - case hb_ao:get_first(Sources, source_not_found, Opts) of - source_not_found -> {error, source_not_found, Path}; - Source -> {ok, Source} - end - end. -``` - -### normalize_path - -Normalize the path. - -```erlang -normalize_path(Path) -> - case hb_path:to_binary(Path) of - <<"">> -> <<"/">>; - P -> P - end. -``` - -### error_to_message - -Convert an error to a message. - -```erlang -error_to_message({error, invalid_path, ErrPath}) -> - {error, #{ - <<"body">> => - <<"Path `", (normalize_path(ErrPath))/binary, "` is invalid.">> - }}; -``` - -### error_to_message - -Convert an error to a message. - -```erlang -error_to_message({error, source_not_found, ErrPath}) -> - {error, #{ - <<"body">> => - << - "Source path `", - (normalize_path(ErrPath))/binary, - "` to apply not found." - >> - }}; -``` - -### error_to_message - -Convert an error to a message. 
- -```erlang -error_to_message({error, path_not_found, ErrPath}) -> - {error, #{ - <<"body">> => - << - "Path `", - (normalize_path(ErrPath))/binary, - "` to apply not found." - >> - }}; -``` - -### error_to_message - -Convert an error to a message. - -```erlang -error_to_message(Error) -> - Error. -``` - -### resolve_key_test - -```erlang -resolve_key_test() -> - hb:init(), - Base = #{ - <<"device">> => <<"apply@1.0">>, - <<"body">> => <<"/~meta@1.0/build/node">>, - <<"irrelevant">> => <<"irrelevant">> - }, - Request = #{ - <<"irrelevant2">> => <<"irrelevant2">>, - <<"path">> => <<"body">> - }, - ?assertEqual({ok, <<"HyperBEAM">>}, hb_ao:resolve(Base, Request, #{})). -``` - -### resolve_pair_test - -```erlang -resolve_pair_test() -> - Base = #{ - <<"device">> => <<"apply@1.0">>, - <<"data-container">> => #{ <<"relevant">> => <<"DATA">> }, - <<"base">> => <<"data-container">>, - <<"irrelevant">> => <<"irrelevant">> - }, - Request = #{ - <<"irrelevant2">> => <<"irrelevant2">>, - <<"data-path">> => <<"relevant">>, - <<"request">> => <<"data-path">>, - <<"path">> => <<"pair">> - }, - ?assertEqual({ok, <<"DATA">>}, hb_ao:resolve(Base, Request, #{})). -``` - -### reverse_resolve_pair_test - -```erlang -reverse_resolve_pair_test() -> - ?assertEqual( - {ok, <<"TEST">>}, - hb_ao:resolve( - << - "/~meta@1.0/build", - "/node~apply@1.0&node=TEST&base=request:&request=base:" - >>, - #{} - ) - ). -``` - -### resolve_with_prefix_test - -```erlang -resolve_with_prefix_test() -> - ShortTraceLen = hb_opts:get(short_trace_len), - Node = hb_http_server:start_node(), - ?assertEqual( - {ok, ShortTraceLen}, - hb_http:request( - <<"GET">>, - Node, - <<"/~meta@1.0/info/request:debug-info~apply@1.0">>, - #{ - <<"debug-info">> => <<"short_trace_len">> - }, - #{} - ) - ). 
-``` - -### apply_over_http_test - -```erlang -apply_over_http_test() -> - Node = hb_http_server:start_node(), - Signed = - hb_message:commit( - #{ - <<"device">> => <<"apply@1.0">>, - <<"user-path">> => <<"/user-request/test-key">>, - <<"user-request">> => - #{ - <<"test-key">> => <<"DATA">> - } - }, - #{ priv_wallet => hb:wallet() } - ), - ?assertEqual( - {ok, <<"DATA">>}, - hb_ao:resolve( - Signed#{ <<"path">> => <<"/user-path">> }, - #{ priv_wallet => hb:wallet() } - ) - ), - ?assertEqual( - {ok, <<"DATA">>}, - hb_http:request( - <<"GET">>, - Node, - <<"/user-path">>, - Signed, - #{ priv_wallet => hb:wallet() } - ) - ). -``` - ---- - -*Generated from [dev_apply.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_apply.erl)* diff --git a/docs/book/src/dev_arweave.erl.md b/docs/book/src/dev_arweave.erl.md deleted file mode 100644 index 9177e68dc..000000000 --- a/docs/book/src/dev_arweave.erl.md +++ /dev/null @@ -1,355 +0,0 @@ -# dev_arweave - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_arweave.erl) - -A device that provides access to Arweave network information, relayed -from a designated node. -The node(s) that are used to query data may be configured by altering the -`/arweave` route in the node`s configuration message. - ---- - -## Exported Functions - -- `block/3` -- `current/3` -- `status/3` -- `tx/3` - ---- - -### status - -A device that provides access to Arweave network information, relayed -Proxy the `/info` endpoint from the Arweave node. -Returns the given transaction, if known to the client node(s), as an - -```erlang -status(_Base, _Request, Opts) -> - request(<<"GET">>, <<"/info">>, Opts). -``` - -### tx - -A device that provides access to Arweave network information, relayed -Proxy the `/info` endpoint from the Arweave node. 
-Returns the given transaction, if known to the client node(s), as an - -```erlang -tx(Base, Request, Opts) -> - case hb_maps:get(<<"method">>, Request, <<"GET">>, Opts) of - <<"POST">> -> post_tx(Base, Request, Opts); - <<"GET">> -> get_tx(Base, Request, Opts) - end. -``` - -### post_tx - -Upload a transaction to Arweave, using the node's default bundler (see - -```erlang -post_tx(_Base, Request, Opts) -> - case hb_client:upload(Request, Opts) of - Res = {ok, _} -> - ?event(arweave, {uploaded, Request}), - CacheRes = hb_cache:write(Request, Opts), - ?event(arweave, - {cache_uploaded_message, - {msg, Request}, - {status, - case CacheRes of {ok, _} -> ok; - _ -> failed - end - } - } - ), - Res; - Res -> - Res - end. -``` - -### get_tx - -Get a transaction ID from the Arweave node, as indicated by the `tx` key - -```erlang -get_tx(Base, Request, Opts) -> - case find_txid(Base, Request, Opts) of - not_found -> {error, not_found}; - TXID -> - case request(<<"GET">>, <<"/tx/", TXID/binary>>, Opts) of - {ok, TXHeader} -> - ?event(arweave, {retrieved_tx_header, {tx, TXID}}), - maybe_add_data(TXID, TXHeader, Base, Request, Opts); - Other -> Other - end - end. -``` - -### maybe_add_data - -Handle the optional adding of data to the transaction header, depending - -```erlang -maybe_add_data(TXID, Header, Base, Request, Opts) -> - GetData = - hb_util:atom(hb_ao:get_first( - [ - {Request, <<"data">>}, - {Base, <<"data">>} - ], - true, - Opts - )), - case hb_util:atom(GetData) of - false -> - {ok, Header}; - _ -> - case data(Base, Request, Opts) of - {ok, Data} -> - FullMessage = Header#{ <<"data">> => Data }, - ?event( - arweave, - {retrieved_tx_with_data, - {id, TXID}, - {data_size, byte_size(Data)}, - {message, FullMessage} - } - ), - {ok, FullMessage}; - {error, Reason} -> - ?event(arweave, - {data_retrieval_failed_after_header, - {id, TXID}, - {error, Reason} - } - ), - if GetData =/= always -> {ok, Header}; - true -> {error, Reason} - end - end - end. 
-``` - -### data - -Retrieve the data of a transaction from Arweave. - -```erlang -data(Base, Request, Opts) -> - case find_txid(Base, Request, Opts) of - not_found -> {error, not_found}; - TXID -> - ?event(arweave, {retrieving_tx_data, {tx, TXID}}), - request(<<"GET">>, <<"/raw/", TXID/binary>>, Opts) - end. -``` - -### block - -Retrieve (and cache) block information from Arweave. If the `block` key - -```erlang -block(Base, Request, Opts) -> - Block = - hb_ao:get_first( - [ - {Request, <<"block">>}, - {Base, <<"block">>} - ], - not_found, - Opts - ), - case Block of - <<"current">> -> current(Base, Request, Opts); - not_found -> current(Base, Request, Opts); - ID when ?IS_ID(ID) -> block({id, ID}, Opts); - MaybeHeight -> - try hb_util:int(MaybeHeight) of - Int -> block({height, Int}, Opts) - catch - _:_ -> - { - error, - <<"Invalid block reference `", MaybeHeight/binary, "`">> - } - end - end. -``` - -### block - -```erlang -block({id, ID}, Opts) -> - case hb_cache:read(ID, Opts) of - {ok, Block} -> - ?event(arweave, {retrieved_block_from_cache, {id, ID}}), - {ok, Block}; - not_found -> - request(<<"GET">>, <<"/block/hash/", ID/binary>>, Opts) - end; -``` - -### block - -```erlang -block({height, Height}, Opts) -> - case dev_arweave_block_cache:read(Height, Opts) of - {ok, Block} -> - ?event(arweave, {retrieved_block_from_cache, {height, Height}}), - {ok, Block}; - not_found -> - request( - <<"GET">>, - <<"/block/height/", (hb_util:bin(Height))/binary>>, - Opts - ) - end. -``` - -### current - -Retrieve the current block information from Arweave. -Find the transaction ID to retrieve from Arweave based on the request or - -```erlang -current(_Base, _Request, Opts) -> - request(<<"GET">>, <<"/block/current">>, Opts). -%%% Internal Functions -``` - -### find_txid - -Retrieve the current block information from Arweave. 
-Find the transaction ID to retrieve from Arweave based on the request or - -```erlang -find_txid(Base, Request, Opts) -> - hb_ao:get_first( - [ - {Request, <<"tx">>}, - {Base, <<"tx">>} - ], - not_found, - Opts - ). -``` - -### request - -Make a request to the Arweave node and parse the response into an - -```erlang -request(Method, Path, Opts) -> - ?event(arweave, {arweave_request, {method, Method}, {path, Path}}), - Res = - hb_http:request( - #{ - <<"path">> => <<"/arweave", Path/binary>>, - <<"method">> => Method - }, - Opts - ), - to_message(Path, Res, Opts). -``` - -### to_message - -Transform a response from the Arweave node into an AO-Core message. - -```erlang -to_message(Path = <<"/raw/", _/binary>>, {ok, #{ <<"body">> := Body }}, _Opts) -> - ?event(arweave, - {arweave_raw_response, - {path, Path}, - {data_size, byte_size(Body)} - } - ), - {ok, Body}; -``` - -### to_message - -Transform a response from the Arweave node into an AO-Core message. - -```erlang -to_message(Path = <<"/block/", _/binary>>, {ok, #{ <<"body">> := Body }}, Opts) -> - Block = hb_message:convert(Body, <<"structured@1.0">>, <<"json@1.0">>, Opts), - ?event(arweave, - {arweave_block_response, - {path, Path}, - {block, Block} - } - ), - CacheRes = dev_arweave_block_cache:write(Block, Opts), - ?event(arweave, - {cached_arweave_block, - {path, Path}, - {result, CacheRes} - } - ), - {ok, Block}; -``` - -### to_message - -Transform a response from the Arweave node into an AO-Core message. - -```erlang -to_message(Path, {ok, #{ <<"body">> := Body }}, Opts) -> - % All other responses that are `OK' status are converted from JSON to an - % AO-Core message. 
-``` - -### post_ans104_tx_test - -```erlang -post_ans104_tx_test() -> - ServerOpts = #{ store => [hb_test_utils:test_store()] }, - Server = hb_http_server:start_node(ServerOpts), - ClientOpts = - #{ - store => [hb_test_utils:test_store()], - priv_wallet => hb:wallet() - }, - Msg = - hb_message:commit( - #{ - <<"variant">> => <<"ao.N.1">>, - <<"type">> => <<"Process">>, - <<"data">> => <<"test-data">> - }, - ClientOpts, - #{ <<"commitment-device">> => <<"ans104@1.0">> } - ), - {ok, PostRes} = - hb_http:post( - Server, - Msg#{ - <<"path">> => <<"/~arweave@2.9-pre/tx">>, - <<"codec-device">> => <<"ans104@1.0">> - }, - ClientOpts - ), - ?assertMatch(#{ <<"status">> := 200 }, PostRes), - SignedID = hb_message:id(Msg, signed, ClientOpts), - {ok, GetRes} = - hb_http:get( - Server, <<"/", SignedID/binary>>, - ClientOpts - ), - ?assertMatch( - #{ - <<"status">> := 200, - <<"variant">> := <<"ao.N.1">>, - <<"type">> := <<"Process">>, - <<"data">> := <<"test-data">> - }, - GetRes - ), -``` - ---- - -*Generated from [dev_arweave.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_arweave.erl)* diff --git a/docs/book/src/dev_arweave_block_cache.erl.md b/docs/book/src/dev_arweave_block_cache.erl.md deleted file mode 100644 index 4977f2067..000000000 --- a/docs/book/src/dev_arweave_block_cache.erl.md +++ /dev/null @@ -1,99 +0,0 @@ -# dev_arweave_block_cache - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_arweave_block_cache.erl) - -A module that performs caching operations for the Arweave device, -focused on ensuring that block metadata is queriable via pseudo-paths. - ---- - -## Exported Functions - -- `heights/1` -- `latest/1` -- `path/2` -- `read/2` -- `write/2` - ---- - -### latest - -A module that performs caching operations for the Arweave device, -The pseudo-path prefix which the Arweave block cache should use. -Get the latest block from the cache. 
- -```erlang -latest(Opts) -> - case heights(Opts) of - {ok, []} -> - ?event(arweave_cache, no_blocks_in_cache), - not_found; - {ok, Blocks} -> - Latest = lists:max(Blocks), - ?event(arweave_cache, {latest_block_from_cache, {latest, Latest}}), - {ok, Latest} - end. -``` - -### heights - -Get the list of blocks from the cache. - -```erlang -heights(Opts) -> - AllBlocks = - hb_cache:list_numbered( - hb_store:path(hb_opts:get(store, no_viable_store, Opts), [ - ?ARWEAVE_BLOCK_CACHE_PREFIX, - <<"block">>, - <<"height">> - ]), - Opts - ), - ?event(arweave_cache, {listed_blocks, length(AllBlocks)}), - {ok, AllBlocks}. -``` - -### read - -Read a block from the cache. - -```erlang -read(Block, Opts) -> - Res = hb_cache:read(path(Block, Opts), Opts), - ?event(arweave_cache, {read_block, {reference, Block}, {result, Res}}), - Res. -``` - -### path - -Return the path of a block that will be used in the cache. - -```erlang -path(Block, Opts) when is_integer(Block) -> - hb_store:path(hb_opts:get(store, no_viable_store, Opts), [ - ?ARWEAVE_BLOCK_CACHE_PREFIX, - <<"block">>, - <<"height">>, - hb_util:bin(Block) - ]). -``` - -### write - -Write a block to the cache and create pseudo-paths for it. - -```erlang -write(Block, Opts) -> - {ok, Height} = hb_maps:find(<<"height">>, Block, Opts), - {ok, BlockID} = hb_maps:find(<<"indep_hash">>, Block, Opts), - {ok, BlockHash} = hb_maps:find(<<"hash">>, Block, Opts), - {ok, MsgID} = hb_cache:write(Block, Opts), - % Link the independent hash and the dependent hash to the written AO-Core - % message ID. 
-``` - ---- - -*Generated from [dev_arweave_block_cache.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_arweave_block_cache.erl)* diff --git a/docs/book/src/dev_auth_hook.erl.md b/docs/book/src/dev_auth_hook.erl.md deleted file mode 100644 index b83cc5828..000000000 --- a/docs/book/src/dev_auth_hook.erl.md +++ /dev/null @@ -1,431 +0,0 @@ -# dev_auth_hook - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_auth_hook.erl) - -A device offering an on-request hook that signs incoming messages with -node-hosted wallets, in accordance with the node operator's configuration. -It is intended for deployment in environments where a node's users have -intrinsic reasons for trusting the node outside of the scope of this device. -For example, if executed on a node running in a Trusted Execution Environment -with `~snp@1.0`, or a node they operate or is operated by a trusted -third-party. -This device utilizes the `generator` interface type which other devices may -implement. The generator is used to find/create a secret based on a user's -request, which is then passed to the `~proxy-wallet@1.0` device and matched -with a wallet which is used to sign the request. The `generator` interface -may implement the following keys: -
-    `generate` (optional): A key that generates a secret based on a
-                           user's request. May return either the secret
-                           directly, or a message with a `secret` key. If 
-                           a message is returned, it is assumed to be a
-                           modified version of the user's request and is
-                           used for further processing.
-    `finalize` (optional): A key that takes the message sequence after this
-                           device has processed it and returns it in a
-                           modified form.
-
-At present, the `~cookie-secret@1.0` and `~http-auth@1.0` devices implement -the `generator` interface. For example, the following hook definition will -use the `~cookie-secret@1.0` device to generate and manage wallets for -users, with authentication details stored in cookies: -
-  "on": {
-    "request": {
-      "device": "auth-hook@1.0",
-      "secret-provider": {
-        "device": "cookie-secret@1.0"
-      }
-    }
-  }
-
-`~auth-hook@1.0` expects to receive a `secret-provider` key in the hook -base message. It may optionally also take a `generate-path` and -`finalize-path`, which are used to generate the secret and post-process the -response. If either `X-path` keys are not present, the `generate` and -`finalize` paths are used upon the `secret-provider` message. If the secret -provider's device does not implement these keys, the operations are skipped. -Node operators may also specify a `when` message inside their hook definition -which is used to determine when messages should be signed. The supported keys -are: -
-    `committers`: always | uncommitted | [committer1, or committer2, or ...]
-    `keys`: always | [key1, or key2, or ...]
-
-Both keys are optional and can be combined to form 'and' conditions. For -example, the following hook definition will sign all uncommitted requests -that have the `Authorization` header: -
-  "on": {
-    "request": {
-      "device": "auth-hook@1.0",
-      "when": {
-            "keys": ["authorization"],
-            "committers": "uncommitted"
-        }
-      }
-    }
-
- ---- - -## Exported Functions - -- `request/3` - ---- - -### request - -A device offering an on-request hook that signs incoming messages with -Process an incoming request through a key provider. The key provider - -```erlang -request(Base, HookReq, Opts) -> - ?event({auth_hook_request, {base, Base}, {hook_req, HookReq}}), - maybe - % Get the key provider from options and short-circuit if none is - % provided. -``` - -### is_relevant - -Check if the request is relevant to the hook base. Node operators may - -```erlang -is_relevant(Base, Request, MessageSequence, Opts) -> - Committers = is_relevant_from_committers(Base, Request, Opts), - Keys = - lists:any( - fun(Msg) -> is_relevant_from_keys(Base, Msg, Opts) end, - [Request | MessageSequence] - ), - ?event({auth_hook_is_relevant, {committers, Committers}, {keys, Keys}}), - if Committers andalso Keys -> true; - true -> {skip, {committers, Committers}, {keys, Keys}} - end. -``` - -### is_relevant_from_committers - -Check if the request is relevant to the hook base based on the committers - -```erlang -is_relevant_from_committers(Base, Request, Opts) -> - Config = - hb_util:deep_get( - [<<"when">>, <<"committers">>], - Base, - <<"uncommitted">>, - Opts - ), - ?event({auth_hook_is_relevant_from_committers, {config, Config}, {base, Base}}), - case Config of - <<"always">> -> true; - <<"uncommitted">> -> hb_message:signers(Request, Opts) == []; - RelevantCommitters -> - lists:any( - fun(Signer) -> - lists:member(Signer, RelevantCommitters) - end, - hb_message:signers(Request, Opts) - ) - end. 
-``` - -### is_relevant_from_keys - -Check if the request is relevant to the hook base based on the presence - -```erlang -is_relevant_from_keys(_Base, ID, _Opts) when is_binary(ID) -> - false; -``` - -### is_relevant_from_keys - -Check if the request is relevant to the hook base based on the presence - -```erlang -is_relevant_from_keys(Base, {as, _, Msg}, Opts) -> - is_relevant_from_keys(Base, Msg, Opts); -``` - -### is_relevant_from_keys - -Check if the request is relevant to the hook base based on the presence - -```erlang -is_relevant_from_keys(Base, {resolve, Msg}, Opts) -> - is_relevant_from_keys(Base, Msg, Opts); -``` - -### is_relevant_from_keys - -Check if the request is relevant to the hook base based on the presence - -```erlang -is_relevant_from_keys(Base, Request, Opts) -> - Config = hb_util:deep_get([<<"when">>, <<"keys">>], Base, <<"always">>, Opts), - ?event( - { - auth_hook_is_relevant_from_keys, - {config, Config}, - {base, Base}, - {request, Request} - } - ), - case Config of - <<"always">> -> true; - RelevantKeys -> - lists:any( - fun(Key) -> - case hb_maps:find(Key, Request, Opts) of - {ok, _} -> true; - error -> false - end - end, - RelevantKeys - ) - end. -``` - -### generate_secret - -Normalize authentication credentials, generating new ones if needed. - -```erlang -generate_secret(Provider, Request, Opts) -> - case call_provider(<<"generate">>, Provider, Request, Opts) of - {error, not_found} -> - ?event({no_generate_handler, Provider}), - {ok, Provider, strip_sensitive(Request, Opts)}; - {error, Err} -> - % Forward the error. The main handler will fail to match this and - % return the error to the user. -``` - -### strip_sensitive - -Strip the `secret` field from a request. -Generate a wallet with the key if the `wallet` field is not present in - -```erlang -strip_sensitive(Request, Opts) -> - hb_maps:without([<<"secret">>], Request, Opts). -``` - -### generate_wallet - -Strip the `secret` field from a request. 
-Generate a wallet with the key if the `wallet` field is not present in - -```erlang -generate_wallet(Provider, Request, Opts) -> - {ok, #{ <<"body">> := WalletID }} = - dev_secret:generate(Provider, Request, Opts), - ?event({generated_wallet, WalletID}), - {ok, Provider, refresh_opts(Opts)}. -``` - -### sign_request - -Sign a request using the configured key provider - -```erlang -sign_request(Provider, Msg, Opts) -> - case hb_maps:get(<<"skip-commit">>, Provider, true, Opts) of - false -> - % Skip signing and return the normalized message. -``` - -### maybe_sign_messages - -Process a sequence of messages, signing those marked for signing - -```erlang -maybe_sign_messages(Provider, SignedReq, Opts) -> - Parsed = hb_singleton:from(SignedReq, Opts), - ?event({auth_hook_parsed_messages, {sequence_length, length(Parsed)}}), - SignKey = hb_opts:get(auth_hook_commit_key, ?DEFAULT_COMMIT_KEY, Opts), - Processed = maybe_sign_messages(Provider, SignKey, Parsed, Opts), - {ok, Processed}. -``` - -### maybe_sign_messages - -```erlang -maybe_sign_messages(_Provider, _Key, [], _Opts) -> []; -``` - -### maybe_sign_messages - -```erlang -maybe_sign_messages(Provider, Key, [Msg | Rest], Opts) when is_map(Msg) -> - case hb_util:atom(hb_maps:get(Key, Msg, false, Opts)) of - true -> - Uncommitted = hb_message:uncommitted(Msg, Opts), - ?event({auth_hook_signing_message, {uncommitted, Msg}}), - case sign_request(Provider, Uncommitted, Opts) of - {ok, Signed} -> - [ - Signed - | - maybe_sign_messages(Provider, Key, Rest, Opts) - ]; - {error, Err} -> - ?event({auth_hook_sign_error, Err}), - [{error, Err}] - end; - _ -> - [Msg | maybe_sign_messages(Provider, Key, Rest, Opts)] - end; -``` - -### maybe_sign_messages - -```erlang -maybe_sign_messages(Provider, Key, [Msg | Rest], Opts) -> - [Msg | maybe_sign_messages(Provider, Key, Rest, Opts)]. 
-``` - -### finalize - -Finalize the response by adding authentication state - -```erlang -finalize(KeyProvider, SignedReq, MessageSequence, Opts) -> - % Add the signed request and message sequence to the response, mirroring the - % structure of a normal `~hook@1.0' on-request hook. -``` - -### refresh_opts - -Refresh the options and log an event if they have changed. - -```erlang -refresh_opts(Opts) -> - NewOpts = hb_http_server:get_opts(Opts), - case NewOpts of - Opts -> ?event(auth_hook_no_opts_change); - _ -> - ?event( - {auth_hook_opts_changed, - {size_diff, - erlang:external_size(NewOpts) - - erlang:external_size(Opts) - } - } - ) - end, - NewOpts. -``` - -### find_provider - -Get the key provider from the base message or the defaults. - -```erlang -find_provider(Base, Opts) -> - case hb_maps:get(<<"secret-provider">>, Base, no_key_provider, Opts) of - no_key_provider -> - case hb_opts:get(hook_secret_provider, no_key_provider, Opts) of - no_key_provider -> {error, no_key_provider}; - SecretProvider -> SecretProvider - end; - SecretProvider when is_binary(SecretProvider) -> - {ok, #{ <<"device">> => SecretProvider }}; - SecretProvider when is_map(SecretProvider) -> - {ok, SecretProvider}; - _ -> - {error, invalid_auth_provider} - end. -``` - -### call_provider - -Find the appropriate handler for a key in the key provider. - -```erlang -call_provider(Key, Provider, Request, Opts) -> - ?event({call_provider, {key, Key}, {provider, Provider}, {req, Request}}), - ExecKey = hb_maps:get(<< Key/binary, "-path">>, Provider, Key, Opts), - ?event({call_provider, {exec_key, ExecKey}}), - case hb_ao:resolve(Provider, Request#{ <<"path">> => ExecKey }, Opts) of - {ok, Msg} when is_map(Msg) -> - % The result is a message. We revert the path to its original value. 
-``` - -### ignored_keys - -Default keys to ignore when signing - -```erlang -ignored_keys(Msg, Opts) -> - hb_maps:get( - <<"ignored-keys">>, - Msg, - hb_opts:get( - hook_auth_ignored_keys, - ?DEFAULT_IGNORED_KEYS, - Opts - ) - ). -``` - -### cookie_test - -```erlang -cookie_test() -> - % Start a node with a secret-provider that uses the cookie device. -``` - -### http_auth_test - -```erlang -http_auth_test() -> - % Start a node with the `~http-auth@1.0' device as the secret-provider. -``` - -### chained_preprocess_test - -```erlang -chained_preprocess_test() -> - % Start a node with the `~http-auth@1.0' device as the secret-provider, with - % a router chained afterwards in the request hook. -``` - -### when_test - -```erlang -when_test() -> - % Start a node with the `~http-auth@1.0' device as the secret-provider. Only - % request commitment with the hook if the `Authorization' header is present. -``` - -### signers_from_commitments_response - -The cookie hook test(s) call `GET /commitments`, which returns the - -```erlang -signers_from_commitments_response(Response, ServerWallet) -> - ServerAddress = ar_wallet:to_address(ServerWallet), - hb_maps:values(hb_maps:filtermap( - fun(Key, Value) when ?IS_ID(Key) -> - Type = hb_maps:get(<<"type">>, Value, not_found, #{}), - Committer = hb_maps:get(<<"committer">>, Value, not_found, #{}), - case {Type, Committer} of - {<<"rsa-pss-sha512">>, ServerAddress} -> false; - {<<"rsa-pss-sha512">>, _} -> {true, Committer}; - _ -> false - end; - (_Key, _Value) -> - false - end, - Response, - #{} -``` - ---- - -*Generated from [dev_auth_hook.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_auth_hook.erl)* diff --git a/docs/book/src/dev_cache.erl.md b/docs/book/src/dev_cache.erl.md deleted file mode 100644 index a3eae0380..000000000 --- a/docs/book/src/dev_cache.erl.md +++ /dev/null @@ -1,310 +0,0 @@ -# dev_cache - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cache.erl) - -A device that 
looks up an ID from a local store and returns it, -honoring the `accept` key to return the correct format. The cache also -supports writing messages to the store, if the node message has the -writer's address in its `cache_writers` key. - ---- - -## Exported Functions - -- `link/3` -- `read/3` -- `write/3` - ---- - -### read - -A device that looks up an ID from a local store and returns it, -Read data from the cache. - -```erlang -read(_M1, M2, Opts) -> - Location = hb_ao:get(<<"target">>, M2, Opts), - ?event({read, {key_extracted, Location}}), - ?event(debug_gateway, cache_read), - case hb_cache:read(Location, Opts) of - {ok, Res} -> - ?event({read, {cache_result, ok, Res}}), - case hb_ao:get(<<"accept">>, M2, Opts) of - <<"application/aos-2">> -> - ?event(dev_cache, - {read, - {accept_header, <<"application/aos-2">>} - } - ), - JSONMsg = dev_json_iface:message_to_json_struct(Res, Opts), - ?event(dev_cache, {read, {json_message, JSONMsg}}), - {ok, - #{ - <<"body">> => hb_json:encode(JSONMsg), - <<"content-type">> => <<"application/aos-2">> - } - }; - _ -> - {ok, Res} - end; - not_found -> - % The cache does not have this ID,but it may still be an explicit - % `data/' path. -``` - -### write - -Write data to the cache. 
- -```erlang -write(_M1, M2, Opts) -> - case is_trusted_writer(M2, Opts) of - true -> - ?event(dev_cache, {write, {trusted_writer, true}}), - Type = hb_ao:get(<<"type">>, M2, <<"single">>, Opts), - ?event(dev_cache, {write, {write_type, Type}}), - case Type of - <<"single">> -> - ?event(dev_cache, {write, {write_single_called}}), - write_single(M2, Opts); - <<"batch">> -> - ?event(dev_cache, {write, {write_batch_called}}), - hb_maps:map( - fun(_, Value) -> - ?event(dev_cache, {write, {batch_item, Value}}), - write_single(Value, Opts) - end, - hb_ao:get(<<"body">>, M2, Opts), - Opts - ); - _ -> - ?event(dev_cache, {write, {invalid_write_type, Type}}), - {error, - #{ - <<"status">> => 400, - <<"body">> => <<"Invalid write type.">> - } - } - end; - false -> - ?event(dev_cache, {write, {trusted_writer, false}}), - {error, - #{ - <<"status">> => 403, - <<"body">> => <<"Not authorized to write to the cache.">> - } - } - end. -``` - -### link - -Link a source to a destination in the cache. - -```erlang -link(_Base, Req, Opts) -> - case is_trusted_writer(Req, Opts) of - true -> - Source = hb_ao:get(<<"source">>, Req, Opts), - Destination = hb_ao:get(<<"destination">>, Req, Opts), - write_single(#{ - <<"operation">> => <<"link">>, - <<"source">> => Source, - <<"destination">> => Destination - }, Opts); - false -> - {error, not_authorized} - end. -``` - -### write_single - -Helper function to write a single data item to the cache. 
- -```erlang -write_single(Msg, Opts) -> - Body = hb_ao:get(<<"body">>, Msg, Opts), - ?event(dev_cache, {write_single, {body_extracted, Body}}), - Location = hb_ao:get(<<"location">>, Msg, Opts), - ?event(dev_cache, {write_single, {location_extracted, Location}}), - Operation = hb_ao:get(<<"operation">>, Msg, <<"write">>, Opts), - ?event(dev_cache, {write_single, {operation, Operation}}), - case {Operation, Body, Location} of - {<<"write">>, not_found, _} -> - ?event(dev_cache, {write_single, {error, "No body to write"}}), - {error, - #{ - <<"status">> => 400, - <<"body">> => <<"No body to write.">> - } - }; - {<<"write">>, Binary, not_found} when is_binary(Binary) -> - % When asked to write only a binary, we do not calculate any - % alternative IDs. -``` - -### is_trusted_writer - -Verify that the request originates from a trusted writer. - -```erlang -is_trusted_writer(Req, Opts) -> - Signers = hb_message:signers(Req, Opts), - ?event(dev_cache, {is_trusted_writer, {signers, Signers}, {req, Req}}), - CacheWriters = hb_opts:get(cache_writers, [], Opts), - ?event(dev_cache, {is_trusted_writer, {cache_writers, CacheWriters}}), - AnyTrusted = lists:any(fun(Signer) -> lists:member(Signer, CacheWriters) end, Signers), - case AnyTrusted of - true -> - ?event(dev_cache, {is_trusted_writer, {trusted, true}}), - true; - _ -> - ?event(dev_cache, {is_trusted_writer, {trusted, false}}), - false - end. -``` - -### setup_test_env - -Create a test environment with a local store and node. 
- -```erlang -setup_test_env() -> - Timestamp = integer_to_binary(os:system_time(millisecond)), - StorePrefix = <<"cache-TEST/remote-", Timestamp/binary>>, - ?event(dev_cache, {setup_test_env, {start, StorePrefix}}), - application:ensure_all_started(hb), - ?event(dev_cache, {setup_test_env, {hb_started}}), - LocalStore = - #{ <<"store-module">> => hb_store_fs, <<"name">> => StorePrefix }, - ?event(dev_cache, {setup_test_env, {local_store_configured, LocalStore}}), - hb_store:reset(LocalStore), - ?event(dev_cache, {setup_test_env, {store_reset}}), - Wallet = ar_wallet:new(), - Address = hb_util:human_id(ar_wallet:to_address(Wallet)), - ?event(dev_cache, {setup_test_env, {address, Address}}), - Node = hb_http_server:start_node(#{ - cache_control => [<<"no-cache">>, <<"no-store">>], - store => LocalStore, - cache_writers => [ - Address, - hb_util:human_id(ar_wallet:to_address(hb:wallet())) - ], - store_all_signed => false - }), - ?event(dev_cache, {setup_test_env, {node_started, Node}}), - TestOpts = #{ - cache_control => [<<"no-cache">>, <<"no-store">>], - store_all_signed => false, - store => [ - #{ - <<"store-module">> => hb_store_remote_node, - <<"node">> => Node, - priv_wallet => Wallet - } - ] - }, - {ok, TestOpts, [LocalStore, Wallet, Address, Node]}. -``` - -### write_to_cache - -Write data to the cache via HTTP. 
- -```erlang -write_to_cache(Node, Data, Wallet) -> - ?event(dev_cache, {write_to_cache, {start, Node}}), - WriteMsg = #{ - <<"path">> => <<"/~cache@1.0/write">>, - <<"method">> => <<"POST">>, - <<"body">> => Data - }, - ?event(dev_cache, {write_to_cache, {message_created, WriteMsg}}), - SignedMsg = hb_message:commit(WriteMsg, Wallet), - ?event(dev_cache, {write_to_cache, {message_signed}}), - WriteResult = hb_http:post(Node, SignedMsg, #{}), - ?event(dev_cache, {write_to_cache, {http_post, WriteResult}}), - {ok, WriteResponse} = WriteResult, - ?event(dev_cache, {write_to_cache, {response_received, WriteResponse}}), - Status = hb_ao:get(<<"status">>, WriteResponse, 0, #{}), - ?assertEqual(200, Status), - Path = hb_ao:get(<<"path">>, WriteResponse, not_found, #{}), - ?assertNotEqual(not_found, Path), - ?event(dev_cache, {write_to_cache, {write_success, Path}}), - {WriteResponse, Path}. -``` - -### read_from_cache - -Read data from the cache via HTTP. - -```erlang -read_from_cache(Node, Path) -> - ?event(dev_cache, {read_from_cache, {start, Node, Path}}), - ReadMsg = #{ - <<"path">> => <<"/~cache@1.0/read">>, - <<"method">> => <<"GET">>, - <<"target">> => Path - }, - ?event(dev_cache, {read_from_cache, {request_created, ReadMsg}}), - ?event({test_read, request, ReadMsg}), - ReadResult = hb_http:get(Node, ReadMsg, #{}), - ?event(dev_cache, {read_from_cache, {http_get, ReadResult}}), - case ReadResult of - ReadResponse when is_binary(ReadResponse) -> - ?event(dev_cache, - {read_from_cache, - {response_binary, ReadResponse} - } - ), - ReadResponse; - {ok, ReadResponse} -> - ?event(dev_cache, {read_from_cache, {response_ok, ReadResponse}}), - ReadResponse; - {error, Reason} -> - ?event(dev_cache, {read_from_cache, {response_error, Reason}}), - {error, Reason} - end. 
-``` - -### cache_write_message_test - -Test that the cache can be written to and read from using the hb_cache - -```erlang -cache_write_message_test() -> - ?event(dev_cache, {cache_api_test, {start}}), - {ok, Opts, _} = setup_test_env(), - TestData = #{ - <<"test_key">> => <<"test_value">> - }, - ?event(dev_cache, {cache_api_test, {opts, Opts}}), - {ok, Path} = hb_cache:write(TestData, Opts), - ?event(dev_cache, {cache_api_test, {data_written, Path}}), - {ok, ReadData} = hb_cache:read(Path, Opts), - ?event(dev_cache, {cache_api_test, {data_read, ReadData}}), - ?assert(hb_message:match(TestData, ReadData, only_present, Opts)), - ?event(dev_cache, {cache_api_test}), - ok. -``` - -### cache_write_binary_test - -Ensure that we can write direct binaries to the cache. - -```erlang -cache_write_binary_test() -> - ?event(dev_cache, {cache_api_test, {start}}), - {ok, Opts, _} = setup_test_env(), - TestData = <<"test_binary">>, - {ok, Path} = hb_cache:write(TestData, Opts), - {ok, ReadData} = hb_cache:read(Path, Opts), - ?event(dev_cache, {cache_api_test, {data_read, ReadData}}), - ?assertEqual(TestData, ReadData), - ?event(dev_cache, {cache_api_test}), -``` - ---- - -*Generated from [dev_cache.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cache.erl)* diff --git a/docs/book/src/dev_cacheviz.erl.md b/docs/book/src/dev_cacheviz.erl.md deleted file mode 100644 index 331f2a566..000000000 --- a/docs/book/src/dev_cacheviz.erl.md +++ /dev/null @@ -1,115 +0,0 @@ -# dev_cacheviz - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cacheviz.erl) - -A device that generates renders (or renderable dot output) of a node's -cache. 
- ---- - -## Exported Functions - -- `dot/3` -- `index/3` -- `js/3` -- `json/3` -- `svg/3` - ---- - -### dot - -A device that generates renders (or renderable dot output) of a node's -Output the dot representation of the cache, or a specific path within -Output the SVG representation of the cache, or a specific path within - -```erlang -dot(_, Req, Opts) -> - Target = hb_ao:get(<<"target">>, Req, all, Opts), - Dot = - hb_cache_render:cache_path_to_dot( - Target, - #{ - render_data => - hb_util:atom( - hb_ao:get(<<"render-data">>, Req, false, Opts) - ) - }, - Opts - ), - {ok, #{ <<"content-type">> => <<"text/vnd.graphviz">>, <<"body">> => Dot }}. -``` - -### svg - -A device that generates renders (or renderable dot output) of a node's -Output the dot representation of the cache, or a specific path within -Output the SVG representation of the cache, or a specific path within -Return a JSON representation of the cache graph, suitable for use with - -```erlang -svg(Base, Req, Opts) -> - {ok, #{ <<"body">> := Dot }} = dot(Base, Req, Opts), - ?event(cacheviz, {dot, Dot}), - Svg = hb_cache_render:dot_to_svg(Dot), - {ok, #{ <<"content-type">> => <<"image/svg+xml">>, <<"body">> => Svg }}. 
-``` - -### json - -A device that generates renders (or renderable dot output) of a node's -Output the dot representation of the cache, or a specific path within -Output the SVG representation of the cache, or a specific path within -Return a JSON representation of the cache graph, suitable for use with - -```erlang -json(Base, Req, Opts) -> - ?event({json, {base, Base}, {req, Req}}), - Target = - case hb_ao:get(<<"target">>, Req, Opts) of - not_found -> - case map_size(maps:without([<<"device">>], hb_private:reset(Base))) of - 0 -> - all; - _ -> - ?event({writing_base_for_rendering, Base}), - {ok, Path} = hb_cache:write(Base, Opts), - ?event({wrote_message, Path}), - ID = hb_message:id(Base, all, Opts), - ?event({generated_id, ID}), - ID - end; - <<".">> -> all; - ReqTarget -> ReqTarget - end, - MaxSize = hb_util:int(hb_ao:get(<<"max-size">>, Req, 250, Opts)), - ?event({max_size, MaxSize}), - ?event({generating_json_for, {target, Target}}), - Res = hb_cache_render:get_graph_data(Target, MaxSize, Opts), - ?event({graph_data, Res}), - Res. -``` - -### index - -Return a renderer in HTML form for the JSON format. -Return a JS library that can be used to render the JSON format. - -```erlang -index(Base, _, _Opts) -> - ?event({cacheviz_index, {base, Base}}), - dev_hyperbuddy:return_file(<<"cacheviz@1.0">>, <<"graph.html">>). -``` - -### js - -Return a renderer in HTML form for the JSON format. -Return a JS library that can be used to render the JSON format. 
- -```erlang -js(_, _, _Opts) -> -``` - ---- - -*Generated from [dev_cacheviz.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cacheviz.erl)* diff --git a/docs/book/src/dev_codec_ans104.erl.md b/docs/book/src/dev_codec_ans104.erl.md deleted file mode 100644 index 2eda359ed..000000000 --- a/docs/book/src/dev_codec_ans104.erl.md +++ /dev/null @@ -1,511 +0,0 @@ -# dev_codec_ans104 - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_ans104.erl) - -Codec for managing transformations from `ar_bundles`-style Arweave TX -records to and from TABMs. - ---- - -## Exported Functions - -- `commit/3` -- `content_type/1` -- `deserialize/3` -- `from/3` -- `serialize/3` -- `to/3` -- `verify/3` - ---- - -### content_type - -Codec for managing transformations from `ar_bundles`-style Arweave TX -Return the content type for the codec. -Serialize a message or TX to a binary. - -```erlang -content_type(_) -> {ok, <<"application/ans104">>}. -``` - -### serialize - -Codec for managing transformations from `ar_bundles`-style Arweave TX -Return the content type for the codec. -Serialize a message or TX to a binary. - -```erlang -serialize(Msg, Req, Opts) when is_map(Msg) -> - serialize(to(Msg, Req, Opts), Req, Opts); -``` - -### serialize - -Codec for managing transformations from `ar_bundles`-style Arweave TX -Return the content type for the codec. -Serialize a message or TX to a binary. - -```erlang -serialize(TX, _Req, _Opts) when is_record(TX, tx) -> - {ok, ar_bundles:serialize(TX)}. -``` - -### deserialize - -Deserialize a binary ans104 message to a TABM. - -```erlang -deserialize(#{ <<"body">> := Binary }, Req, Opts) -> - deserialize(Binary, Req, Opts); -``` - -### deserialize - -Deserialize a binary ans104 message to a TABM. - -```erlang -deserialize(Binary, Req, Opts) when is_binary(Binary) -> - deserialize(ar_bundles:deserialize(Binary), Req, Opts); -``` - -### deserialize - -Deserialize a binary ans104 message to a TABM. 
- -```erlang -deserialize(TX, Req, Opts) when is_record(TX, tx) -> - from(TX, Req, Opts). -``` - -### commit - -Sign a message using the `priv_wallet` key in the options. Supports both - -```erlang -commit(Msg, Req = #{ <<"type">> := <<"unsigned">> }, Opts) -> - commit(Msg, Req#{ <<"type">> => <<"unsigned-sha256">> }, Opts); -``` - -### commit - -Sign a message using the `priv_wallet` key in the options. Supports both - -```erlang -commit(Msg, Req = #{ <<"type">> := <<"signed">> }, Opts) -> - commit(Msg, Req#{ <<"type">> => <<"rsa-pss-sha256">> }, Opts); -``` - -### commit - -Sign a message using the `priv_wallet` key in the options. Supports both - -```erlang -commit(Msg, Req = #{ <<"type">> := <<"rsa-pss-sha256">> }, Opts) -> - % Convert the given message to an ANS-104 TX record, sign it, and convert - % it back to a structured message. -``` - -### commit - -```erlang -commit(Msg, #{ <<"type">> := <<"unsigned-sha256">> }, Opts) -> - % Remove the commitments from the message, convert it to ANS-104, then back. -``` - -### verify - -Verify an ANS-104 commitment. - -```erlang -verify(Msg, Req, Opts) -> - ?event({verify, {base, Msg}, {req, Req}}), - OnlyWithCommitment = - hb_private:reset( - hb_message:with_commitments( - Req, - Msg, - Opts - ) - ), - ?event({verify, {only_with_commitment, OnlyWithCommitment}}), - {ok, TX} = to(OnlyWithCommitment, Req, Opts), - ?event({verify, {encoded, TX}}), - Res = ar_bundles:verify_item(TX), - {ok, Res}. -``` - -### from - -Convert a #tx record into a message map recursively. - -```erlang -from(Binary, _Req, _Opts) when is_binary(Binary) -> {ok, Binary}; -``` - -### from - -Convert a #tx record into a message map recursively. - -```erlang -from(TX, Req, Opts) when is_record(TX, tx) -> - case lists:keyfind(<<"ao-type">>, 1, TX#tx.tags) of - false -> - do_from(TX, Req, Opts); - {<<"ao-type">>, <<"binary">>} -> - {ok, TX#tx.data} - end. 
-``` - -### do_from - -```erlang -do_from(RawTX, Req, Opts) -> - % Ensure the TX is fully deserialized. -``` - -### to - -Internal helper to translate a message to its #tx record representation, - -```erlang -to(Binary, _Req, _Opts) when is_binary(Binary) -> - % ar_bundles cannot serialize just a simple binary or get an ID for it, so - % we turn it into a TX record with a special tag, tx_to_message will - % identify this tag and extract just the binary. -``` - -### to - -```erlang -to(TX, _Req, _Opts) when is_record(TX, tx) -> {ok, TX}; -``` - -### to - -```erlang -to(RawTABM, Req, Opts) when is_map(RawTABM) -> - % Ensure that the TABM is fully loaded if the `bundle` key is set to true. -``` - -### to - -```erlang -to(Other, _Req, _Opts) -> - throw({invalid_tx, Other}). -``` - -### normal_tags_test - -```erlang -normal_tags_test() -> - Msg = #{ - <<"first-tag">> => <<"first-value">>, - <<"second-tag">> => <<"second-value">> - }, - {ok, Encoded} = to(Msg, #{}, #{}), - ?event({encoded, Encoded}), - {ok, Decoded} = from(Encoded, #{}, #{}), - ?event({decoded, Decoded}), - ?assert(hb_message:match(Msg, Decoded)). -``` - -### from_maintains_tag_name_case_test - -```erlang -from_maintains_tag_name_case_test() -> - TX = #tx { - tags = [ - {<<"Test-Tag">>, <<"test-value">>} - ] - }, - SignedTX = ar_bundles:sign_item(TX, hb:wallet()), - ?event({signed_tx, SignedTX}), - ?assert(ar_bundles:verify_item(SignedTX)), - TABM = hb_util:ok(from(SignedTX, #{}, #{})), - ?event({tabm, TABM}), - ConvertedTX = hb_util:ok(to(TABM, #{}, #{})), - ?event({converted_tx, ConvertedTX}), - ?assert(ar_bundles:verify_item(ConvertedTX)), - ?assertEqual(ConvertedTX, ar_bundles:normalize(SignedTX)). 
-``` - -### restore_tag_name_case_from_cache_test - -```erlang -restore_tag_name_case_from_cache_test() -> - Opts = #{ store => hb_test_utils:test_store() }, - TX = #tx { - tags = [ - {<<"Test-Tag">>, <<"test-value">>}, - {<<"test-tag-2">>, <<"test-value-2">>} - ] - }, - SignedTX = ar_bundles:sign_item(TX, ar_wallet:new()), - SignedMsg = - hb_message:convert( - SignedTX, - <<"structured@1.0">>, - <<"ans104@1.0">>, - Opts - ), - SignedID = hb_message:id(SignedMsg, all), - ?event({signed_msg, SignedMsg}), - OnlyCommitted = hb_message:with_only_committed(SignedMsg, Opts), - ?event({only_committed, OnlyCommitted}), - {ok, ID} = hb_cache:write(SignedMsg, Opts), - ?event({id, ID}), - {ok, ReadMsg} = hb_cache:read(SignedID, Opts), - ?event({restored_msg, ReadMsg}), - {ok, ReadTX} = to(ReadMsg, #{}, Opts), - ?event({restored_tx, ReadTX}), - ?assert(hb_message:match(ReadMsg, SignedMsg)), - ?assert(ar_bundles:verify_item(ReadTX)). -``` - -### unsigned_duplicated_tag_name_test - -```erlang -unsigned_duplicated_tag_name_test() -> - TX = ar_bundles:reset_ids(ar_bundles:normalize(#tx { - tags = [ - {<<"Test-Tag">>, <<"test-value">>}, - {<<"test-tag">>, <<"test-value-2">>} - ] - })), - Msg = hb_message:convert(TX, <<"structured@1.0">>, <<"ans104@1.0">>, #{}), - ?event({msg, Msg}), - TX2 = hb_message:convert(Msg, <<"ans104@1.0">>, <<"structured@1.0">>, #{}), - ?event({tx2, TX2}), - ?assertEqual(TX, TX2). -``` - -### signed_duplicated_tag_name_test - -```erlang -signed_duplicated_tag_name_test() -> - TX = ar_bundles:sign_item(#tx { - tags = [ - {<<"Test-Tag">>, <<"test-value">>}, - {<<"test-tag">>, <<"test-value-2">>} - ] - }, ar_wallet:new()), - Msg = hb_message:convert(TX, <<"structured@1.0">>, <<"ans104@1.0">>, #{}), - ?event({msg, Msg}), - TX2 = hb_message:convert(Msg, <<"ans104@1.0">>, <<"structured@1.0">>, #{}), - ?event({tx2, TX2}), - ?assertEqual(TX, TX2), - ?assert(ar_bundles:verify_item(TX2)). 
-``` - -### simple_to_conversion_test - -```erlang -simple_to_conversion_test() -> - Msg = #{ - <<"first-tag">> => <<"first-value">>, - <<"second-tag">> => <<"second-value">> - }, - {ok, Encoded} = to(Msg, #{}, #{}), - ?event({encoded, Encoded}), - {ok, Decoded} = from(Encoded, #{}, #{}), - ?event({decoded, Decoded}), - ?assert(hb_message:match(Msg, hb_message:uncommitted(Decoded, #{}))). -``` - -### external_item_with_target_field_test - -Ensure that items with an explicitly defined target field lead to: - -```erlang -external_item_with_target_field_test() -> - TX = - ar_bundles:sign_item( - #tx { - target = crypto:strong_rand_bytes(32), - tags = [ - {<<"test-tag">>, <<"test-value">>}, - {<<"test-tag-2">>, <<"test-value-2">>} - ], - data = <<"test-data">> - }, - ar_wallet:new() - ), - EncodedTarget = hb_util:encode(TX#tx.target), - ?event({tx, TX}), - Decoded = hb_message:convert(TX, <<"structured@1.0">>, <<"ans104@1.0">>, #{}), - ?event({decoded, Decoded}), - ?assertEqual(EncodedTarget, hb_maps:get(<<"target">>, Decoded, undefined, #{})), - {ok, OnlyCommitted} = hb_message:with_only_committed(Decoded, #{}), - ?event({only_committed, OnlyCommitted}), - ?assertEqual(EncodedTarget, hb_maps:get(<<"target">>, OnlyCommitted, undefined, #{})), - Encoded = hb_message:convert(OnlyCommitted, <<"ans104@1.0">>, <<"structured@1.0">>, #{}), - ?assertEqual(TX#tx.target, Encoded#tx.target), - ?event({result, {initial, TX}, {result, Encoded}}), - ?assertEqual(TX, Encoded). -``` - -### generate_item_with_target_tag_test - -Ensure that items made inside HyperBEAM use the tags to encode `target` - -```erlang -generate_item_with_target_tag_test() -> - Msg = - #{ - <<"target">> => Target = <<"NON-ID-TARGET">>, - <<"other-key">> => <<"other-value">> - }, - {ok, TX} = to(Msg, #{}, #{}), - ?event({encoded_tx, TX}), - % The encoded TX should have ignored the `target' field, setting a tag instead. 
-``` - -### generate_item_with_target_field_test - -```erlang -generate_item_with_target_field_test() -> - Msg = - hb_message:commit( - #{ - <<"target">> => Target = hb_util:encode(crypto:strong_rand_bytes(32)), - <<"other-key">> => <<"other-value">> - }, - #{ priv_wallet => hb:wallet() }, - <<"ans104@1.0">> - ), - {ok, TX} = to(Msg, #{}, #{}), - ?event({encoded_tx, TX}), - ?assertEqual(Target, hb_util:encode(TX#tx.target)), - Decoded = hb_message:convert(TX, <<"structured@1.0">>, <<"ans104@1.0">>, #{}), - ?event({decoded, Decoded}), - ?assertEqual(Target, hb_maps:get(<<"target">>, Decoded, undefined, #{})), - {ok, OnlyCommitted} = hb_message:with_only_committed(Decoded, #{}), - ?event({only_committed, OnlyCommitted}), - ?assertEqual(Target, hb_maps:get(<<"target">>, OnlyCommitted, undefined, #{})), - Encoded = hb_message:convert(OnlyCommitted, <<"ans104@1.0">>, <<"structured@1.0">>, #{}), - ?event({result, {initial, TX}, {result, Encoded}}), - ?assertEqual(TX, Encoded). -``` - -### type_tag_test - -```erlang -type_tag_test() -> - TX = - ar_bundles:sign_item( - #tx { - tags = [{<<"type">>, <<"test-value">>}] - }, - ar_wallet:new() - ), - ?event({tx, TX}), - Structured = hb_message:convert(TX, <<"structured@1.0">>, <<"ans104@1.0">>, #{}), - ?event({structured, Structured}), - TX2 = hb_message:convert(Structured, <<"ans104@1.0">>, <<"structured@1.0">>, #{}), - ?event({after_conversion, TX2}), - ?assertEqual(TX, TX2). -``` - -### ao_data_key_test - -```erlang -ao_data_key_test() -> - Msg = - hb_message:commit( - #{ - <<"other-key">> => <<"Normal value">>, - <<"body">> => <<"Body value">> - }, - #{ priv_wallet => hb:wallet() }, - <<"ans104@1.0">> - ), - ?event({msg, Msg}), - Enc = hb_message:convert(Msg, <<"ans104@1.0">>, #{}), - ?event({enc, Enc}), - ?assertEqual(<<"Body value">>, Enc#tx.data), - Dec = hb_message:convert(Enc, <<"structured@1.0">>, <<"ans104@1.0">>, #{}), - ?event({dec, Dec}), - ?assert(hb_message:verify(Dec, all, #{})). 
-``` - -### simple_signed_to_httpsig_test - -```erlang -simple_signed_to_httpsig_test() -> - Structured = - hb_message:commit( - #{ <<"test-tag">> => <<"test-value">> }, - #{ priv_wallet => ar_wallet:new() }, - #{ - <<"commitment-device">> => <<"ans104@1.0">> - } - ), - ?event(debug_test, {msg, Structured}), - HTTPSig = - hb_message:convert( - Structured, - <<"httpsig@1.0">>, - <<"structured@1.0">>, - #{} - ), - ?event(debug_test, {httpsig, HTTPSig}), - Structured2 = - hb_message:convert( - HTTPSig, - <<"structured@1.0">>, - <<"httpsig@1.0">>, - #{} - ), - ?event(debug_test, {decoded, Structured2}), - Match = hb_message:match(Structured, Structured2, #{}), - ?assert(Match), - ?assert(hb_message:verify(Structured2, all, #{})), - HTTPSig2 = hb_message:convert(Structured2, <<"httpsig@1.0">>, <<"structured@1.0">>, #{}), - ?event(debug_test, {httpsig2, HTTPSig2}), - ?assert(hb_message:verify(HTTPSig2, all, #{})), - ?assert(hb_message:match(HTTPSig, HTTPSig2)). -``` - -### unsorted_tag_map_test - -```erlang -unsorted_tag_map_test() -> - TX = - ar_bundles:sign_item( - #tx{ - format = ans104, - tags = [ - {<<"z">>, <<"position-1">>}, - {<<"a">>, <<"position-2">>} - ], - data = <<"data">> - }, - ar_wallet:new() - ), - ?assert(ar_bundles:verify_item(TX)), - ?event(debug_test, {tx, TX}), - {ok, TABM} = dev_codec_ans104:from(TX, #{}, #{}), - ?event(debug_test, {tabm, TABM}), - {ok, Decoded} = dev_codec_ans104:to(TABM, #{}, #{}), - ?event(debug_test, {decoded, Decoded}), - ?assert(ar_bundles:verify_item(Decoded)). 
-``` - -### field_and_tag_ordering_test - -```erlang -field_and_tag_ordering_test() -> - UnsignedTABM = #{ - <<"a">> => <<"value1">>, - <<"z">> => <<"value2">>, - <<"target">> => <<"NON-ID-TARGET">> - }, - Wallet = hb:wallet(), - SignedTABM = hb_message:commit( - UnsignedTABM, #{priv_wallet => Wallet}, <<"ans104@1.0">>), -``` - ---- - -*Generated from [dev_codec_ans104.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_ans104.erl)* diff --git a/docs/book/src/dev_codec_ans104_from.erl.md b/docs/book/src/dev_codec_ans104_from.erl.md deleted file mode 100644 index d9b42e17e..000000000 --- a/docs/book/src/dev_codec_ans104_from.erl.md +++ /dev/null @@ -1,355 +0,0 @@ -# dev_codec_ans104_from - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_ans104_from.erl) - -Library functions for decoding ANS-104-style data items to TABM form. - ---- - -## Exported Functions - -- `base/5` -- `committed/5` -- `data/4` -- `fields/2` -- `tags/2` -- `with_commitments/5` - ---- - -### fields - -Library functions for decoding ANS-104-style data items to TABM form. -Return a TABM message containing the fields of the given decoded - -```erlang -fields(Item, _Opts) -> - case Item#tx.target of - ?DEFAULT_TARGET -> #{}; - Target -> - #{ - <<"target">> => hb_util:encode(Target) - } - end. -``` - -### tags - -Return a TABM of the raw tags of the item, including all metadata - -```erlang -tags(Item, Opts) -> - Tags = hb_ao:normalize_keys( - deduplicating_from_list(Item#tx.tags, Opts), - Opts - ), - ao_types(Tags, Opts). 
-``` - -### ao_types - -Ensure the encoded keys in the `ao-types` field are lowercased and - -```erlang -ao_types(#{ <<"ao-types">> := AoTypes } = Tags, Opts) -> - AOTypes = dev_codec_structured:decode_ao_types(AoTypes, Opts), - % Normalize all keys in the ao-types map and re-encode - NormAOTypes = - maps:fold( - fun(Key, Val, Acc) -> - NormKey = hb_util:to_lower(hb_ao:normalize_key(Key)), - Acc#{ NormKey => Val } - end, - #{}, - AOTypes - ), - EncodedAOTypes = dev_codec_structured:encode_ao_types(NormAOTypes, Opts), - Tags#{ <<"ao-types">> := EncodedAOTypes }; -``` - -### ao_types - -Ensure the encoded keys in the `ao-types` field are lowercased and - -```erlang -ao_types(Tags, _Opts) -> - Tags. -``` - -### data - -Return a TABM of the keys and values found in the data field of the item. - -```erlang -data(Item, Req, Tags, Opts) -> - % If the data field is empty, we return an empty map. If it is a map, we - % return it as such. Otherwise, we return a map with the data key set to - % the raw data value. This handles unbundling nested messages, as well as - % applying the `ao-data-key' tag if given. -``` - -### committed - -Calculate the list of committed keys for an item, based on its - -```erlang -committed(Item, Fields, Tags, Data, Opts) -> - hb_util:unique( - data_keys(Data, Opts) ++ - tag_keys(Item, Opts) ++ - field_keys(Fields, Tags, Data, Opts) - ). -``` - -### field_keys - -Return the list of the keys from the fields TABM. - -```erlang -field_keys(BaseFields, Tags, Data, Opts) -> - HasTarget = - hb_maps:is_key(<<"target">>, BaseFields, Opts) orelse - hb_maps:is_key(<<"target">>, Tags, Opts) orelse - hb_maps:is_key(<<"target">>, Data, Opts), - case HasTarget of - true -> [<<"target">>]; - false -> [] - end. -``` - -### data_keys - -Return the list of the keys from the data TABM. - -```erlang -data_keys(Data, Opts) -> - hb_util:to_sorted_keys(Data, Opts). -``` - -### tag_keys - -Return the list of the keys from the tags TABM. 
Filter all metadata - -```erlang -tag_keys(Item, _Opts) -> - MetaTags = [ - <<"bundle-format">>, - <<"bundle-version">>, - <<"bundle-map">>, - <<"ao-data-key">> - ], - lists:filtermap( - fun({Tag, _}) -> - case lists:member(Tag, MetaTags) of - true -> false; - false -> {true, hb_util:to_lower(hb_ao:normalize_key(Tag))} - end - end, - Item#tx.tags - ). -``` - -### base - -Return the complete message for an item, less its commitments. The - -```erlang -base(CommittedKeys, Fields, Tags, Data, Opts) -> - hb_maps:from_list( - lists:map( - fun(Key) -> - case hb_maps:find(Key, Data, Opts) of - error -> - case hb_maps:find(Key, Fields, Opts) of - error -> - case hb_maps:find(Key, Tags, Opts) of - error -> throw({missing_key, Key}); - {ok, Value} -> {Key, Value} - end; - {ok, Value} -> {Key, Value} - end; - {ok, Value} -> {Key, Value} - end - end, - CommittedKeys - ) - ). -``` - -### with_commitments - -Return a message with the appropriate commitments added to it. - -```erlang -with_commitments(Item, Tags, Base, CommittedKeys, Opts) -> - case Item#tx.signature of - ?DEFAULT_SIG -> - case normal_tags(Item#tx.tags) of - true -> Base; - false -> - with_unsigned_commitment(Item, Tags, Base, CommittedKeys, Opts) - end; - _ -> with_signed_commitment(Item, Tags, Base, CommittedKeys, Opts) - end. 
-``` - -### with_unsigned_commitment - -Returns a commitments message for an item, containing an unsigned - -```erlang -with_unsigned_commitment(Item, Tags, UncommittedMessage, CommittedKeys, Opts) -> - ID = hb_util:human_id(Item#tx.unsigned_id), - UncommittedMessage#{ - <<"commitments">> => #{ - ID => - filter_unset( - #{ - <<"commitment-device">> => <<"ans104@1.0">>, - <<"committed">> => CommittedKeys, - <<"type">> => <<"unsigned-sha256">>, - <<"bundle">> => bundle_commitment_key(Tags, Opts), - <<"original-tags">> => original_tags(Item, Opts), - <<"field-target">> => - case Item#tx.target of - ?DEFAULT_TARGET -> unset; - Target -> hb_util:encode(Target) - end, - <<"field-anchor">> => - case Item#tx.anchor of - ?DEFAULT_LAST_TX -> unset; - LastTX -> LastTX - end - }, - Opts - ) - } - }. -``` - -### with_signed_commitment - -Returns a commitments message for an item, containing a signed - -```erlang -with_signed_commitment(Item, Tags, UncommittedMessage, CommittedKeys, Opts) -> - Address = hb_util:human_id(ar_wallet:to_address(Item#tx.owner)), - ID = hb_util:human_id(Item#tx.id), - Commitment = - filter_unset( - #{ - <<"commitment-device">> => <<"ans104@1.0">>, - <<"committer">> => Address, - <<"committed">> => CommittedKeys, - <<"signature">> => hb_util:encode(Item#tx.signature), - <<"keyid">> => - <<"publickey:", (hb_util:encode(Item#tx.owner))/binary>>, - <<"type">> => <<"rsa-pss-sha256">>, - <<"bundle">> => bundle_commitment_key(Tags, Opts), - <<"original-tags">> => original_tags(Item, Opts), - <<"field-anchor">> => - case Item#tx.anchor of - ?DEFAULT_LAST_TX -> unset; - LastTX -> LastTX - end, - <<"field-target">> => - case Item#tx.target of - ?DEFAULT_TARGET -> unset; - Target -> hb_util:encode(Target) - end - }, - Opts - ), - UncommittedMessage#{ - <<"commitments">> => #{ - ID => Commitment - } - }. -``` - -### bundle_commitment_key - -Return the bundle key for an item. -Check whether a list of key-value pairs contains only normalized keys. 
- -```erlang -bundle_commitment_key(Tags, Opts) -> - hb_util:bin(hb_maps:is_key(<<"bundle-format">>, Tags, Opts)). -``` - -### normal_tags - -Return the bundle key for an item. -Check whether a list of key-value pairs contains only normalized keys. - -```erlang -normal_tags(Tags) -> - lists:all( - fun({Key, _}) -> - hb_util:to_lower(hb_ao:normalize_key(Key)) =:= Key - end, - Tags - ). -``` - -### original_tags - -Return the original tags of an item if it is applicable. Otherwise, - -```erlang -original_tags(Item, _Opts) -> - case normal_tags(Item#tx.tags) of - true -> unset; - false -> encoded_tags_to_map(Item#tx.tags) - end. -``` - -### encoded_tags_to_map - -Convert an ANS-104 encoded tag list into a HyperBEAM-compatible map. - -```erlang -encoded_tags_to_map(Tags) -> - hb_util:list_to_numbered_message( - lists:map( - fun({Key, Value}) -> - #{ - <<"name">> => Key, - <<"value">> => Value - } - end, - Tags - ) - ). -``` - -### filter_unset - -Remove all undefined values from a map. - -```erlang -filter_unset(Map, Opts) -> - hb_maps:filter( - fun(_, Value) -> - case Value of - unset -> false; - _ -> true - end - end, - Map, - Opts - ). -``` - -### deduplicating_from_list - -Deduplicate a list of key-value pairs by key, generating a list of - -```erlang -deduplicating_from_list(Tags, Opts) -> - % Aggregate any duplicated tags into an ordered list of values. -``` - ---- - -*Generated from [dev_codec_ans104_from.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_ans104_from.erl)* diff --git a/docs/book/src/dev_codec_ans104_to.erl.md b/docs/book/src/dev_codec_ans104_to.erl.md deleted file mode 100644 index 6c045f251..000000000 --- a/docs/book/src/dev_codec_ans104_to.erl.md +++ /dev/null @@ -1,240 +0,0 @@ -# dev_codec_ans104_to - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_ans104_to.erl) - -Library functions for encoding messages to the ANS-104 format. 
- ---- - -## Exported Functions - -- `data/3` -- `maybe_load/3` -- `siginfo/2` -- `tags/4` - ---- - -### maybe_load - -Library functions for encoding messages to the ANS-104 format. -Determine if the message should be loaded from the cache and re-converted - -```erlang -maybe_load(RawTABM, Req, Opts) -> - case hb_util:atom(hb_ao:get(<<"bundle">>, Req, false, Opts)) of - false -> RawTABM; - true -> - % Convert back to the fully loaded structured@1.0 message, then - % convert to TABM with bundling enabled. -``` - -### siginfo - -Calculate the fields for a message, returning an initial TX record. - -```erlang -siginfo(Message, Opts) -> - MaybeCommitment = - hb_message:commitment( - #{ <<"commitment-device">> => <<"ans104@1.0">> }, - Message, - Opts - ), - case MaybeCommitment of - {ok, _, Commitment} -> commitment_to_tx(Commitment, Opts); - not_found -> - case hb_maps:find(<<"target">>, Message, Opts) of - {ok, EncodedTarget} -> - case hb_util:safe_decode(EncodedTarget) of - {ok, Target} when ?IS_ID(Target) -> - #tx{ target = Target }; - _ -> #tx{} - end; - error -> #tx{} - end; - multiple_matches -> - throw({multiple_ans104_commitments_unsupported, Message}) - end. -``` - -### commitment_to_tx - -Convert a commitment to a base TX record. 
Extracts the owner, signature, - -```erlang -commitment_to_tx(Commitment, Opts) -> - Signature = - hb_util:decode( - maps:get(<<"signature">>, Commitment, hb_util:encode(?DEFAULT_SIG)) - ), - Owner = - case hb_maps:find(<<"keyid">>, Commitment, Opts) of - {ok, KeyID} -> - hb_util:decode( - dev_codec_httpsig_keyid:remove_scheme_prefix(KeyID) - ); - error -> ?DEFAULT_OWNER - end, - Tags = - case hb_maps:find(<<"original-tags">>, Commitment, Opts) of - {ok, OriginalTags} -> original_tags_to_tags(OriginalTags); - error -> [] - end, - LastTX = - case hb_maps:find(<<"field-anchor">>, Commitment, Opts) of - {ok, EncodedLastTX} -> hb_util:decode(EncodedLastTX); - error -> ?DEFAULT_LAST_TX - end, - Target = - case hb_maps:find(<<"field-target">>, Commitment, Opts) of - {ok, EncodedTarget} -> hb_util:decode(EncodedTarget); - error -> ?DEFAULT_TARGET - end, - ?event({commitment_owner, Owner}), - ?event({commitment_signature, Signature}), - ?event({commitment_tags, Tags}), - ?event({commitment_last_tx, LastTX}), - #tx{ - owner = Owner, - signature = Signature, - tags = Tags, - anchor = LastTX, - target = Target - }. -``` - -### data - -Calculate the data field for a message. - -```erlang -data(TABM, Req, Opts) -> - DataKey = inline_key(TABM), - % Translate the keys into a binary map. If a key has a value that is a map, - % we recursively turn its children into messages. -``` - -### data_messages - -Calculate the data value for a message. The rules are: - -```erlang -data_messages(TABM, Opts) when is_map(TABM) -> - UncommittedTABM = - hb_maps:without( - [<<"commitments">>, <<"data">>, <<"target">>], - hb_private:reset(TABM), - Opts - ), - % If there are too many keys in the TABM, throw an error. -``` - -### tags - -Calculate the tags field for a data item. If the TX already has tags - -```erlang -tags(#tx{ tags = ExistingTags }, _, _, _) when ExistingTags =/= [] -> - ExistingTags; -``` - -### tags - -Calculate the tags field for a data item. 
If the TX already has tags - -```erlang -tags(TX, TABM, Data, Opts) -> - DataKey = inline_key(TABM), - MaybeCommitment = - hb_message:commitment( - #{ <<"commitment-device">> => <<"ans104@1.0">> }, - TABM, - Opts - ), - CommittedTagKeys = - case MaybeCommitment of - {ok, _, Commitment} -> - % There is already a commitment, so the tags and order are - % pre-determined. However, if the message has been bundled, - % any `+link`-suffixed keys in the committed list may need to - % be resolved to their base keys (e.g., `output+link` -> `output`). -``` - -### include_target_tag - -Return whether to include the `target` tag in the tags list. - -```erlang -include_target_tag(TX, TABM, Opts) -> - case {TX#tx.target, hb_maps:get(<<"target">>, TABM, undefined, Opts)} of - {?DEFAULT_TARGET, _} -> true; - {FieldTarget, TagTarget} when FieldTarget =/= TagTarget -> false; - _ -> true - end. -``` - -### committed_tag_keys_to_tags - -Apply the `ao-data-key` to the committed keys to generate the list of - -```erlang -committed_tag_keys_to_tags(TX, TABM, DataKey, Committed, Opts) -> - DataKeysToExclude = - case TX#tx.data of - Data when is_map(Data)-> maps:keys(Data); - _ -> [] - end, - case DataKey of - <<"data">> -> []; - _ -> [{<<"ao-data-key">>, DataKey}] - end ++ - lists:map( - fun(Key) -> - case hb_maps:find(Key, TABM, Opts) of - error -> throw({missing_committed_key, Key}); - {ok, Value} -> {Key, Value} - end - end, - hb_util:list_without( - [DataKey | DataKeysToExclude], - Committed - ) - ). -``` - -### inline_key - -Determine if an `ao-data-key` should be added to the message. - -```erlang -inline_key(Msg) -> - InlineKey = maps:get(<<"ao-data-key">>, Msg, undefined), - case { - InlineKey, - maps:get(<<"data">>, Msg, ?DEFAULT_DATA) == ?DEFAULT_DATA, - maps:is_key(<<"body">>, Msg) - andalso not ?IS_LINK(maps:get(<<"body">>, Msg, undefined)) - } of - {Explicit, _, _} when Explicit =/= undefined -> - % ao-data-key already exists, so we honor it. 
-``` - -### original_tags_to_tags - -Convert a HyperBEAM-compatible map into an ANS-104 encoded tag list, - -```erlang -original_tags_to_tags(TagMap) -> - OrderedList = hb_util:message_to_ordered_list(hb_private:reset(TagMap)), - ?event({ordered_tagmap, {explicit, OrderedList}, {input, {explicit, TagMap}}}), - lists:map( - fun(#{ <<"name">> := Key, <<"value">> := Value }) -> - {Key, Value} - end, - OrderedList -``` - ---- - -*Generated from [dev_codec_ans104_to.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_ans104_to.erl)* diff --git a/docs/book/src/dev_codec_cookie.erl.md b/docs/book/src/dev_codec_cookie.erl.md deleted file mode 100644 index b377ac2c8..000000000 --- a/docs/book/src/dev_codec_cookie.erl.md +++ /dev/null @@ -1,570 +0,0 @@ -# dev_codec_cookie - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_cookie.erl) - -A utility device that manages setting and encoding/decoding the cookies -found in requests from a caller. This device implements the `~cookie@1.0` -codec, inline with the `~message@1.0` schema for conversion. -Additionally, a `commit` to a message using a secret generated and stored -in the cookies of the caller, and a `verify` key that validates said -commitments. In addition, a `generate` key is provided to perform only the -generation side of the commitment process. The `finalize` key may be -employed to add a `set` operation to the end of a message sequence, which -is used in hooks that need to ensure a caller always receives cookies -generated outside of the normal AO-Core execution flow. In totality, these -keys implement the `generator` interface type, and may be employed in -various contexts. For example, `~auth-hook@1.0` may be configured to use -this device to generate and store secrets in the cookies of the caller, -which are then used with the `~proxy-wallet@1.0` device to sign requests. 
-The `commit` and `verify` keys utilize the `~httpsig@1.0`'s HMAC `secret` -commitment scheme, which uses a secret key to commit to a message, with the -`committer` being listed as a hash of the secret. -This device supports the following paths: -`/commit`: Sets a `secret` key in the cookies of the caller. The name of -the cookie is calculated as the hash of the secret. -`/verify`: Verifies the caller's request by checking the committer in the -request matches the secret in the cookies of the base message. -`/store`: Sets the keys in the request message in the cookies of the caller. -`/extract`: Extracts the cookies from a base message. -`/reset`: Removes all cookie keys from the base message. -`/to`: Converts a message containing cookie sources (`cookie`, `set-cookie`, -or `priv/cookie`) into the format specified in the request message (e.g. -`set-cookie`, `cookie`). -`/from`: Converts a message containing encoded cookies into a message -containing the cookies parsed and normalized. - ---- - -## Exported Functions - -- `commit/3` -- `extract/3` -- `finalize/3` -- `from/3` -- `generate/3` -- `get_cookie/3` -- `opts/1` -- `reset/2` -- `store/3` -- `to/3` -- `verify/3` - ---- - -### opts - -A utility device that manages setting and encoding/decoding the cookies -Get the private store options to use for functions in the cookie device. - -```erlang -opts(Opts) -> hb_private:opts(Opts). -%%% ~message@1.0 Commitments API keys. -``` - -### commit - -```erlang -commit(Base, Req, RawOpts) -> dev_codec_cookie_auth:commit(Base, Req, RawOpts). -``` - -### verify - -Preprocessor keys that utilize cookies and the `~secret@1.0` device to - -```erlang -verify(Base, Req, RawOpts) -> dev_codec_cookie_auth:verify(Base, Req, RawOpts). -``` - -### generate - -Preprocessor keys that utilize cookies and the `~secret@1.0` device to - -```erlang -generate(Base, Req, Opts) -> - dev_codec_cookie_auth:generate(Base, Req, Opts). 
-``` - -### finalize - -Finalize an `on-request` hook by adding the `set-cookie` header to the - -```erlang -finalize(Base, Request, Opts) -> - dev_codec_cookie_auth:finalize(Base, Request, Opts). -``` - -### get_cookie - -Get the cookie with the given key from the base message. The format of - -```erlang -get_cookie(Base, Req, RawOpts) -> - Opts = opts(RawOpts), - {ok, Cookies} = extract(Base, Req, Opts), - Key = hb_maps:get(<<"key">>, Req, undefined, Opts), - case hb_maps:get(Key, Cookies, undefined, Opts) of - undefined -> {error, not_found}; - Cookie -> - Format = hb_maps:get(<<"format">>, Req, <<"default">>, Opts), - case Format of - <<"default">> -> {ok, Cookie}; - <<"set-cookie">> -> {ok, normalize_cookie_value(Cookie)}; - <<"cookie">> -> {ok, value(Cookie)} - end - end. -``` - -### extract - -Return the parsed and normalized cookies from a message. - -```erlang -extract(Msg, Req, Opts) -> - {ok, MsgWithCookie} = from(Msg, Req, Opts), - Cookies = hb_private:get(<<"cookie">>, MsgWithCookie, #{}, Opts), - {ok, Cookies}. -``` - -### store - -Set the keys in the request message in the cookies of the caller. Removes - -```erlang -store(Base, Req, RawOpts) -> - Opts = opts(RawOpts), - ?event({store, {base, Base}, {req, Req}}), - {ok, ExistingCookies} = extract(Base, Req, Opts), - ?event({store, {existing_cookies, ExistingCookies}}), - {ok, ResetBase} = reset(Base, Opts), - ?event({store, {reset_base, ResetBase}}), - MsgToSet = - hb_maps:without( - [ - <<"path">>, - <<"accept-bundle">>, - <<"ao-peer">>, - <<"host">>, - <<"method">>, - <<"body">> - ], - hb_private:reset(Req), - Opts - ), - ?event({store, {msg_to_set, MsgToSet}}), - NewCookies = hb_maps:merge(ExistingCookies, MsgToSet, Opts), - NewBase = hb_private:set(ResetBase, <<"cookie">>, NewCookies, Opts), - {ok, NewBase}. 
-``` - -### reset - -Remove all cookie keys from the given message (including `cookie` and - -```erlang -reset(Base, RawOpts) -> - Opts = opts(RawOpts), - WithoutBaseCookieKeys = - hb_maps:without( - [<<"cookie">>, <<"set-cookie">>], - Base, - Opts - ), - WithoutPrivCookie = - hb_private:set( - WithoutBaseCookieKeys, - <<"cookie">>, - unset, - Opts - ), - {ok, WithoutPrivCookie}. -``` - -### to - -Convert a message containing cookie sources (`cookie`, `set-cookie`, - -```erlang -to(Msg, Req, Opts) -> - ?event({to, {msg, Msg}, {req, Req}}), - CookieOpts = opts(Opts), - LoadedMsg = hb_cache:ensure_all_loaded(Msg, CookieOpts), - ?event({to, {loaded_msg, LoadedMsg}}), - do_to(LoadedMsg, Req, CookieOpts). -``` - -### do_to - -```erlang -do_to(Msg, Req = #{ <<"format">> := <<"set-cookie">> }, Opts) when is_map(Msg) -> - ?event({to_set_cookie, {msg, Msg}, {req, Req}}), - {ok, ExtractedParsedCookies} = extract(Msg, Req, Opts), - {ok, ResetBase} = reset(Msg, Opts), - SetCookieLines = - maps:values( - maps:map( - fun to_set_cookie_line/2, - ExtractedParsedCookies - ) - ), - MsgWithSetCookie = - ResetBase#{ - <<"set-cookie">> => SetCookieLines - }, - {ok, MsgWithSetCookie}; -``` - -### do_to - -```erlang -do_to(Msg, Req = #{ <<"format">> := <<"cookie">> }, Opts) when is_map(Msg) -> - ?event({to_cookie, {msg, Msg}, {req, Req}}), - {ok, ExtractedParsedCookies} = extract(Msg, Req, Opts), - {ok, ResetBase} = reset(Msg, Opts), - CookieLines = - hb_maps:values( - hb_maps:map( - fun to_cookie_line/2, - ExtractedParsedCookies, - Opts - ), - Opts - ), - ?event({to_cookie, {cookie_lines, CookieLines}}), - CookieLine = join(CookieLines, <<"; ">>), - {ok, ResetBase#{ <<"cookie">> => CookieLine }}; -``` - -### do_to - -```erlang -do_to(Msg, _Req, _Opts) when is_map(Msg) -> - error({cookie_to_error, {no_format_specified, Msg}}); -``` - -### do_to - -```erlang -do_to(Msg, _Req, _Opts) -> - error({cookie_to_error, {unexpected_message_format, Msg}}). 
-``` - -### to_set_cookie_line - -Convert a single cookie into a `set-cookie` header line. The cookie - -```erlang -to_set_cookie_line(Key, RawCookie) -> - Cookie = normalize_cookie_value(RawCookie), - % Encode the cookie key-value pair as a string to use as the base. -``` - -### to_cookie_line - -Convert a single cookie into a `cookie` header component. These - -```erlang -to_cookie_line(Key, Cookie) -> - to_set_cookie_line(Key, value(Cookie)). -``` - -### from - -Normalize a message containing a `cookie`, `set-cookie`, and potentially - -```erlang -from(Msg, Req, Opts) -> - CookieOpts = opts(Opts), - LoadedMsg = hb_cache:ensure_all_loaded(Msg, Opts), - do_from(LoadedMsg, Req, CookieOpts). -``` - -### do_from - -```erlang -do_from(Msg, Req, Opts) when is_map(Msg) -> - {ok, ResetBase} = reset(Msg, Opts), - % Get the cookies, parsed, from each available source. -``` - -### do_from - -```erlang -do_from(CookiesMsg, _Req, _Opts) -> - error({cookie_from_error, {unexpected_message_format, CookiesMsg}}). -``` - -### from_cookie - -Convert the `cookie` key into a parsed cookie message. `cookie` headers - -```erlang -from_cookie(#{ <<"cookie">> := Cookie }, Req, Opts) -> - from_cookie(Cookie, Req, Opts); -``` - -### from_cookie - -Convert the `cookie` key into a parsed cookie message. `cookie` headers - -```erlang -from_cookie(Cookies, Req, Opts) when is_list(Cookies) -> - MergedParsed = - lists:foldl( - fun(Cookie, Acc) -> - {ok, Parsed} = from_cookie(Cookie, Req, Opts), - hb_maps:merge(Acc, Parsed, Opts) - end, - #{}, - Cookies - ), - {ok, MergedParsed}; -``` - -### from_cookie - -Convert the `cookie` key into a parsed cookie message. 
`cookie` headers - -```erlang -from_cookie(Cookie, _Req, _Opts) when is_binary(Cookie) -> - BinaryCookiePairs = split(semicolon, Cookie), - KeyValList = - lists:map( - fun(BinaryCookiePair) -> - {[Key, Value], _Rest} = split(pair, BinaryCookiePair), - {Key, hb_escape:decode(Value)} - end, - BinaryCookiePairs - ), - NormalizedMessage = maps:from_list(KeyValList), - {ok, NormalizedMessage}; -``` - -### from_cookie - -Convert the `cookie` key into a parsed cookie message. `cookie` headers - -```erlang -from_cookie(_MsgWithoutCookie, _Req, _Opts) -> - % The cookie key is not present in the message, so we return an empty map. -``` - -### from_set_cookie - -Convert a `set-cookie` header line into a cookie message. The `set-cookie` - -```erlang -from_set_cookie(#{ <<"set-cookie">> := Cookie }, Req, Opts) -> - ?event({from_set_cookie, {cookie, Cookie}}), - from_set_cookie(Cookie, Req, Opts); -``` - -### from_set_cookie - -Convert a `set-cookie` header line into a cookie message. The `set-cookie` - -```erlang -from_set_cookie(MsgWithoutSet, _Req, _Opts) when is_map(MsgWithoutSet) -> - % The set-cookie key is not present in the message, so we return an empty map. -``` - -### from_set_cookie - -```erlang -from_set_cookie(Lines, Req, Opts) when is_list(Lines) -> - MergedParsed = - lists:foldl( - fun(Line, Acc) -> - {ok, Parsed} = from_set_cookie(Line, Req, Opts), - hb_maps:merge(Acc, Parsed) - end, - #{}, - Lines - ), - {ok, MergedParsed}; -``` - -### from_set_cookie - -```erlang -from_set_cookie(Line, _Req, Opts) when is_binary(Line) -> - {[Key, Value], Rest} = split(pair, Line), - ValueDecoded = hb_escape:decode(Value), - % If there is no remaining binary after the pair, we have a simple key-value - % pair, returning just the binary as the value. Otherwise, we split the - % remaining binary into attributes and flags and return a message with the - % value and those parsed elements. 
-``` - -### to_sorted_list - -Takes a message or list of binaries and returns a sorted list of key- - -```erlang -to_sorted_list(Msg) when is_map(Msg) -> - lists:keysort( - 1, - [ - {trim_bin(hb_util:bin(K)), trim_bin(V)} - || {K, V} <- maps:to_list(Msg) - ] - ); -``` - -### to_sorted_list - -Takes a message or list of binaries and returns a sorted list of key- - -```erlang -to_sorted_list(Binaries) when is_list(Binaries) -> - lists:sort( - lists:map( - fun(Bin) -> trim_bin(hb_util:bin(Bin)) end, - Binaries - ) - ). -``` - -### value - -Take a single parse cookie and return only the value (ignoring attributes - -```erlang -value(Msg) when is_map(Msg) -> - maps:get(<<"value">>, Msg, Msg); -``` - -### value - -Take a single parse cookie and return only the value (ignoring attributes - -```erlang -value(Bin) when is_binary(Bin) -> - Bin. -``` - -### normalize_cookie_value - -Normalize a cookie value to a map with the following keys: - -```erlang -normalize_cookie_value(Msg) when is_map(Msg) -> - Msg#{ - <<"value">> => maps:get(<<"value">>, Msg, Msg), - <<"attributes">> => maps:get(<<"attributes">>, Msg, #{}), - <<"flags">> => maps:get(<<"flags">>, Msg, []) - }; -``` - -### normalize_cookie_value - -Normalize a cookie value to a map with the following keys: - -```erlang -normalize_cookie_value(Bin) when is_binary(Bin) -> - #{ - <<"value">> => Bin, - <<"attributes">> => #{}, - <<"flags">> => [] - }. -``` - -### trim_bin - -Trim a binary of leading and trailing whitespace. - -```erlang -trim_bin(Bin) when is_binary(Bin) -> - list_to_binary(string:trim(binary_to_list(Bin))). -``` - -### join - -Join a list of binaries into a `separator`-separated string. Abstracts - -```erlang -join(Binaries, Separator) -> - hb_util:bin( - string:join( - lists:map(fun hb_util:list/1, Binaries), - hb_util:list(Separator) - ) - ). -``` - -### split - -Split a binary by a separator type (`pair`, `lines`, or `attributes`). 
- -```erlang -split(pair, Bin) -> - [Key, ValueRest] = binary:split(Bin, <<"=">>), - {_, Value, Rest} = hb_util:split_depth_string_aware_single($;, ValueRest), - {[Key, unquote(Value)], trim_leading(Rest)}; -``` - -### split - -Split a binary by a separator type (`pair`, `lines`, or `attributes`). - -```erlang -split(lines, Bin) -> - lists:map(fun trim_leading/1, hb_util:split_depth_string_aware($,, Bin)); -``` - -### split - -Split a binary by a separator type (`pair`, `lines`, or `attributes`). - -```erlang -split(semicolon, Bin) -> - lists:map(fun trim_leading/1, hb_util:split_depth_string_aware($;, Bin)). -``` - -### trim_leading - -Remove leading whitespace from a binary, if present. - -```erlang -trim_leading(Line) when not is_binary(Line) -> - trim_leading(hb_util:bin(Line)); -``` - -### trim_leading - -Remove leading whitespace from a binary, if present. - -```erlang -trim_leading(<<>>) -> <<>>; -``` - -### trim_leading - -Remove leading whitespace from a binary, if present. - -```erlang -trim_leading(<<" ", Rest/binary>>) -> trim_leading(Rest); -``` - -### trim_leading - -Remove leading whitespace from a binary, if present. -Unquote a binary if it is quoted. If it is not quoted, we return the - -```erlang -trim_leading(Line) -> Line. -``` - -### unquote - -Remove leading whitespace from a binary, if present. -Unquote a binary if it is quoted. 
If it is not quoted, we return the - -```erlang -unquote(<< $\", Rest/binary>>) -> - {Unquoted, _} = hb_util:split_escaped_single($\", Rest), - Unquoted; -``` - ---- - -*Generated from [dev_codec_cookie.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_cookie.erl)* diff --git a/docs/book/src/dev_codec_cookie_auth.erl.md b/docs/book/src/dev_codec_cookie_auth.erl.md deleted file mode 100644 index 6f50f3641..000000000 --- a/docs/book/src/dev_codec_cookie_auth.erl.md +++ /dev/null @@ -1,329 +0,0 @@ -# dev_codec_cookie_auth - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_cookie_auth.erl) - -Implements the `message@1.0` commitment interface for the `~cookie@1.0`, -as well as the `generator` interface type for the `~auth-hook@1.0` device. -See the [cookie codec](dev_codec_cookie.html) documentation for more details. - ---- - -## Exported Functions - -- `commit/3` -- `finalize/3` -- `generate/3` -- `verify/3` - ---- - -### generate - -Implements the `message@1.0` commitment interface for the `~cookie@1.0`, -Generate a new secret (if no `committer` specified), and use it as the - -```erlang -generate(Base, Request, Opts) -> - {WithCookie, Secrets} = - case find_secrets(Request, Opts) of - [] -> - {ok, GeneratedSecret} = generate_secret(Base, Request, Opts), - {ok, Updated} = store_secret(GeneratedSecret, Request, Opts), - {Updated, [GeneratedSecret]}; - FoundSecrets -> - {Request, FoundSecrets} - end, - ?event({normalized_cookies_found, {secrets, Secrets}}), - { - ok, - WithCookie#{ - <<"secret">> => Secrets - } - }. 
-``` - -### finalize - -Finalize an `on-request` hook by adding the cookie to the chain of - -```erlang -finalize(Base, Request, Opts) -> - ?event(debug_auth, {finalize, {base, Base}, {request, Request}}), - maybe - {ok, SignedMsg} ?= hb_maps:find(<<"request">>, Request, Opts), - {ok, MessageSequence} ?= hb_maps:find(<<"body">>, Request, Opts), - % Cookie auth adds set-cookie to response - {ok, #{ <<"set-cookie">> := SetCookie }} = - dev_codec_cookie:to( - SignedMsg, - #{ <<"format">> => <<"set-cookie">> }, - Opts - ), - { - ok, - MessageSequence ++ - [#{ <<"path">> => <<"set">>, <<"set-cookie">> => SetCookie }] - } - else error -> - {error, no_request} - end. -``` - -### commit - -Generate a new secret (if no `committer` specified), and use it as the - -```erlang -commit(Base, Request, RawOpts) when ?IS_LINK(Request) -> - Opts = dev_codec_cookie:opts(RawOpts), - commit(Base, hb_cache:ensure_loaded(Request, Opts), Opts); -``` - -### commit - -Generate a new secret (if no `committer` specified), and use it as the - -```erlang -commit(Base, Req = #{ <<"secret">> := Secret }, RawOpts) -> - Opts = dev_codec_cookie:opts(RawOpts), - commit(hb_cache:ensure_loaded(Secret, Opts), Base, Req, Opts); -``` - -### commit - -Generate a new secret (if no `committer` specified), and use it as the - -```erlang -commit(Base, Request, RawOpts) -> - Opts = dev_codec_cookie:opts(RawOpts), - % Calculate the key to use for the commitment. -``` - -### commit - -Given the secret key, commit the message and set the cookie. This - -```erlang -commit(Secret, Base, Request, Opts) -> - {ok, CommittedMsg} = - dev_codec_httpsig_proxy:commit( - <<"cookie@1.0">>, - Secret, - Base, - Request, - Opts - ), - store_secret(Secret, CommittedMsg, Opts). -``` - -### store_secret - -Update the nonces for a given secret. 
- -```erlang -store_secret(Secret, Msg, Opts) -> - CookieAddr = dev_codec_httpsig_keyid:secret_key_to_committer(Secret), - % Create the cookie parameters, using the name as the key and the secret as - % the value. -``` - -### verify - -Verify the HMAC commitment with the key being the secret from the - -```erlang -verify(Base, ReqLink, RawOpts) when ?IS_LINK(ReqLink) -> - Opts = dev_codec_cookie:opts(RawOpts), - verify(Base, hb_cache:ensure_loaded(ReqLink, Opts), Opts); -``` - -### verify - -Verify the HMAC commitment with the key being the secret from the - -```erlang -verify(Base, Req = #{ <<"secret">> := Secret }, RawOpts) -> - Opts = dev_codec_cookie:opts(RawOpts), - ?event({verify_with_explicit_key, {base, Base}, {request, Req}}), - dev_codec_httpsig_proxy:verify( - hb_util:decode(Secret), - Base, - Req, - Opts - ); -``` - -### verify - -Verify the HMAC commitment with the key being the secret from the - -```erlang -verify(Base, Request, RawOpts) -> - Opts = dev_codec_cookie:opts(RawOpts), - ?event({verify_finding_key, {base, Base}, {request, Request}}), - case find_secret(Request, Opts) of - {ok, Secret} -> - dev_codec_httpsig_proxy:verify( - hb_util:decode(Secret), - Base, - Request, - Opts - ); - {error, Err} -> - {error, Err} - end. -``` - -### generate_secret - -Generate a new secret key for the given request. The user may specify - -```erlang -generate_secret(_Base, Request, Opts) -> - case hb_maps:get(<<"generator">>, Request, undefined, Opts) of - undefined -> - % If no generator is specified, use the default generator. -``` - -### default_generator - -Generate a new secret key using the default generator. - -```erlang -default_generator(_Opts) -> - {ok, hb_util:encode(crypto:strong_rand_bytes(64))}. -``` - -### execute_generator - -Execute a generator function. See `generate_secret/3` for more details. 
- -```erlang -execute_generator(GeneratorPath, Opts) when is_binary(GeneratorPath) -> - hb_ao:resolve(GeneratorPath, Opts); -``` - -### execute_generator - -Execute a generator function. See `generate_secret/3` for more details. -Find all secrets in the cookie of a message. - -```erlang -execute_generator(Generator, Opts) -> - Path = hb_maps:get(<<"path">>, Generator, <<"generate">>, Opts), - hb_ao:resolve(Generator#{ <<"path">> => Path }, Opts). -``` - -### find_secrets - -Execute a generator function. See `generate_secret/3` for more details. -Find all secrets in the cookie of a message. - -```erlang -find_secrets(Request, Opts) -> - maybe - {ok, Cookie} ?= dev_codec_cookie:extract(Request, #{}, Opts), - [ - hb_maps:get(SecretRef, Cookie, secret_unavailable, Opts) - || - SecretRef = <<"secret-", _/binary>> <- hb_maps:keys(Cookie) - ] - else error -> [] - end. -``` - -### find_secret - -Find the secret key for the given committer, if it exists in the cookie. - -```erlang -find_secret(Request, Opts) -> - maybe - {ok, Committer} ?= hb_maps:find(<<"committer">>, Request, Opts), - find_secret(Committer, Request, Opts) - else error -> {error, no_secret} - end. -``` - -### find_secret - -```erlang -find_secret(Committer, Request, Opts) -> - maybe - {ok, Cookie} ?= dev_codec_cookie:extract(Request, #{}, Opts), - {ok, _Secret} ?= hb_maps:find(<<"secret-", Committer/binary>>, Cookie, Opts) - else error -> {error, not_found} - end. -``` - -### directly_invoke_commit_verify_test - -Call the cookie codec's `commit` and `verify` functions directly. 
- -```erlang -directly_invoke_commit_verify_test() -> - Base = #{ <<"test-key">> => <<"test-value">> }, - CommittedMsg = - hb_message:commit( - Base, - #{}, - #{ - <<"commitment-device">> => <<"cookie@1.0">> - } - ), - ?event({committed_msg, CommittedMsg}), - ?assertEqual(1, length(hb_message:signers(CommittedMsg, #{}))), - VerifyReq = - apply_cookie( - CommittedMsg#{ - <<"committers">> => hb_message:signers(CommittedMsg, #{}) - }, - CommittedMsg, - #{} - ), - VerifyReqWithoutComms = hb_maps:without([<<"commitments">>], VerifyReq, #{}), - ?event({verify_req_without_comms, VerifyReqWithoutComms}), - ?assert(hb_message:verify(CommittedMsg, VerifyReqWithoutComms, #{})), - ok. -``` - -### http_set_get_cookies_test - -Set keys in a cookie and verify that they can be parsed into a message. - -```erlang -http_set_get_cookies_test() -> - Node = hb_http_server:start_node(#{}), - {ok, SetRes} = - hb_http:get( - Node, - <<"/~cookie@1.0/store?k1=v1&k2=v2">>, - #{} - ), - ?event(debug_cookie, {set_cookie_test, {set_res, SetRes}}), - ?assertMatch(#{ <<"set-cookie">> := _ }, SetRes), - Req = apply_cookie(#{ <<"path">> => <<"/~cookie@1.0/extract">> }, SetRes, #{}), - {ok, Res} = hb_http:get(Node, Req, #{}), - ?assertMatch(#{ <<"k1">> := <<"v1">>, <<"k2">> := <<"v2">> }, Res), - ok. 
-``` - -### apply_cookie - -Takes the cookies from the `GenerateResponse` and applies them to the - -```erlang -apply_cookie(NextReq, GenerateResponse, Opts) -> - {ok, Cookie} = dev_codec_cookie:extract(GenerateResponse, #{}, Opts), - {ok, NextWithParsedCookie} = dev_codec_cookie:store(NextReq, Cookie, Opts), - {ok, NextWithCookie} = - dev_codec_cookie:to( - NextWithParsedCookie, - #{ <<"format">> => <<"cookie">> }, - Opts - ), -``` - ---- - -*Generated from [dev_codec_cookie_auth.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_cookie_auth.erl)* diff --git a/docs/book/src/dev_codec_cookie_test_vectors.erl.md b/docs/book/src/dev_codec_cookie_test_vectors.erl.md deleted file mode 100644 index 54cf6b062..000000000 --- a/docs/book/src/dev_codec_cookie_test_vectors.erl.md +++ /dev/null @@ -1,904 +0,0 @@ -# dev_codec_cookie_test_vectors - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_cookie_test_vectors.erl) - -A battery of cookie parsing and encoding test vectors. - ---- - -### assert_set - -A battery of cookie parsing and encoding test vectors. -Assert that when given the inputs in the test set, the outputs are - -```erlang -assert_set(TestSet, Fun) -> - {Inputs, Expected} = maps:get(TestSet, test_data()), - ?event(match_cookie, {starting_group_match, {inputs, {explicit, Inputs}}}), - lists:foreach( - fun(Input) -> - Res = Fun(Input), - ?event( - match_cookie, - {matching, - {expected, {explicit, Expected}, {output, {explicit, Res}}} - } - ), - ?assertEqual(Expected, Res) - end, - Inputs - ). -``` - -### to_string - -Convert a cookie message to a string. -Convert a string to a cookie message. - -```erlang -to_string(CookieMsg) -> - {ok, BaseMsg} = dev_codec_cookie:store(#{}, CookieMsg, #{}), - {ok, Msg} = - dev_codec_cookie:to( - BaseMsg, - #{ <<"format">> => <<"set-cookie">> }, - #{} - ), - hb_maps:get(<<"set-cookie">>, Msg, [], #{}). -``` - -### from_string - -Convert a cookie message to a string. 
-Convert a string to a cookie message. - -```erlang -from_string(String) -> - {ok, BaseMsg} = - dev_codec_cookie:from( - #{ <<"set-cookie">> => String }, - #{}, - #{} - ), - {ok, Cookie} = dev_codec_cookie:extract(BaseMsg, #{}, #{}), - Cookie. -``` - -### test_data - -returns a map of tuples of the form `testset_name => {[before], after}`. - -```erlang -test_data() -> - #{ - from_string_raw_value => - { - [<<"k1=v1">>, <<"k1=\"v1\"">>], - #{ <<"k1">> => <<"v1">> } - }, - from_string_attributes => - { - [<<"k1=v1; k2=v2">>, <<"k1=\"v1\"; k2=\"v2\"">>], - #{ - <<"k1">> => - #{ - <<"value">> => <<"v1">>, - <<"attributes">> => #{ <<"k2">> => <<"v2">> } - } - } - }, - from_string_flags => - { - [<<"k1=v1; k2=v2; f1; f2">>, <<"k1=\"v1\"; k2=\"v2\"; f1; f2">>], - #{ - <<"k1">> => - #{ - <<"value">> => <<"v1">>, - <<"attributes">> => #{ <<"k2">> => <<"v2">> }, - <<"flags">> => [<<"f1">>, <<"f2">>] - } - } - }, - to_string_raw_value => - { - [ - #{ <<"k1">> => <<"v1">> }, - #{ <<"k1">> => #{ <<"value">> => <<"v1">> } }, - #{ - <<"k1">> => - #{ - <<"value">> => <<"v1">>, - <<"attributes">> => #{}, - <<"flags">> => [] - } - } - ], - [<<"k1=\"v1\"">>] - }, - to_string_attributes => - { - [ - #{ - <<"k1">> => - #{ - <<"value">> => <<"v1">>, - <<"attributes">> => #{ <<"k2">> => <<"v2">> } - } - }, - #{ - <<"k1">> => - #{ - <<"value">> => <<"v1">>, - <<"attributes">> => #{ <<"k2">> => <<"v2">> }, - <<"flags">> => [] - } - } - ], - [<<"k1=\"v1\"; k2=v2">>] - }, - to_string_flags => - { - [ - #{ - <<"k1">> => - #{ - <<"value">> => <<"v1">>, - <<"flags">> => [<<"f1">>, <<"f2">>] - } - }, - #{ - <<"k1">> => - #{ - <<"value">> => <<"v1">>, - <<"attributes">> => #{}, - <<"flags">> => [<<"f1">>, <<"f2">>] - } - } - ], - [<<"k1=\"v1\"; f1; f2">>] - }, - parse_realworld_1 => - { - [ - [ - <<"cart=110045_77895_53420; SameSite=Strict">>, - <<"affiliate=e4rt45dw; SameSite=Lax">> - ] - ], - #{ - <<"cart">> => - #{ - <<"value">> => <<"110045_77895_53420">>, - <<"attributes">> => #{ 
<<"SameSite">> => <<"Strict">> } - }, - <<"affiliate">> => - #{ - <<"value">> => <<"e4rt45dw">>, - <<"attributes">> => #{ <<"SameSite">> => <<"Lax">> } - } - } - }, - parse_user_settings_and_permissions => - { - [ - [ - <<"user_settings=notifications=true,privacy=strict,layout=grid; Path=/; HttpOnly; Secure">>, - <<"user_permissions=\"read;write;delete\"; Path=/; SameSite=None; Secure">> - ] - ], - #{ - <<"user_settings">> => - #{ - <<"value">> => <<"notifications=true,privacy=strict,layout=grid">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> }, - <<"flags">> => [<<"HttpOnly">>, <<"Secure">>] - }, - <<"user_permissions">> => - #{ - <<"value">> => <<"read;write;delete">>, - <<"attributes">> => #{ <<"Path">> => <<"/">>, <<"SameSite">> => <<"None">> }, - <<"flags">> => [<<"Secure">>] - } - } - }, - parse_session_and_temp_data => - { - [ - [ - <<"SESSION_ID=abc123xyz ; path= /dashboard ; samesite=Strict ; Secure">>, - <<"temp_data=cleanup_me; Max-Age=-1; Path=/">> - ] - ], - #{ - <<"SESSION_ID">> => - #{ - <<"value">> => <<"abc123xyz ">>, - <<"attributes">> => #{ <<"path">> => <<"/dashboard">>, <<"samesite">> => <<"Strict">> }, - <<"flags">> => [<<"Secure">>] - }, - <<"temp_data">> => - #{ - <<"value">> => <<"cleanup_me">>, - <<"attributes">> => #{ <<"Max-Age">> => <<"-1">>, <<"Path">> => <<"/">> } - } - } - }, - parse_empty_and_anonymous => - { - [ - [ - <<"user_preference=; Path=/; HttpOnly">>, - <<"=anonymous_session_123; Path=/guest">> - ] - ], - #{ - <<"user_preference">> => - #{ - <<"value">> => <<"">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> }, - <<"flags">> => [<<"HttpOnly">>] - }, - <<>> => - #{ - <<"value">> => <<"anonymous_session_123">>, - <<"attributes">> => #{ <<"Path">> => <<"/guest">> } - } - } - }, - parse_app_config_and_analytics => - { - [ - [ - <<"$app_config$=theme@dark!%20mode; Path=/">>, - 
<<"analytics_session_data_with_very_long_name_for_tracking_purposes=comprehensive_user_behavior_analytics_data_including_page_views_click_events_scroll_depth_time_spent_geographic_location_device_info_browser_details_and_more; Path=/">> - ] - ], - #{ - <<"$app_config$">> => - #{ - <<"value">> => <<"theme@dark! mode">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> } - }, - <<"analytics_session_data_with_very_long_name_for_tracking_purposes">> => - #{ - <<"value">> => <<"comprehensive_user_behavior_analytics_data_including_page_views_click_events_scroll_depth_time_spent_geographic_location_device_info_browser_details_and_more">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> } - } - } - }, - parse_debug_and_tracking => - { - [ - [ - <<"debug_info=\\tIndented\\t\\nMultiline\\n; Path=/">>, - <<"tracking_id=user_12345; CustomAttr=CustomValue; Analytics=Enabled; Path=/; HttpOnly">> - ] - ], - #{ - <<"debug_info">> => - #{ - <<"value">> => <<"\\tIndented\\t\\nMultiline\\n">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> } - }, - <<"tracking_id">> => - #{ - <<"value">> => <<"user_12345">>, - <<"attributes">> => #{ - <<"CustomAttr">> => <<"CustomValue">>, - <<"Analytics">> => <<"Enabled">>, - <<"Path">> => <<"/">> - }, - <<"flags">> => [<<"HttpOnly">>] - } - } - }, - parse_cache_and_form_token => - { - [ - [ - <<"cache_bust=v1.2.3; Expires=Mon, 99 Feb 2099 25:99:99 GMT; Path=/">>, - <<"form_token=form_abc123; SameSite=Strick; Secure">> - ] - ], - #{ - <<"cache_bust">> => - #{ - <<"value">> => <<"v1.2.3">>, - <<"attributes">> => #{ - <<"Expires">> => <<"Mon, 99 Feb 2099 25:99:99 GMT">>, - <<"Path">> => <<"/">> - } - }, - <<"form_token">> => - #{ - <<"value">> => <<"form_abc123">>, - <<"attributes">> => #{ <<"SameSite">> => <<"Strick">> }, - <<"flags">> => [<<"Secure">>] - } - } - }, - parse_token_and_reactions => - { - [ - [ - 
<<"access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c; Path=/; HttpOnly; Secure">>, - <<"reaction_prefs=👍👎; Path=/; Secure">> - ] - ], - #{ - <<"access_token">> => - #{ - <<"value">> => <<"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> }, - <<"flags">> => [<<"HttpOnly">>, <<"Secure">>] - }, - <<"reaction_prefs">> => - #{ - <<"value">> => <<"👍👎">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> }, - <<"flags">> => [<<"Secure">>] - } - } - }, - parse_error_log_and_auth_token => - { - [ - [ - <<"error_log=\"timestamp=2024-01-15 10:30:00\\nlevel=ERROR\\tmessage=Database connection failed\"; Path=/">>, - <<"auth_token=bearer_xyz789; Secure; Path=/api; Secure; HttpOnly">> - ] - ], - #{ - <<"error_log">> => - #{ - <<"value">> => <<"timestamp=2024-01-15 10:30:00\\nlevel=ERROR\\tmessage=Database connection failed">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> } - }, - <<"auth_token">> => - #{ - <<"value">> => <<"bearer_xyz789">>, - <<"attributes">> => #{ <<"Path">> => <<"/api">> }, - <<"flags">> => [<<"HttpOnly">>,<<"Secure">>, <<"Secure">>] - } - } - }, - parse_csrf_and_quick_setting => - { - [ - [ - <<"csrf_token=abc123; \"HttpOnly\"; Path=/">>, - <<"quick_setting=\"enabled\"">> - ] - ], - #{ - <<"csrf_token">> => - #{ - <<"value">> => <<"abc123">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> }, - <<"flags">> => [<<"HttpOnly">>] - }, - <<"quick_setting">> => <<"enabled">> - } - }, - parse_admin_and_upload => - { - [ - [ - <<"secret_key=confidential; Path=%2Fadmin">>, - <<"admin_flag=true; Path=/">> - ] - ], - #{ - <<"secret_key">> => - #{ - <<"value">> => <<"confidential">>, - <<"attributes">> => #{ <<"Path">> => <<"%2Fadmin">> } - }, - <<"admin_flag">> => - #{ - <<"value">> => <<"true">>, - 
<<"attributes">> => #{ <<"Path">> => <<"/">> } - } - } - }, - parse_search_and_tags => - { - [ - [ - <<"search_history=\"query,results\"; Path=/">>, - <<"user_tags=\"work,personal\"; Path=/">> - ] - ], - #{ - <<"search_history">> => - #{ - <<"value">> => <<"query,results">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> } - }, - <<"user_tags">> => - #{ - <<"value">> => <<"work,personal">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> } - } - } - }, - to_string_realworld_1 => - { - [ - #{ - <<"cart">> => - #{ - <<"value">> => <<"110045_77895_53420">>, - <<"attributes">> => #{ <<"SameSite">> => <<"Strict">> } - }, - <<"affiliate">> => - #{ - <<"value">> => <<"e4rt45dw">>, - <<"attributes">> => #{ <<"SameSite">> => <<"Lax">> } - } - } - ], - [ - <<"affiliate=\"e4rt45dw\"; SameSite=Lax">>, - <<"cart=\"110045_77895_53420\"; SameSite=Strict">> - ] - }, - to_string_user_settings_and_permissions => - { - [ - #{ - <<"user_settings">> => - #{ - <<"value">> => <<"notifications=true,privacy=strict,layout=grid">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> }, - <<"flags">> => [<<"HttpOnly">>, <<"Secure">>] - }, - <<"user_permissions">> => - #{ - <<"value">> => <<"read;write;delete">>, - <<"attributes">> => #{ <<"Path">> => <<"/">>, <<"SameSite">> => <<"None">> }, - <<"flags">> => [<<"Secure">>] - } - } - ], - [ - <<"user_permissions=\"read;write;delete\"; Path=/; SameSite=None; Secure">>, - <<"user_settings=\"notifications=true,privacy=strict,layout=grid\"; Path=/; HttpOnly; Secure">> - ] - }, - to_string_session_and_temp_data => - { - [ - #{ - <<"SESSION_ID">> => - #{ - <<"value">> => <<"abc123xyz ">>, - <<"attributes">> => #{ <<"path">> => <<"/dashboard">>, <<"samesite">> => <<"Strict">> }, - <<"flags">> => [<<"Secure">>] - }, - <<"temp_data">> => - #{ - <<"value">> => <<"cleanup_me">>, - <<"attributes">> => #{ <<"Max-Age">> => <<"-1">>, <<"Path">> => <<"/">> } - } - } - ], - [ - <<"SESSION_ID=\"abc123xyz \"; path=/dashboard; samesite=Strict; Secure">>, - 
<<"temp_data=\"cleanup_me\"; Max-Age=-1; Path=/">> - ] - }, - to_string_empty_and_anonymous => - { - [ - #{ - <<"user_preference">> => - #{ - <<"value">> => <<"">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> }, - <<"flags">> => [<<"HttpOnly">>] - }, - <<>> => - #{ - <<"value">> => <<"anonymous_session_123">>, - <<"attributes">> => #{ <<"Path">> => <<"/guest">> } - } - } - ], - [ - <<"=\"anonymous_session_123\"; Path=/guest">>, - <<"user_preference=\"\"; Path=/; HttpOnly">> - ] - }, - to_string_app_config_and_analytics => - { - [ - #{ - <<"$app_config$">> => - #{ - <<"value">> => <<"theme@dark!%20mode">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> } - }, - <<"analytics_session_data_with_very_long_name_for_tracking_purposes">> => - #{ - <<"value">> => <<"comprehensive_user_behavior_analytics_data_including_page_views_click_events_scroll_depth_time_spent_geographic_location_device_info_browser_details_and_more">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> } - } - } - ], - [ - <<"$app_config$=\"theme@dark!%20mode\"; Path=/">>, - <<"analytics_session_data_with_very_long_name_for_tracking_purposes=\"comprehensive_user_behavior_analytics_data_including_page_views_click_events_scroll_depth_time_spent_geographic_location_device_info_browser_details_and_more\"; Path=/">> - ] - }, - to_string_debug_and_tracking => - { - [ - #{ - <<"debug_info">> => - #{ - <<"value">> => <<"\\tIndented\\t\\nMultiline\\n">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> } - }, - <<"tracking_id">> => - #{ - <<"value">> => <<"user_12345">>, - <<"attributes">> => #{ - <<"CustomAttr">> => <<"CustomValue">>, - <<"Analytics">> => <<"Enabled">>, - <<"Path">> => <<"/">> - }, - <<"flags">> => [<<"HttpOnly">>] - } - } - ], - [ - <<"debug_info=\"\\tIndented\\t\\nMultiline\\n\"; Path=/">>, - <<"tracking_id=\"user_12345\"; Analytics=Enabled; CustomAttr=CustomValue; Path=/; HttpOnly">> - ] - }, - to_string_cache_and_form_token => - { - [ - #{ - <<"cache_bust">> => - #{ - <<"value">> => 
<<"v1.2.3">>, - <<"attributes">> => #{ - <<"Expires">> => <<"Mon, 99 Feb 2099 25:99:99 GMT">>, - <<"Path">> => <<"/">> - } - }, - <<"form_token">> => - #{ - <<"value">> => <<"form_abc123">>, - <<"attributes">> => #{ <<"SameSite">> => <<"Strick">> }, - <<"flags">> => [<<"Secure">>] - } - } - ], - [ - <<"cache_bust=\"v1.2.3\"; Expires=Mon, 99 Feb 2099 25:99:99 GMT; Path=/">>, - <<"form_token=\"form_abc123\"; SameSite=Strick; Secure">> - ] - }, - to_string_token_and_reactions => - { - [ - #{ - <<"access_token">> => - #{ - <<"value">> => <<"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> }, - <<"flags">> => [<<"HttpOnly">>, <<"Secure">>] - }, - <<"reaction_prefs">> => - #{ - <<"value">> => <<"👍👎">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> }, - <<"flags">> => [<<"Secure">>] - } - } - ], - [ - <<"access_token=\"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c\"; Path=/; HttpOnly; Secure">>, - <<"reaction_prefs=\"👍👎\"; Path=/; Secure">> - ] - }, - to_string_error_log_and_auth_token => - { - [ - #{ - <<"error_log">> => - #{ - <<"value">> => <<"timestamp=2024-01-15 10:30:00\\nlevel=ERROR\\tmessage=Database connection failed">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> } - }, - <<"auth_token">> => - #{ - <<"value">> => <<"bearer_xyz789">>, - <<"attributes">> => #{ <<"Path">> => <<"/api">> }, - <<"flags">> => [<<"HttpOnly">>, <<"Secure">>, <<"Secure">>] - } - } - ], - [ - <<"auth_token=\"bearer_xyz789\"; Path=/api; HttpOnly; Secure; Secure">>, - <<"error_log=\"timestamp=2024-01-15 10:30:00\\nlevel=ERROR\\tmessage=Database connection failed\"; Path=/">> - ] - }, - to_string_csrf_and_quick_setting => - { - [ - #{ - <<"csrf_token">> => - #{ - <<"value">> => <<"abc123">>, - <<"attributes">> => #{ 
<<"Path">> => <<"/">> }, - <<"flags">> => [<<"HttpOnly">>] - }, - <<"quick_setting">> => <<"enabled">> - } - ], - [ - <<"csrf_token=\"abc123\"; Path=/; HttpOnly">>, - <<"quick_setting=\"enabled\"">> - ] - }, - to_string_admin_and_upload => - { - [ - #{ - <<"secret_key">> => - #{ - <<"value">> => <<"confidential">>, - <<"attributes">> => #{ <<"Path">> => <<"%2Fadmin">> } - }, - <<"admin_flag">> => - #{ - <<"value">> => <<"true">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> } - } - } - ], - [ - <<"admin_flag=\"true\"; Path=/">>, - <<"secret_key=\"confidential\"; Path=%2Fadmin">> - ] - }, - to_string_search_and_tags => - { - [ - #{ - <<"search_history">> => - #{ - <<"value">> => <<"query,results">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> } - }, - <<"user_tags">> => - #{ - <<"value">> => <<"work,personal">>, - <<"attributes">> => #{ <<"Path">> => <<"/">> } - } - } - ], - [ - <<"search_history=\"query,results\"; Path=/">>, - <<"user_tags=\"work,personal\"; Path=/">> - ] - } - }. -``` - -### from_string_basic_test - -```erlang -from_string_basic_test() -> - assert_set(from_string_raw_value, fun from_string/1). -``` - -### from_string_attributes_test - -```erlang -from_string_attributes_test() -> - assert_set(from_string_attributes, fun from_string/1). -``` - -### from_string_flags_test - -```erlang -from_string_flags_test() -> - assert_set(from_string_flags, fun from_string/1). -``` - -### to_string_basic_test - -```erlang -to_string_basic_test() -> - assert_set(to_string_raw_value, fun to_string/1). -``` - -### to_string_attributes_test - -```erlang -to_string_attributes_test() -> - assert_set(to_string_attributes, fun to_string/1). -``` - -### to_string_flags_test - -```erlang -to_string_flags_test() -> - assert_set(to_string_flags, fun to_string/1). -``` - -### parse_realworld_test - -```erlang -parse_realworld_test() -> - assert_set(parse_realworld_1, fun from_string/1). 
-``` - -### parse_user_settings_and_permissions_test - -```erlang -parse_user_settings_and_permissions_test() -> - assert_set(parse_user_settings_and_permissions, fun from_string/1). -``` - -### parse_session_and_temp_data_test - -```erlang -parse_session_and_temp_data_test() -> - assert_set(parse_session_and_temp_data, fun from_string/1). -``` - -### parse_empty_and_anonymous_test - -```erlang -parse_empty_and_anonymous_test() -> - assert_set(parse_empty_and_anonymous, fun from_string/1). -``` - -### parse_app_config_and_analytics_test - -```erlang -parse_app_config_and_analytics_test() -> - assert_set(parse_app_config_and_analytics, fun from_string/1). -``` - -### parse_debug_and_tracking_test - -```erlang -parse_debug_and_tracking_test() -> - assert_set(parse_debug_and_tracking, fun from_string/1). -``` - -### parse_cache_and_form_token_test - -```erlang -parse_cache_and_form_token_test() -> - assert_set(parse_cache_and_form_token, fun from_string/1). -``` - -### parse_token_and_reactions_test - -```erlang -parse_token_and_reactions_test() -> - assert_set(parse_token_and_reactions, fun from_string/1). -``` - -### parse_error_log_and_auth_token_test - -```erlang -parse_error_log_and_auth_token_test() -> - assert_set(parse_error_log_and_auth_token, fun from_string/1). -``` - -### parse_csrf_and_quick_setting_test - -```erlang -parse_csrf_and_quick_setting_test() -> - assert_set(parse_csrf_and_quick_setting, fun from_string/1). -``` - -### parse_admin_and_upload_test - -```erlang -parse_admin_and_upload_test() -> - assert_set(parse_admin_and_upload, fun from_string/1). -``` - -### parse_search_and_tags_test - -```erlang -parse_search_and_tags_test() -> - assert_set(parse_search_and_tags, fun from_string/1). -``` - -### to_string_realworld_1_test - -```erlang -to_string_realworld_1_test() -> - assert_set(to_string_realworld_1, fun to_string/1). 
-``` - -### to_string_user_settings_and_permissions_test - -```erlang -to_string_user_settings_and_permissions_test() -> - assert_set(to_string_user_settings_and_permissions, fun to_string/1). -``` - -### to_string_session_and_temp_data_test - -```erlang -to_string_session_and_temp_data_test() -> - assert_set(to_string_session_and_temp_data, fun to_string/1). -``` - -### to_string_empty_and_anonymous_test - -```erlang -to_string_empty_and_anonymous_test() -> - assert_set(to_string_empty_and_anonymous, fun to_string/1). -``` - -### to_string_app_config_and_analytics_test - -```erlang -to_string_app_config_and_analytics_test() -> - assert_set(to_string_app_config_and_analytics, fun to_string/1). -``` - -### to_string_debug_and_tracking_test - -```erlang -to_string_debug_and_tracking_test() -> - assert_set(to_string_debug_and_tracking, fun to_string/1). -``` - -### to_string_cache_and_form_token_test - -```erlang -to_string_cache_and_form_token_test() -> - assert_set(to_string_cache_and_form_token, fun to_string/1). -``` - -### to_string_token_and_reactions_test - -```erlang -to_string_token_and_reactions_test() -> - assert_set(to_string_token_and_reactions, fun to_string/1). -``` - -### to_string_error_log_and_auth_token_test - -```erlang -to_string_error_log_and_auth_token_test() -> - assert_set(to_string_error_log_and_auth_token, fun to_string/1). -``` - -### to_string_csrf_and_quick_setting_test - -```erlang -to_string_csrf_and_quick_setting_test() -> - assert_set(to_string_csrf_and_quick_setting, fun to_string/1). -``` - -### to_string_admin_and_upload_test - -```erlang -to_string_admin_and_upload_test() -> - assert_set(to_string_admin_and_upload, fun to_string/1). 
-``` - -### to_string_search_and_tags_test - -```erlang -to_string_search_and_tags_test() -> -``` - ---- - -*Generated from [dev_codec_cookie_test_vectors.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_cookie_test_vectors.erl)* diff --git a/docs/book/src/dev_codec_flat.erl.md b/docs/book/src/dev_codec_flat.erl.md deleted file mode 100644 index 4bb41a9a6..000000000 --- a/docs/book/src/dev_codec_flat.erl.md +++ /dev/null @@ -1,266 +0,0 @@ -# dev_codec_flat - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_flat.erl) - -A codec for turning TABMs into/from flat Erlang maps that have -(potentially multi-layer) paths as their keys, and a normal TABM binary as -their value. - ---- - -## Exported Functions - -- `commit/3` -- `deserialize/1` -- `from/3` -- `serialize/1` -- `serialize/2` -- `to/3` -- `verify/3` - ---- - -### commit - -A codec for turning TABMs into/from flat Erlang maps that have - -```erlang -commit(Msg, Req, Opts) -> dev_codec_httpsig:commit(Msg, Req, Opts). -``` - -### verify - -A codec for turning TABMs into/from flat Erlang maps that have -Convert a flat map to a TABM. - -```erlang -verify(Msg, Req, Opts) -> dev_codec_httpsig:verify(Msg, Req, Opts). -``` - -### from - -A codec for turning TABMs into/from flat Erlang maps that have -Convert a flat map to a TABM. - -```erlang -from(Bin, _, _Opts) when is_binary(Bin) -> {ok, Bin}; -``` - -### from - -A codec for turning TABMs into/from flat Erlang maps that have -Convert a flat map to a TABM. - -```erlang -from(Map, Req, Opts) when is_map(Map) -> - {ok, - maps:fold( - fun(Path, Value, Acc) -> - case Value of - [] -> - ?event(error, - {empty_list_value, - {path, Path}, - {value, Value}, - {map, Map} - } - ); - _ -> - ok - end, - hb_util:deep_set( - hb_path:term_to_path_parts(Path, Opts), - hb_util:ok(from(Value, Req, Opts)), - Acc, - Opts - ) - end, - #{}, - Map - ) - }. -``` - -### to - -Convert a TABM to a flat map. 
- -```erlang -to(Bin, _, _Opts) when is_binary(Bin) -> {ok, Bin}; -``` - -### to - -Convert a TABM to a flat map. - -```erlang -to(Map, Req, Opts) when is_map(Map) -> - Res = - maps:fold( - fun(Key, Value, Acc) -> - case to(Value, Req, Opts) of - {ok, SubMap} when is_map(SubMap) -> - maps:fold( - fun(SubKey, SubValue, InnerAcc) -> - maps:put( - hb_path:to_binary([Key, SubKey]), - SubValue, - InnerAcc - ) - end, - Acc, - SubMap - ); - {ok, SimpleValue} -> - maps:put(hb_path:to_binary([Key]), SimpleValue, Acc) - end - end, - #{}, - Map - ), - {ok, Res}. -``` - -### serialize - -```erlang -serialize(Map) when is_map(Map) -> - serialize(Map, #{}). -``` - -### serialize - -```erlang -serialize(Map, Opts) when is_map(Map) -> - Flattened = hb_message:convert(Map, <<"flat@1.0">>, #{}), - {ok, - iolist_to_binary(lists:foldl( - fun(Key, Acc) -> - [ - Acc, - hb_path:to_binary(Key), - <<": ">>, - hb_maps:get(Key, Flattened, Opts), <<"\n">> - ] - end, - <<>>, - hb_util:to_sorted_keys(Flattened, Opts) - ) - ) - }. -``` - -### deserialize - -```erlang -deserialize(Bin) when is_binary(Bin) -> - Flat = lists:foldl( - fun(Line, Acc) -> - case binary:split(Line, <<": ">>, [global]) of - [Key, Value] -> - Acc#{ Key => Value }; - _ -> - Acc - end - end, - #{}, - binary:split(Bin, <<"\n">>, [global]) - ), - {ok, hb_message:convert(Flat, <<"structured@1.0">>, <<"flat@1.0">>, #{})}. -%%% Tests -``` - -### simple_conversion_test - -```erlang -simple_conversion_test() -> - Flat = #{[<<"a">>] => <<"value">>}, - Nested = #{<<"a">> => <<"value">>}, - ?assert(hb_message:match(Nested, hb_util:ok(dev_codec_flat:from(Flat, #{}, #{})))), - ?assert(hb_message:match(Flat, hb_util:ok(dev_codec_flat:to(Nested, #{}, #{})))). 
-``` - -### nested_conversion_test - -```erlang -nested_conversion_test() -> - Flat = #{<<"a/b">> => <<"value">>}, - Nested = #{<<"a">> => #{<<"b">> => <<"value">>}}, - Unflattened = hb_util:ok(dev_codec_flat:from(Flat, #{}, #{})), - Flattened = hb_util:ok(dev_codec_flat:to(Nested, #{}, #{})), - ?assert(hb_message:match(Nested, Unflattened)), - ?assert(hb_message:match(Flat, Flattened)). -``` - -### multiple_paths_test - -```erlang -multiple_paths_test() -> - Flat = #{ - <<"x/y">> => <<"1">>, - <<"x/z">> => <<"2">>, - <<"a">> => <<"3">> - }, - Nested = #{ - <<"x">> => #{ - <<"y">> => <<"1">>, - <<"z">> => <<"2">> - }, - <<"a">> => <<"3">> - }, - ?assert(hb_message:match(Nested, hb_util:ok(dev_codec_flat:from(Flat, #{}, #{})))), - ?assert(hb_message:match(Flat, hb_util:ok(dev_codec_flat:to(Nested, #{}, #{})))). -``` - -### path_list_test - -```erlang -path_list_test() -> - Nested = #{ - <<"x">> => #{ - [<<"y">>, <<"z">>] => #{ - <<"a">> => <<"2">> - }, - <<"a">> => <<"2">> - } - }, - Flat = hb_util:ok(dev_codec_flat:to(Nested, #{}, #{})), - lists:foreach( - fun(Key) -> - ?assert(not lists:member($\n, binary_to_list(Key))) - end, - hb_maps:keys(Flat, #{}) - ). -``` - -### binary_passthrough_test - -```erlang -binary_passthrough_test() -> - Bin = <<"raw binary">>, - ?assertEqual(Bin, hb_util:ok(dev_codec_flat:from(Bin, #{}, #{}))), - ?assertEqual(Bin, hb_util:ok(dev_codec_flat:to(Bin, #{}, #{}))). -``` - -### deep_nesting_test - -```erlang -deep_nesting_test() -> - Flat = #{<<"a/b/c/d">> => <<"deep">>}, - Nested = #{<<"a">> => #{<<"b">> => #{<<"c">> => #{<<"d">> => <<"deep">>}}}}, - Unflattened = hb_util:ok(dev_codec_flat:from(Flat, #{}, #{})), - Flattened = hb_util:ok(dev_codec_flat:to(Nested, #{}, #{})), - ?assert(hb_message:match(Nested, Unflattened)), - ?assert(hb_message:match(Flat, Flattened)). 
-``` - -### empty_map_test - -```erlang -empty_map_test() -> - ?assertEqual(#{}, hb_util:ok(dev_codec_flat:from(#{}, #{}, #{}))), -``` - ---- - -*Generated from [dev_codec_flat.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_flat.erl)* diff --git a/docs/book/src/dev_codec_http_auth.erl.md b/docs/book/src/dev_codec_http_auth.erl.md deleted file mode 100644 index 50b677225..000000000 --- a/docs/book/src/dev_codec_http_auth.erl.md +++ /dev/null @@ -1,210 +0,0 @@ -# dev_codec_http_auth - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_http_auth.erl) - -Implements a two-step authentication process for HTTP requests, using -the `Basic` authentication scheme. This device is a viable implementation -of the `generator` interface type employed by `~auth-hook@1.0`, as well as -the `~message@1.0` commitment scheme interface. -`http-auth@1.0``s `commit` and `verify` keys proxy to the `~httpsig@1.0` -secret key HMAC commitment scheme, utilizing a secret key derived from the -user's authentication information. Callers may also utilize the `generate` -key directly to derive entropy from HTTP Authorization headers provided by -the user. If no Authorization header is provided, the `generate` key will -return a `401 Unauthorized` response, which triggers a recipient`s browser -to prompt the user for authentication details and resend the request. -The `generate` key derives secrets for it's users by calling PBKDF2 with -the user's authentication information. The parameters for the PBKDF2 -algorithm are configurable, and can be specified in the request message: -
-  salt:       The salt to use for the PBKDF2 algorithm. Defaults to
-              `sha256("constant:ao")`.
-  iterations: The number of iterations to use for the PBKDF2 algorithm.
-              Defaults to `1,200,000`.
-  alg:        The hashing algorithm to use with PBKDF2. Defaults to
-              `sha256`.
-  key-length: The length of the key to derive from PBKDF2. Defaults to
-              `64`.
-
-The default iteration count was chosen at two times the recommendation of -OWASP in 2023 (600,000), and executes at a run rate of ~5-10 key derivations -per second on modern CPU hardware. Additionally, the default salt was chosen -such that it is a public constant (needed in order for reproducibility -between nodes), and hashed in order to provide additional entropy, in -alignment with RFC 8018, Section 4.1. - ---- - -## Exported Functions - -- `commit/3` -- `generate/3` -- `verify/3` - ---- - -### commit - -Implements a two-step authentication process for HTTP requests, using -The default salt to use for the PBKDF2 algorithm. This value must be -Generate or extract a new secret and commit to the message with the - -```erlang -commit(Base, Req, Opts) -> - case generate(Base, Req, Opts) of - {ok, Key} -> - {ok, CommitRes} = - dev_codec_httpsig_proxy:commit( - <<"http-auth@1.0">>, - Key, - Base, - Req, - Opts - ), - ?event({commit_result, CommitRes}), - {ok, CommitRes}; - {error, Err} -> - {error, Err} - end. -``` - -### verify - -Verify a given `Base` message with a derived `Key` using the - -```erlang -verify(Base, RawReq, Opts) -> - ?event({verify_invoked, {base, Base}, {req, RawReq}}), - {ok, Key} = generate(Base, RawReq, Opts), - ?event({verify_found_key, {key, Key}, {base, Base}, {req, RawReq}}), - {ok, VerifyRes} = - dev_codec_httpsig_proxy:verify( - Key, - Base, - RawReq, - Opts - ), - ?event({verify_result, VerifyRes}), - {ok, VerifyRes}. -``` - -### generate - -Collect authentication information from the client. If the `raw` flag - -```erlang -generate(_Msg, ReqLink, Opts) when ?IS_LINK(ReqLink) -> - generate(_Msg, hb_cache:ensure_loaded(ReqLink, Opts), Opts); -``` - -### generate - -Collect authentication information from the client. If the `raw` flag - -```erlang -generate(_Msg, #{ <<"secret">> := Secret }, _Opts) -> - {ok, Secret}; -``` - -### generate - -Collect authentication information from the client. 
If the `raw` flag - -```erlang -generate(_Msg, Req, Opts) -> - case hb_maps:get(<<"authorization">>, Req, undefined, Opts) of - <<"Basic ", Auth/binary>> -> - Decoded = base64:decode(Auth), - ?event(key_gen, {generated_key, {auth, Auth}, {decoded, Decoded}}), - case hb_maps:get(<<"raw">>, Req, false, Opts) of - true -> {ok, Decoded}; - false -> derive_key(Decoded, Req, Opts) - end; - undefined -> - {error, - #{ - <<"status">> => 401, - <<"www-authenticate">> => <<"Basic">>, - <<"details">> => <<"No authorization header provided.">> - } - }; - Unrecognized -> - {error, - #{ - <<"status">> => 400, - <<"details">> => - <<"Unrecognized authorization header: ", Unrecognized/binary>> - } - } - end. -``` - -### derive_key - -Derive a key from the authentication information using the PBKDF2 - -```erlang -derive_key(Decoded, Req, Opts) -> - Alg = hb_util:atom(hb_maps:get(<<"alg">>, Req, <<"sha256">>, Opts)), - Salt = - hb_maps:get( - <<"salt">>, - Req, - hb_crypto:sha256(?DEFAULT_SALT), - Opts - ), - Iterations = hb_maps:get(<<"iterations">>, Req, 2 * 600_000, Opts), - KeyLength = hb_maps:get(<<"key-length">>, Req, 64, Opts), - ?event(key_gen, - {derive_key, - {alg, Alg}, - {salt, Salt}, - {iterations, Iterations}, - {key_length, KeyLength} - } - ), - case hb_crypto:pbkdf2(Alg, Decoded, Salt, Iterations, KeyLength) of - {ok, Key} -> - EncodedKey = hb_util:encode(Key), - {ok, EncodedKey}; - {error, Err} -> - ?event(key_gen, - {pbkdf2_error, - {alg, Alg}, - {salt, Salt}, - {iterations, Iterations}, - {key_length, KeyLength}, - {error, Err} - } - ), - {error, - #{ - <<"status">> => 500, - <<"details">> => <<"Failed to derive key.">> - } - } - end. 
-``` - -### benchmark_pbkdf2_test - -```erlang -benchmark_pbkdf2_test() -> - Key = crypto:strong_rand_bytes(32), - Iterations = 2 * 600_000, - KeyLength = 32, - Derivations = - hb_test_utils:benchmark( - fun() -> - hb_crypto:pbkdf2(sha256, Key, <<"salt">>, Iterations, KeyLength) - end - ), - hb_test_utils:benchmark_print( - <<"Derived">>, - <<"keys (1.2m iterations each)">>, - Derivations -``` - ---- - -*Generated from [dev_codec_http_auth.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_http_auth.erl)* diff --git a/docs/book/src/dev_codec_httpsig.erl.md b/docs/book/src/dev_codec_httpsig.erl.md deleted file mode 100644 index ae5c48ab9..000000000 --- a/docs/book/src/dev_codec_httpsig.erl.md +++ /dev/null @@ -1,447 +0,0 @@ -# dev_codec_httpsig - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig.erl) - -This module implements HTTP Message Signatures as described in RFC-9421 -(https://datatracker.ietf.org/doc/html/rfc9421), as an AO-Core device. -It implements the codec standard (from/1, to/1), as well as the optional -commitment functions (id/3, sign/3, verify/3). The commitment functions -are found in this module, while the codec functions are relayed to the -`dev_codec_httpsig_conv` module. - ---- - -## Exported Functions - -- `add_content_digest/2` -- `commit/3` -- `from/3` -- `normalize_for_encoding/3` -- `serialize/2` -- `serialize/3` -- `to/3` -- `verify/3` - ---- - -### to - -This module implements HTTP Message Signatures as described in RFC-9421 - -```erlang -to(Msg, Req, Opts) -> dev_codec_httpsig_conv:to(Msg, Req, Opts). -``` - -### from - -This module implements HTTP Message Signatures as described in RFC-9421 -Generate the `Opts` to use during AO-Core operations in the codec. - -```erlang -from(Msg, Req, Opts) -> dev_codec_httpsig_conv:from(Msg, Req, Opts). 
-``` - -### opts - -This module implements HTTP Message Signatures as described in RFC-9421 -Generate the `Opts` to use during AO-Core operations in the codec. - -```erlang -opts(RawOpts) -> - RawOpts#{ - hashpath => ignore, - cache_control => [<<"no-cache">>, <<"no-store">>], - force_message => false - }. -``` - -### serialize - -A helper utility for creating a direct encoding of a HTTPSig message. - -```erlang -serialize(Msg, Opts) -> serialize(Msg, #{}, Opts). -``` - -### serialize - -A helper utility for creating a direct encoding of a HTTPSig message. - -```erlang -serialize(Msg, #{ <<"format">> := <<"components">> }, Opts) -> - % Convert to HTTPSig via TABM through calling `hb_message:convert` rather - % than executing `to/3` directly. This ensures that our responses are - % normalized. -``` - -### serialize - -```erlang -serialize(Msg, _Req, Opts) -> - % We assume the default format of `binary` if none of the prior clauses - % match. -``` - -### verify - -```erlang -verify(Base, Req, RawOpts) -> - % A rsa-pss-sha512 commitment is verified by regenerating the signature - % base and validating against the signature. -``` - -### commit - -Commit to a message using the HTTP-Signature format. We use the `type` - -```erlang -commit(Msg, Req = #{ <<"type">> := <<"unsigned">> }, Opts) -> - commit(Msg, Req#{ <<"type">> => <<"hmac-sha256">> }, Opts); -``` - -### commit - -Commit to a message using the HTTP-Signature format. We use the `type` - -```erlang -commit(Msg, Req = #{ <<"type">> := <<"signed">> }, Opts) -> - commit(Msg, Req#{ <<"type">> => <<"rsa-pss-sha512">> }, Opts); -``` - -### commit - -Commit to a message using the HTTP-Signature format. 
We use the `type` - -```erlang -commit(MsgToSign, Req = #{ <<"type">> := <<"rsa-pss-sha512">> }, RawOpts) -> - ?event( - {generating_rsa_pss_sha512_commitment, {msg, MsgToSign}, {req, Req}} - ), - Opts = opts(RawOpts), - Wallet = hb_opts:get(priv_wallet, no_viable_wallet, Opts), - if Wallet =:= no_viable_wallet -> - throw({cannot_commit, no_viable_wallet, MsgToSign}); - true -> - ok - end, - % Utilize the hashpath, if present, as the tag for the commitment. -``` - -### commit - -```erlang -commit(BaseMsg, Req = #{ <<"type">> := <<"hmac-sha256">> }, RawOpts) -> - % Extract the key material from the request. -``` - -### maybe_bundle_tag_commitment - -Annotate the commitment with the `bundle` key if the request contains - -```erlang -maybe_bundle_tag_commitment(Commitment, Req, _Opts) -> - case hb_util:atom(maps:get(<<"bundle">>, Req, false)) of - true -> Commitment#{ <<"bundle">> => <<"true">> }; - false -> Commitment - end. -``` - -### keys_to_commit - -Derive the set of keys to commit to from a `commit` request and a - -```erlang -keys_to_commit(_Base, #{ <<"committed">> := Explicit}, _Opts) -> - % Case 1: Explicitly provided keys to commit. -``` - -### keys_to_commit - -```erlang -keys_to_commit(Base, _Req, Opts) -> - % Extract the set of committed keys from the message. -``` - -### add_content_digest - -If the `body` key is present and a binary, replace it with a - -```erlang -add_content_digest(Msg, _Opts) -> - case maps:get(<<"body">>, Msg, not_found) of - Body when is_binary(Body) -> - % Remove the body from the message and add the content-digest, - % encoded as a structured field. -``` - -### normalize_for_encoding - -Given a base message and a commitment, derive the message and commitment - -```erlang -normalize_for_encoding(Msg, Commitment, Opts) -> - % Extract the requested keys to include in the signature base. -``` - -### key_present - -Calculate if a key or its `+link` TABM variant is present in a message. 
-create the signature base that will be signed in order to create the - -```erlang -key_present(Key, Msg) -> - NormalizedKey = hb_ao:normalize_key(Key), - maps:is_key(NormalizedKey, Msg) - orelse maps:is_key(<>, Msg). -%% -``` - -### signature_base - -Calculate if a key or its `+link` TABM variant is present in a message. -create the signature base that will be signed in order to create the - -```erlang -signature_base(EncodedMsg, Commitment, Opts) -> - ComponentsLines = - signature_components_line( - EncodedMsg, - Commitment, - Opts - ), - ?event({component_identifiers_for_sig_base, ComponentsLines}), - ParamsLine = signature_params_line(Commitment, Opts), - SignatureBase = - << - ComponentsLines/binary, "\n", - "\"@signature-params\": ", ParamsLine/binary - >>, - ?event(signature_base, {signature_base, {string, SignatureBase}}), - SignatureBase. -``` - -### signature_components_line - -Given a list of Component Identifiers and a Request/Response Message -construct the "signature-params-line" part of the signature base. - -```erlang -signature_components_line(Req, Commitment, _Opts) -> - ComponentsLines = - lists:map( - fun(Name) -> - case maps:get(Name, Req, not_found) of - not_found -> - throw( - { - missing_key_for_signature_component_line, - Name, - {message, Req}, - {commitment, Commitment} - } - ); - Value -> - << <<"\"">>/binary, Name/binary, <<"\"">>/binary, <<": ">>/binary, Value/binary>> - end - end, - maps:get(<<"committed">>, Commitment) - ), - iolist_to_binary(lists:join(<<"\n">>, ComponentsLines)). -%% -``` - -### signature_params_line - -Given a list of Component Identifiers and a Request/Response Message -construct the "signature-params-line" part of the signature base. 
- -```erlang -signature_params_line(RawCommitment, Opts) -> - Commitment = - maps:without( - [<<"signature">>, <<"signature-input">>], - RawCommitment - ), - ?event(debug_enc, {signature_params_line, {commitment, Commitment}}), - hb_util:bin( - hb_structured_fields:list( - [ - { - list, - lists:map( - fun(Key) -> {item, {string, Key}, []} end, - dev_codec_httpsig_siginfo:add_derived_specifiers( - hb_util:message_to_ordered_list( - maps:get(<<"committed">>, Commitment), - Opts - ) - ) - ), - lists:map( - fun ({<<"alg">>, Param}) when is_binary(Param) -> - {<<"alg">>, {string, Param}}; - ({Name, Param}) when is_binary(Param) -> - {Name, {string, Param}}; - ({Name, Param}) when is_integer(Param) -> - {Name, Param} - end, - lists:sort(maps:to_list( - maps:with( - [ - <<"created">>, - <<"expires">>, - <<"nonce">>, - <<"alg">>, - <<"keyid">>, - <<"tag">>, - <<"bundle">> - ], - Commitment#{ <<"alg">> => maps:get(<<"type">>, Commitment) } - ) - )) - ) - } - ] - ) - ). -``` - -### validate_large_message_from_http_test - -Ensure that we can validate a signature on an extremely large and complex - -```erlang -validate_large_message_from_http_test() -> - Node = hb_http_server:start_node(Opts = #{ - force_signed => true, - commitment_device => <<"httpsig@1.0">>, - extra => - [ - [ - [ - #{ - <<"n">> => N, - <<"m">> => M, - <<"o">> => O - } - || - O <- lists:seq(1, 3) - ] - || - M <- lists:seq(1, 3) - ] - || - N <- lists:seq(1, 3) - ] - }), - {ok, Res} = hb_http:get(Node, <<"/~meta@1.0/info">>, Opts), - Signers = hb_message:signers(Res, Opts), - ?event({received, {signers, Signers}, {res, Res}}), - ?assert(length(Signers) == 1), - ?assert(hb_message:verify(Res, Signers, Opts)), - ?event({sig_verifies, Signers}), - ?assert(hb_message:verify(Res, all, Opts)), - ?event({hmac_verifies, <<"hmac-sha256">>}), - {ok, OnlyCommitted} = hb_message:with_only_committed(Res, Opts), - ?event({msg_with_only_committed, OnlyCommitted}), - ?assert(hb_message:verify(OnlyCommitted, Signers, Opts)), 
- ?event({msg_with_only_committed_verifies, Signers}), - ?assert(hb_message:verify(OnlyCommitted, all, Opts)), - ?event({msg_with_only_committed_verifies_hmac, <<"hmac-sha256">>}). -``` - -### committed_id_test - -Ensure that we can validate a signature on an extremely large and complex - -```erlang -committed_id_test() -> - Msg = #{ <<"basic">> => <<"value">> }, - Signed = hb_message:commit(Msg, hb:wallet()), - ?assert(hb_message:verify(Signed, all, #{})), - ?event({signed_msg, Signed}), - UnsignedID = hb_message:id(Signed, none), - SignedID = hb_message:id(Signed, all), - ?event({ids, {unsigned_id, UnsignedID}, {signed_id, SignedID}}), - ?assertNotEqual(UnsignedID, SignedID). -``` - -### commit_secret_key_test - -```erlang -commit_secret_key_test() -> - Msg = #{ <<"basic">> => <<"value">> }, - CommittedMsg = - hb_message:commit( - Msg, - #{}, - #{ - <<"type">> => <<"hmac-sha256">>, - <<"secret">> => <<"test-secret">>, - <<"commitment-device">> => <<"httpsig@1.0">>, - <<"scheme">> => <<"secret">> - } - ), - ?event({committed_msg, CommittedMsg}), - Committers = hb_message:signers(CommittedMsg, #{}), - ?assert(length(Committers) == 1), - ?event({committers, Committers}), - ?assert( - hb_message:verify( - CommittedMsg, - #{ <<"committers">> => Committers, <<"secret">> => <<"test-secret">> }, - #{} - ) - ), - ?assertNot( - hb_message:verify( - CommittedMsg, - #{ <<"committers">> => Committers, <<"secret">> => <<"bad-secret">> }, - #{} - ) - ). 
-``` - -### multicommitted_id_test - -```erlang -multicommitted_id_test() -> - Msg = #{ <<"basic">> => <<"value">> }, - Signed1 = hb_message:commit(Msg, Wallet1 = ar_wallet:new()), - Signed2 = hb_message:commit(Signed1, Wallet2 = ar_wallet:new()), - Addr1 = hb_util:human_id(ar_wallet:to_address(Wallet1)), - Addr2 = hb_util:human_id(ar_wallet:to_address(Wallet2)), - ?event({signed_msg, Signed2}), - UnsignedID = hb_message:id(Signed2, none), - SignedID = hb_message:id(Signed2, all), - ?event({ids, {unsigned_id, UnsignedID}, {signed_id, SignedID}}), - ?assertNotEqual(UnsignedID, SignedID), - ?assert(hb_message:verify(Signed2, [])), - ?assert(hb_message:verify(Signed2, [Addr1])), - ?assert(hb_message:verify(Signed2, [Addr2])), - ?assert(hb_message:verify(Signed2, [Addr1, Addr2])), - ?assert(hb_message:verify(Signed2, [Addr2, Addr1])), - ?assert(hb_message:verify(Signed2, all)). -``` - -### sign_and_verify_link_test - -Test that we can sign and verify a message with a link. We use - -```erlang -sign_and_verify_link_test() -> - Msg = #{ - <<"normal">> => <<"typical-value">>, - <<"untyped">> => #{ <<"inner-untyped">> => <<"inner-value">> }, - <<"typed">> => #{ <<"inner-typed">> => 123 } - }, - NormMsg = hb_message:convert(Msg, <<"structured@1.0">>, #{}), - ?event({msg, NormMsg}), - Signed = hb_message:commit(NormMsg, hb:wallet()), - ?event({signed_msg, Signed}), - ?assert(hb_message:verify(Signed)). 
-``` - ---- - -*Generated from [dev_codec_httpsig.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig.erl)* diff --git a/docs/book/src/dev_codec_httpsig_conv.erl.md b/docs/book/src/dev_codec_httpsig_conv.erl.md deleted file mode 100644 index 26c4d2130..000000000 --- a/docs/book/src/dev_codec_httpsig_conv.erl.md +++ /dev/null @@ -1,564 +0,0 @@ -# dev_codec_httpsig_conv - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_conv.erl) - -A codec that marshals TABM encoded messages to and from the "HTTP" -message structure. -Every HTTP message is an HTTP multipart message. -See https://datatracker.ietf.org/doc/html/rfc7578 -For each TABM Key: -The Key/Value Pair will be encoded according to the following rules: - "signatures" -> {SignatureInput, Signature} header Tuples, each encoded - as a Structured Field Dictionary - "body" -> - - if a map, then recursively encode as its own HyperBEAM message - - otherwise encode as a normal field - _ -> encode as a normal field -Each field will be mapped to the HTTP Message according to the following -rules: - "body" -> always encoded part of the body as with Content-Disposition - type of "inline" - _ -> - - If the byte size of the value is less than the ?MAX_TAG_VALUE, - then encode as a header, also attempting to encode as a - structured field. - - Otherwise encode the value as a part in the multipart response - ---- - -## Exported Functions - -- `encode_http_msg/2` -- `from/3` -- `to/3` - ---- - -### from - -A codec that marshals TABM encoded messages to and from the "HTTP" -Convert a HTTP Message into a TABM. - -```erlang -from(Bin, _Req, _Opts) when is_binary(Bin) -> {ok, Bin}; -``` - -### from - -A codec that marshals TABM encoded messages to and from the "HTTP" -Convert a HTTP Message into a TABM. 
- -```erlang -from(Link, _Req, _Opts) when ?IS_LINK(Link) -> {ok, Link}; -``` - -### from - -A codec that marshals TABM encoded messages to and from the "HTTP" -Convert a HTTP Message into a TABM. - -```erlang -from(HTTP, _Req, Opts) -> - % First, parse all headers excluding the signature-related headers, as they - % are handled separately. -``` - -### body_to_tabm - -Generate the body TABM from the `body` key of the encoded message. - -```erlang -body_to_tabm(HTTP, Opts) -> - % Extract the body and content-type from the HTTP message. -``` - -### body_to_parts - -Split the body into parts, if it is a multipart. - -```erlang -body_to_parts(_ContentType, no_body, _Opts) -> no_body; -``` - -### body_to_parts - -Split the body into parts, if it is a multipart. - -```erlang -body_to_parts(ContentType, Body, _Opts) -> - ?event( - {from_body, - {content_type, {explicit, ContentType}}, - {body, Body} - } - ), - Params = - case ContentType of - undefined -> []; - _ -> - {item, {_, _XT}, XParams} = - hb_structured_fields:parse_item(ContentType), - XParams - end, - case lists:keyfind(<<"boundary">>, 1, Params) of - false -> - % The body is not a multipart, so just set as is to the Inlined key on - % the TABM. -``` - -### from_body_part - -Parse a single part of a multipart body into a TABM. - -```erlang -from_body_part(InlinedKey, Part, Opts) -> - % Extract the Headers block and Body. Only split on the FIRST double CRLF - {RawHeadersBlock, RawBody} = - case binary:split(Part, [?DOUBLE_CRLF], []) of - [XRawHeadersBlock] -> - % The message has no body. -``` - -### to - -Convert a TABM into an HTTP Message. The HTTP Message is a simple Erlang Map - -```erlang -to(TABM, Req, Opts) -> to(TABM, Req, [], Opts). -``` - -### to - -Convert a TABM into an HTTP Message. The HTTP Message is a simple Erlang Map - -```erlang -to(Bin, _Req, _FormatOpts, _Opts) when is_binary(Bin) -> {ok, Bin}; -``` - -### to - -Convert a TABM into an HTTP Message. 
The HTTP Message is a simple Erlang Map - -```erlang -to(Link, _Req, _FormatOpts, _Opts) when ?IS_LINK(Link) -> {ok, Link}; -``` - -### to - -Convert a TABM into an HTTP Message. The HTTP Message is a simple Erlang Map - -```erlang -to(TABM, Req = #{ <<"index">> := true }, _FormatOpts, Opts) -> - % If the caller has specified that an `index` page is requested, we: - % 1. Convert the message to HTTPSig as usual. -``` - -### to - -```erlang -to(TABM, Req, FormatOpts, Opts) when is_map(TABM) -> - % Ensure that the material for the message is loaded, if the request is - % asking for a bundle. -``` - -### do_to - -```erlang -do_to(Binary, _FormatOpts, _Opts) when is_binary(Binary) -> Binary; -``` - -### do_to - -```erlang -do_to(TABM, FormatOpts, Opts) when is_map(TABM) -> - InlineKey = - case lists:keyfind(inline, 1, FormatOpts) of - {inline, _InlineFieldHdrs, Key} -> Key; - _ -> not_set - end, - % Calculate the initial encoding from the TABM - Enc0 = - maps:fold( - fun(<<"body">>, Value, AccMap) -> - OldBody = maps:get(<<"body">>, AccMap, #{}), - AccMap#{ <<"body">> => OldBody#{ <<"body">> => Value } }; - (Key, Value, AccMap) when Key =:= InlineKey andalso InlineKey =/= not_set -> - OldBody = maps:get(<<"body">>, AccMap, #{}), - AccMap#{ <<"body">> => OldBody#{ InlineKey => Value } }; - (Key, Value, AccMap) -> - field_to_http(AccMap, {Key, Value}, #{}) - end, - % Add any inline field denotations to the HTTP message - case lists:keyfind(inline, 1, FormatOpts) of - {inline, InlineFieldHdrs, _InlineKey} -> InlineFieldHdrs; - _ -> #{} - end, - maps:without([<<"priv">>], TABM) - ), - ?event({prepared_body_map, {msg, Enc0}}), - BodyMap = maps:get(<<"body">>, Enc0, #{}), - GroupedBodyMap = group_maps(BodyMap, <<>>, #{}, Opts), - Enc1 = - case GroupedBodyMap of - EmptyBody when map_size(EmptyBody) =:= 0 -> - % If the body map is empty, then simply set the body to be a - % corresponding empty binary. 
-``` - -### group_ids - -Group all elements with: - -```erlang -group_ids(Map) -> - % Find all keys that are IDs. -``` - -### ungroup_ids - -Decode the `ao-ids` key into a map. - -```erlang -ungroup_ids(Msg = #{ <<"ao-ids">> := IDBin }, Opts) -> - % Extract the ID binary from the Map - EncodedIDsMap = hb_structured_fields:parse_dictionary(IDBin), - % Convert the value back into a raw binary - IDsMap = - lists:map( - fun({K, {item, {string, Bin}, _}}) -> {K, Bin} end, - EncodedIDsMap - ), - % Add the decoded IDs to the Map and remove the `ao-ids' key - hb_maps:merge(hb_maps:without([<<"ao-ids">>], Msg, Opts), hb_maps:from_list(IDsMap), Opts); -``` - -### ungroup_ids - -Decode the `ao-ids` key into a map. -Merge maps at the same level, if possible. - -```erlang -ungroup_ids(Msg, _Opts) -> Msg. -``` - -### group_maps - -Decode the `ao-ids` key into a map. -Merge maps at the same level, if possible. - -```erlang -group_maps(Map) -> - group_maps(Map, <<>>, #{}, #{}). -``` - -### group_maps - -```erlang -group_maps(Map, Parent, Top, Opts) when is_map(Map) -> - ?event({group_maps, {map, Map}, {parent, Parent}, {top, Top}}), - {Flattened, NewTop} = hb_maps:fold( - fun(Key, Value, {CurMap, CurTop}) -> - ?event({group_maps, {key, Key}, {value, Value}}), - NormKey = hb_ao:normalize_key(Key), - FlatK = - case Parent of - <<>> -> NormKey; - _ -> <> - end, - case Value of - _ when is_map(Value) orelse is_list(Value) -> - NormMsg = - if is_list(Value) -> - hb_message:convert( - Value, - tabm, - <<"structured@1.0">>, - Opts - ); - true -> - Value - end, - case hb_maps:size(NormMsg, Opts) of - 0 -> - { - CurMap, - hb_maps:put( - FlatK, - #{ <<"ao-types">> => <<"empty-message">> }, - CurTop, - Opts - ) - }; - _ -> - NewTop = group_maps(NormMsg, FlatK, CurTop, Opts), - {CurMap, NewTop} - end; - _ -> - ?event({group_maps, {norm_key, NormKey}, {value, Value}}), - case byte_size(Value) > ?MAX_HEADER_LENGTH of - % the value is too large to be encoded as a header - % within a part, so 
instead lift it to be a top level - % part - true -> - NewTop = hb_maps:put(FlatK, Value, CurTop, Opts), - {CurMap, NewTop}; - % Encode the value in the current part - false -> - NewCurMap = hb_maps:put(NormKey, Value, CurMap, Opts), - {NewCurMap, CurTop} - end - end - end, - {#{}, Top}, - Map, - Opts - ), - case hb_maps:size(Flattened, Opts) of - 0 -> NewTop; - _ -> case Parent of - <<>> -> hb_maps:merge(NewTop, Flattened, Opts); - _ -> - Res = NewTop#{ Parent => Flattened }, - ?event({returning_res, {res, Res}}), - Res - end - end. -``` - -### boundary_from_parts - -Generate a unique, reproducible boundary for the - -```erlang -boundary_from_parts(PartList) -> - BodyBin = - iolist_to_binary( - lists:join(?CRLF, - lists:map( - fun ({_PartName, PartBin}) -> PartBin end, - PartList - ) - ) - ), - RawBoundary = crypto:hash(sha256, BodyBin), - hb_util:encode(RawBoundary). -``` - -### encode_body_part - -Encode a multipart body part to a flat binary. - -```erlang -encode_body_part(PartName, BodyPart, InlineKey, Opts) -> - % We'll need to prepend a Content-Disposition header - % to the part, using the field name as the form part - % name. -``` - -### inline_key - -given a message, returns a binary tuple: - -```erlang -inline_key(Msg) -> - inline_key(Msg, #{}). -``` - -### inline_key - -```erlang -inline_key(Msg, Opts) -> - % The message can name a key whose value will be placed in the body as the - % inline part. Otherwise, the Msg <<"body">> is used. If not present, the - % Msg <<"data">> is used. 
- InlineBodyKey = hb_maps:get(<<"ao-body-key">>, Msg, false, Opts), - ?event({inlined, InlineBodyKey}), - case { - InlineBodyKey, - hb_maps:is_key(<<"body">>, Msg, Opts) - andalso not ?IS_LINK(maps:get(<<"body">>, Msg, Opts)), - hb_maps:is_key(<<"data">>, Msg, Opts) - andalso not ?IS_LINK(maps:get(<<"data">>, Msg, Opts)) - } of - % ao-body-key already exists, so no need to add one - {Explicit, _, _} when Explicit =/= false -> {#{}, InlineBodyKey}; - % ao-body-key defaults to <<"body">> (see below) - % So no need to add one - {_, true, _} -> {#{}, <<"body">>}; - % We need to preserve the ao-body-key, as the <<"data">> field, - % so that it is preserved during encoding and decoding - {_, _, true} -> {#{<<"ao-body-key">> => <<"data">>}, <<"data">>}; - % default to body being the inlined part. -``` - -### encode_http_msg - -Encode a HTTP message into a binary, converting it to `httpsig@1.0` - -```erlang -encode_http_msg(Msg, Opts) -> - % Convert the message to a HTTP-Sig encoded output. -``` - -### encode_http_flat_msg - -Encode a HTTP message into a binary. The input *must* be a raw map of - -```erlang -encode_http_flat_msg(Httpsig, Opts) -> - % Serialize the headers, to be included in the part of the multipart response - HeaderList = - lists:foldl( - fun ({HeaderName, RawHeaderVal}, Acc) -> - HVal = hb_cache:ensure_loaded(RawHeaderVal, Opts), - ?event({encoding_http_header, {header, HeaderName}, {value, HVal}}), - [<> | Acc] - end, - [], - hb_maps:to_list(hb_maps:without([<<"body">>, <<"priv">>], Httpsig, Opts), Opts) - ), - EncodedHeaders = iolist_to_binary(lists:join(?CRLF, lists:reverse(HeaderList))), - case hb_maps:get(<<"body">>, Httpsig, <<>>, Opts) of - <<>> -> EncodedHeaders; - % Some-Headers: some-value - % content-type: image/png - % - % - SubBody -> <> - end. 
-``` - -### field_to_http - -All maps are encoded into the body of the HTTP message - -```erlang -field_to_http(Httpsig, {Name, Value}, Opts) when is_map(Value) -> - NormalizedName = hb_ao:normalize_key(Name), - OldBody = hb_maps:get(<<"body">>, Httpsig, #{}, Opts), - Httpsig#{ <<"body">> => OldBody#{ NormalizedName => Value } }; -``` - -### field_to_http - -All maps are encoded into the body of the HTTP message - -```erlang -field_to_http(Httpsig, {Name, Value}, Opts) when is_binary(Value) -> - NormalizedName = hb_ao:normalize_key(Name), - % The default location where the value is encoded within the HTTP - % message depends on its size. -``` - -### group_maps_test - -```erlang -group_maps_test() -> - Map = #{ - <<"a">> => <<"1">>, - <<"b">> => #{ - <<"x">> => <<"10">>, - <<"y">> => #{ - <<"z">> => <<"20">> - }, - <<"foo">> => #{ - <<"bar">> => #{ - <<"fizz">> => <<"buzz">> - } - } - }, - <<"c">> => #{ - <<"d">> => <<"30">> - }, - <<"e">> => <<"2">>, - <<"buf">> => <<"hello">>, - <<"nested">> => #{ - <<"foo">> => <<"iiiiii">>, - <<"here">> => #{ - <<"bar">> => <<"baz">>, - <<"fizz">> => <<"buzz">>, - <<"pop">> => #{ - <<"very-fizzy">> => <<"very-buzzy">> - } - } - } - }, - Lifted = group_maps(Map), - ?assertEqual( - Lifted, - #{ - <<"a">> => <<"1">>, - <<"b">> => #{<<"x">> => <<"10">>}, - <<"b/foo/bar">> => #{<<"fizz">> => <<"buzz">>}, - <<"b/y">> => #{<<"z">> => <<"20">>}, - <<"buf">> => <<"hello">>, - <<"c">> => #{<<"d">> => <<"30">>}, - <<"e">> => <<"2">>, - <<"nested">> => #{<<"foo">> => <<"iiiiii">>}, - <<"nested/here">> => #{<<"bar">> => <<"baz">>, <<"fizz">> => <<"buzz">>}, - <<"nested/here/pop">> => #{<<"very-fizzy">> => <<"very-buzzy">>} - } - ), - ok. 
-``` - -### group_maps_flat_compatible_test - -The grouped maps encoding is a subset of the flat encoding, - -```erlang -group_maps_flat_compatible_test() -> - Map = #{ - <<"a">> => <<"1">>, - <<"b">> => #{ - <<"x">> => <<"10">>, - <<"y">> => #{ - <<"z">> => <<"20">> - }, - <<"foo">> => #{ - <<"bar">> => #{ - <<"fizz">> => <<"buzz">> - } - } - }, - <<"c">> => #{ - <<"d">> => <<"30">> - }, - <<"e">> => <<"2">>, - <<"buf">> => <<"hello">>, - <<"nested">> => #{ - <<"foo">> => <<"iiiiii">>, - <<"here">> => #{ - <<"bar">> => <<"baz">>, - <<"fizz">> => <<"buzz">> - } - } - }, - Lifted = group_maps(Map), - ?assertEqual(dev_codec_flat:from(Lifted, #{}, #{}), {ok, Map}), - ok. -``` - -### encode_message_with_links_test - -```erlang -encode_message_with_links_test() -> - Msg = #{ - <<"immediate-key">> => <<"immediate-value">>, - <<"typed-key">> => 4 - }, - {ok, Path} = hb_cache:write(Msg, #{}), - {ok, Read} = hb_cache:read(Path, #{}), - % Ensure that the message now has a lazy link - ?assertMatch({link, _, _}, maps:get(<<"typed-key">>, Read, #{})), - % Encode and decode the message as `httpsig@1.0` - Enc = hb_message:convert(Msg, <<"httpsig@1.0">>, #{}), - ?event({encoded, Enc}), - Dec = hb_message:convert(Enc, <<"structured@1.0">>, <<"httpsig@1.0">>, #{}), - % Ensure that the result is the same as the original message - ?event({decoded, Dec}), -``` - ---- - -*Generated from [dev_codec_httpsig_conv.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_conv.erl)* diff --git a/docs/book/src/dev_codec_httpsig_keyid.erl.md b/docs/book/src/dev_codec_httpsig_keyid.erl.md deleted file mode 100644 index d65cfe36a..000000000 --- a/docs/book/src/dev_codec_httpsig_keyid.erl.md +++ /dev/null @@ -1,195 +0,0 @@ -# dev_codec_httpsig_keyid - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_keyid.erl) - -A library for extracting and validating key material for `httpsig@1.0` -requests. 
Offers support for the following keyid schemes: -- `publickey`: The keyid is an encoded public key with the `publickey:` prefix. -- `constant`: The key is simply the keyid itself, including the `public:` - prefix if given. -- `secret`: The key is hashed and the `secret:` prefix is added to the - result in order to generate a keyid. -These functions are abstracted in order to allow for the addition of new -schemes in the future. - ---- - -## Exported Functions - -- `keyid_to_committer/1` -- `keyid_to_committer/2` -- `remove_scheme_prefix/1` -- `req_to_key_material/2` -- `secret_key_to_committer/1` - ---- - -### req_to_key_material - -A library for extracting and validating key material for `httpsig@1.0` -Extract the key and keyid from a request, returning - -```erlang -req_to_key_material(Req, Opts) -> - ?event({req_to_key_material, {req, Req}}), - KeyID = maps:get(<<"keyid">>, Req, undefined), - ?event({keyid_to_key_material, {keyid, KeyID}}), - case find_scheme(KeyID, Req, Opts) of - {ok, Scheme} -> - ?event({scheme_found, {scheme, Scheme}}), - ApplyRes = apply_scheme(Scheme, KeyID, Req), - ?event({apply_scheme_result, {apply_res, ApplyRes}}), - case ApplyRes of - {ok, _, CalcKeyID} when KeyID /= undefined, CalcKeyID /= KeyID -> - {error, key_mismatch}; - {ok, Key, CalcKeyID} -> - {ok, Scheme, Key, CalcKeyID}; - {error, Reason} -> - {error, Reason} - end; - {error, undefined_scheme} -> - {ok, DefaultScheme} = req_to_default_scheme(Req, Opts), - req_to_key_material(Req#{ <<"scheme">> => DefaultScheme }, Opts); - {error, Reason} -> - {error, Reason} - end. -``` - -### find_scheme - -Find the scheme from a keyid or request. Returns `{ok, Scheme}` or - -```erlang -find_scheme(KeyID, Req = #{ <<"scheme">> := RawScheme }, Opts) -> - Scheme = hb_util:atom(RawScheme), - % Validate that the scheme in the request matches the scheme in the keyid. 
-``` - -### find_scheme - -```erlang -find_scheme(undefined, _Req, _Opts) -> - {error, undefined_scheme}; -``` - -### find_scheme - -```erlang -find_scheme(KeyID, Req, Opts) -> - SchemeRes = - case binary:split(KeyID, <<":">>) of - [SchemeBin, _KeyID] -> {ok, SchemeBin}; - [_NoSchemeKeyID] -> - % Determine the default scheme based on the `type' of the request. -``` - -### req_to_default_scheme - -Determine the default scheme based on the `type` of the request. - -```erlang -req_to_default_scheme(Req, _Opts) -> - case maps:find(<<"type">>, Req) of - {ok, Type} -> - case maps:find(Type, ?DEFAULT_SCHEMES_BY_TYPE) of - {ok, Scheme} -> {ok, Scheme}; - error -> {error, unsupported_scheme} - end; - error -> - {error, no_request_type} - end. -``` - -### apply_scheme - -Apply the requested scheme to generate the key material (key and keyid). - -```erlang -apply_scheme(publickey, KeyID, _Req) -> - % Remove the `publickey:' prefix from the keyid and return the key. -``` - -### apply_scheme - -```erlang -apply_scheme(constant, RawKeyID, _Req) -> - % In the `constant' scheme, the key is simply the key itself, including the - % `constant:' prefix if given. -``` - -### apply_scheme - -```erlang -apply_scheme(secret, _KeyID, Req) -> - % In the `secret' scheme, the key is hashed to generate a keyid. -``` - -### apply_scheme - -```erlang -apply_scheme(_Scheme, _Key, _KeyID) -> - {error, unsupported_scheme}. -``` - -### keyid_to_committer - -Given a keyid and a scheme, generate the committer value for a commitment. - -```erlang -keyid_to_committer(KeyID) -> - case find_scheme(KeyID, #{}, #{}) of - {ok, Scheme} -> keyid_to_committer(Scheme, KeyID); - {error, _} -> undefined - end. -``` - -### keyid_to_committer - -```erlang -keyid_to_committer(publickey, KeyID) -> - % Note: There is a subtlety here. The `KeyID' is decoded with the - % `hb_util:decode' function rather than `base64:decode'. The reason for this - % is that certain codecs (e.g. 
`ans104@1.0') encode the public key with - % `base64url' encoding, rather than the standard `base64' encoding in - % HTTPSig. Our `hb_util:decode' function handles both cases returning the - % same raw bytes, and is subsequently safe. -``` - -### keyid_to_committer - -```erlang -keyid_to_committer(secret, KeyID) -> - remove_scheme_prefix(KeyID); -``` - -### keyid_to_committer - -```erlang -keyid_to_committer(constant, _KeyID) -> - undefined. -``` - -### secret_key_to_committer - -Given a secret key, generate the committer value for a commitment. - -```erlang -secret_key_to_committer(Key) -> - hb_util:human_id(hb_crypto:sha256(Key)). -``` - -### remove_scheme_prefix - -Remove the `scheme:` prefix from a keyid. - -```erlang -remove_scheme_prefix(KeyID) -> - case binary:split(KeyID, <<":">>) of - [_Scheme, Key] -> Key; - [Key] -> Key - end. -``` - ---- - -*Generated from [dev_codec_httpsig_keyid.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_keyid.erl)* diff --git a/docs/book/src/dev_codec_httpsig_proxy.erl.md b/docs/book/src/dev_codec_httpsig_proxy.erl.md deleted file mode 100644 index 9e32c0c99..000000000 --- a/docs/book/src/dev_codec_httpsig_proxy.erl.md +++ /dev/null @@ -1,54 +0,0 @@ -# dev_codec_httpsig_proxy - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_proxy.erl) - -A utility module that contains proxy functions for calling the -`~httpsig@1.0` codec's HMAC commitment functions with secret keys. -These tools are helpful for implementing a standardized pattern: -1. A device verifies a user's request/derives a secret key for them. -2. The device then wants to commit a message with the user's secret key - using the `secret:[h(secret)]` commitment scheme. -3. The commitment must then be modified to reference a different device - as the `commitment-device` key. -4. 
When `/verify` is called, the `~httpsig@1.0` codec is used under-the-hood - to validate the commitment on the re-derived secret key. -This module is currently used by the `~cookie@1.0` and `~http-auth@1.0` -devices. - ---- - -## Exported Functions - -- `commit/5` -- `verify/4` - ---- - -### commit - -A utility module that contains proxy functions for calling the -Commit to a given `Base` message with a given `Secret`, setting the - -```erlang -commit(Device, Secret, Base, Req, Opts) -> - % If there are no existing commitments, we use the unmodified base message. -``` - -### verify - -Verify a given `Base` message with a given `Secret` using the `~httpsig@1.0` - -```erlang -verify(Secret, Base, RawReq, Opts) -> - ProxyRequest = - RawReq#{ - <<"commitment-device">> => <<"httpsig@1.0">>, - <<"path">> => <<"verify">>, - <<"secret">> => Secret - }, - ?event({proxy_request, ProxyRequest}), -``` - ---- - -*Generated from [dev_codec_httpsig_proxy.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_proxy.erl)* diff --git a/docs/book/src/dev_codec_httpsig_siginfo.erl.md b/docs/book/src/dev_codec_httpsig_siginfo.erl.md deleted file mode 100644 index b899d9e03..000000000 --- a/docs/book/src/dev_codec_httpsig_siginfo.erl.md +++ /dev/null @@ -1,387 +0,0 @@ -# dev_codec_httpsig_siginfo - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_siginfo.erl) - -A list of components that are `derived` in the context of RFC-9421 from the -request message. 
- ---- - -## Exported Functions - -- `add_derived_specifiers/1` -- `commitment_to_sig_name/1` -- `commitments_to_siginfo/3` -- `committed_keys_to_siginfo/1` -- `from_siginfo_keys/3` -- `remove_derived_specifiers/1` -- `siginfo_to_commitments/3` -- `to_siginfo_keys/3` - ---- - -### commitments_to_siginfo - -A module for converting between commitments and their encoded `signature` -Generate a `signature` and `signature-input` key pair from a commitment - -```erlang -commitments_to_siginfo(_Msg, Comms, _Opts) when ?IS_EMPTY_MESSAGE(Comms) -> - #{}; -``` - -### commitments_to_siginfo - -A module for converting between commitments and their encoded `signature` -Generate a `signature` and `signature-input` key pair from a commitment - -```erlang -commitments_to_siginfo(Msg, Comms, Opts) -> - % Generate a SF item for each commitment's signature and signature-input. -``` - -### commitment_to_sf_siginfo - -Generate a `signature` and `signature-input` key pair from a given - -```erlang -commitment_to_sf_siginfo(Msg, Commitment, Opts) -> - % Generate the `alg' key from the commitment. -``` - -### get_additional_params - -```erlang -get_additional_params(Commitment) -> - AdditionalParams = - sets:to_list( - sets:subtract( - sets:from_list(maps:keys(Commitment)), - sets:from_list( - [ - <<"alg">>, - <<"keyid">>, - <<"tag">>, - <<"created">>, - <<"expires">>, - <<"nonce">>, - <<"committed">>, - <<"signature">>, - <<"type">>, - <<"commitment-device">>, - <<"committer">> - ] - ) - ) - ), - lists:map(fun(Param) -> - ParamValue = maps:get(Param, Commitment), - case ParamValue of - Val when is_atom(Val) -> - {Param, {string, atom_to_binary(Val, utf8)}}; - Val when is_binary(Val) -> - {Param, {string, Val}}; - Val when is_list(Val) -> - {Param, {string, list_to_binary(lists:join(<<", ">>, Val))}}; - Val when is_map(Val) -> - Map = nested_map_to_string(Val), - {Param, {string, list_to_binary(lists:join(<<", ">>, Map))} } - end - end, AdditionalParams). 
-``` - -### nested_map_to_string - -```erlang -nested_map_to_string(Map) -> - lists:map(fun(I) -> - case maps:get(I, Map) of - Val when is_map(Val) -> - Name = maps:get(<<"name">>, Val), - Value = hb_util:encode(maps:get(<<"value">>, Val)), - <>; - Val -> - Val - end - end, maps:keys(Map)). -``` - -### siginfo_to_commitments - -Take a message with a `signature` and `signature-input` key pair and - -```erlang -siginfo_to_commitments( - Msg = - #{ - <<"signature">> := <<"comm-", SFSigBin/binary>>, - <<"signature-input">> := <<"comm-", SFSigInputBin/binary>> - }, - BodyKeys, - Opts) -> - % Parse the signature and signature-input structured-fields. -``` - -### siginfo_to_commitments - -```erlang -siginfo_to_commitments(_Msg, _BodyKeys, _Opts) -> - % If the message does not contain a `signature' or `signature-input' key, - % we return an empty map. -``` - -### sf_siginfo_to_commitment - -Take a signature and signature-input as parsed structured-fields and - -```erlang -sf_siginfo_to_commitment(Msg, BodyKeys, SFSig, SFSigInput, Opts) -> - % Extract the signature and signature-input from the structured-fields. -``` - -### decoding_nested_map_binary - -```erlang -decoding_nested_map_binary(Bin) -> - MapBinary = - lists:foldl( - fun (X, Acc) -> - case binary:split(X, <<":">>, [global]) of - [ID, Key, Value] -> - Acc#{ - ID => #{ - <<"name">> => Key, - <<"value">> => hb_util:decode(Value) - } - }; - _ -> - X - end - end, - #{}, - binary:split(Bin, <<", ">>, [global]) - ), - case MapBinary of - Res when is_map(Res) -> - Res; - Res -> - Res - end. -``` - -### to_siginfo_keys - -Normalize a list of AO-Core keys to their equivalents in `httpsig@1.0` -Normalize a list of `httpsig@1.0` keys to their equivalents in AO-Core - -```erlang -to_siginfo_keys(Msg, Commitment, Opts) -> - {ok, _EncMsg, EncComm, _} = - dev_codec_httpsig:normalize_for_encoding(Msg, Commitment, Opts), - maps:get(<<"committed">>, EncComm). 
-``` - -### from_siginfo_keys - -Normalize a list of AO-Core keys to their equivalents in `httpsig@1.0` -Normalize a list of `httpsig@1.0` keys to their equivalents in AO-Core - -```erlang -from_siginfo_keys(HTTPEncMsg, BodyKeys, SigInfoCommitted) -> - % 1. Remove specifiers from the list. -``` - -### committed_keys_to_siginfo - -Convert committed keys to their siginfo format. This involves removing - -```erlang -committed_keys_to_siginfo(Msg) when is_map(Msg) -> - committed_keys_to_siginfo(hb_util:message_to_ordered_list(Msg)); -``` - -### committed_keys_to_siginfo - -Convert committed keys to their siginfo format. This involves removing - -```erlang -committed_keys_to_siginfo([]) -> []; -``` - -### committed_keys_to_siginfo - -Convert committed keys to their siginfo format. This involves removing - -```erlang -committed_keys_to_siginfo([<<"body">> | Rest]) -> - [<<"content-digest">> | Rest]; -``` - -### committed_keys_to_siginfo - -Convert committed keys to their siginfo format. This involves removing - -```erlang -committed_keys_to_siginfo([Key | Rest]) -> - [Key | committed_keys_to_siginfo(Rest)]. -``` - -### commitment_to_device_specifiers - -Convert an `alg` to a commitment device. If the `alg` has the form of - -```erlang -commitment_to_device_specifiers(Commitment, Opts) when is_map(Commitment) -> - commitment_to_device_specifiers(maps:get(<<"alg">>, Commitment), Opts); -``` - -### commitment_to_device_specifiers - -Convert an `alg` to a commitment device. If the `alg` has the form of - -```erlang -commitment_to_device_specifiers(Alg, _Opts) -> - case binary:split(Alg, <<"@">>) of - [Type] -> - % The `alg' is not a device specifier, so we assume that it is a - % type of the `httpsig@1.0' algorithm. 
-``` - -### commitment_to_alg - -Calculate an `alg` string from a commitment message, using its - -```erlang -commitment_to_alg(#{ <<"commitment-device">> := <<"httpsig@1.0">>, <<"type">> := Type }, _Opts) -> - Type; -``` - -### commitment_to_alg - -Calculate an `alg` string from a commitment message, using its - -```erlang -commitment_to_alg(Commitment, _Opts) -> - Type = - case maps:get(<<"type">>, Commitment, undefined) of - undefined -> <<>>; - TypeSpecifier -> <<"/", TypeSpecifier/binary>> - end, - CommitmentDevice = maps:get(<<"commitment-device">>, Commitment), - <>. -``` - -### commitment_to_sig_name - -Generate a signature name from a commitment. The commitment message is -Normalize key parameters to ensure their names are correct for inclusion - -```erlang -commitment_to_sig_name(Commitment) -> - BaseStr = - case maps:get(<<"committer">>, Commitment, undefined) of - undefined -> maps:get(<<"keyid">>, Commitment); - Committer -> - << - (hb_util:to_hex(binary:part(hb_util:native_id(Committer), 1, 8))) - /binary - >> - end, - DeviceStr = - binary:replace( - maps:get( - <<"commitment-device">>, - Commitment - ), - <<"@">>, - <<"-">> - ), - <>. -``` - -### add_derived_specifiers - -Generate a signature name from a commitment. The commitment message is -Normalize key parameters to ensure their names are correct for inclusion - -```erlang -add_derived_specifiers(ComponentIdentifiers) -> - % Remove the @ prefix from the component identifiers, if present. -``` - -### remove_derived_specifiers - -Remove derived specifiers from a list of component identifiers. - -```erlang -remove_derived_specifiers(ComponentIdentifiers) -> - lists:map( - fun(<<"@", Key/binary>>) -> - Key; - (Key) -> - Key - end, - ComponentIdentifiers - ). 
-``` - -### parse_alg_test - -```erlang -parse_alg_test() -> - ?assertEqual( - commitment_to_device_specifiers(#{ <<"alg">> => <<"rsa-pss-sha512">> }, #{}), - #{ - <<"commitment-device">> => <<"httpsig@1.0">>, - <<"type">> => <<"rsa-pss-sha512">> - } - ), - ?assertEqual( - commitment_to_device_specifiers( - #{ <<"alg">> => <<"ans104@1.0/rsa-pss-sha256">> }, - #{}), - #{ - <<"commitment-device">> => <<"ans104@1.0">>, - <<"type">> => <<"rsa-pss-sha256">> - } - ). -``` - -### escaped_value_test - -Test that tag values with special characters are correctly encoded and - -```erlang -escaped_value_test() -> - KeyID = crypto:strong_rand_bytes(32), - Committer = hb_util:human_id(ar_wallet:to_address(KeyID)), - Signature = crypto:strong_rand_bytes(512), - ID = hb_util:human_id(crypto:hash(sha256, Signature)), - Commitment = #{ - <<"committed">> => #{}, - <<"committer">> => Committer, - <<"commitment-device">> => <<"tx@1.0">>, - <<"keyid">> => <<"publickey:", (hb_util:encode(KeyID))/binary>>, - <<"original-tags">> => #{ - <<"1">> => #{ - <<"name">> => <<"Key">>, - <<"value">> => <<"value">> - }, - <<"2">> => #{ - <<"name">> => <<"Quotes">>, - <<"value">> => <<"{\"function\":\"mint\"}">> - } - }, - <<"signature">> => hb_util:encode(Signature), - <<"type">> => <<"rsa-pss-sha256">> - }, - SigInfo = commitments_to_siginfo(#{}, #{ ID => Commitment }, #{}), - Commitments = siginfo_to_commitments(SigInfo, #{}, #{}), - ?event(debug_test, {siginfo, {explicit, SigInfo}}), - ?event(debug_test, {commitments, {explicit, Commitments}}), - ?assertEqual(#{ ID => Commitment }, Commitments). 
-``` - ---- - -*Generated from [dev_codec_httpsig_siginfo.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_httpsig_siginfo.erl)* diff --git a/docs/book/src/dev_codec_json.erl.md b/docs/book/src/dev_codec_json.erl.md deleted file mode 100644 index 56bfbac9c..000000000 --- a/docs/book/src/dev_codec_json.erl.md +++ /dev/null @@ -1,170 +0,0 @@ -# dev_codec_json - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_json.erl) - -A simple JSON codec for HyperBEAM's message format. Takes a -message as TABM and returns an encoded JSON string representation. -This codec utilizes the httpsig@1.0 codec for signing and verifying. - ---- - -## Exported Functions - -- `commit/3` -- `committed/3` -- `content_type/1` -- `deserialize/3` -- `from/3` -- `serialize/3` -- `to/3` -- `verify/3` - ---- - -### content_type - -A simple JSON codec for HyperBEAM's message format. Takes a -Return the content type for the codec. -Encode a message to a JSON string, using JSON-native typing. - -```erlang -content_type(_) -> {ok, <<"application/json">>}. -``` - -### to - -A simple JSON codec for HyperBEAM's message format. Takes a -Return the content type for the codec. -Encode a message to a JSON string, using JSON-native typing. - -```erlang -to(Msg, _Req, _Opts) when is_binary(Msg) -> - {ok, hb_util:bin(json:encode(Msg))}; -``` - -### to - -A simple JSON codec for HyperBEAM's message format. Takes a -Return the content type for the codec. -Encode a message to a JSON string, using JSON-native typing. - -```erlang -to(Msg, Req, Opts) -> - % The input to this function will be a TABM message, so we: - % 1. Convert it to a structured message. -``` - -### from - -Decode a JSON string to a message. - -```erlang -from(Map, _Req, _Opts) when is_map(Map) -> {ok, Map}; -``` - -### from - -Decode a JSON string to a message. 
- -```erlang -from(JSON, _Req, Opts) -> - % The JSON string will be a partially-TABM encoded message: Rich number - % and list types, but no `atom's. Subsequently, we convert it to a fully - % structured message after decoding, then turn the result back into a TABM. -``` - -### commit - -```erlang -commit(Msg, Req, Opts) -> dev_codec_httpsig:commit(Msg, Req, Opts). -``` - -### verify - -```erlang -verify(Msg, Req, Opts) -> dev_codec_httpsig:verify(Msg, Req, Opts). -``` - -### committed - -```erlang -committed(Msg, Req, Opts) when is_binary(Msg) -> - committed(hb_util:ok(from(Msg, Req, Opts)), Req, Opts); -``` - -### committed - -```erlang -committed(Msg, _Req, Opts) -> - hb_message:committed(Msg, all, Opts). -``` - -### deserialize - -Deserialize the JSON string found at the given path. - -```erlang -deserialize(Base, Req, Opts) -> - Payload = - hb_ao:get( - Target = - hb_ao:get( - <<"target">>, - Req, - <<"body">>, - Opts - ), - Base, - Opts - ), - case Payload of - not_found -> {error, #{ - <<"status">> => 404, - <<"body">> => - << - "JSON payload not found in the base message.", - "Searched for: ", Target/binary - >> - }}; - _ -> - from(Payload, Req, Opts) - end. -``` - -### serialize - -Serialize a message to a JSON string. - -```erlang -serialize(Base, Msg, Opts) -> - {ok, - #{ - <<"content-type">> => <<"application/json">>, - <<"body">> => hb_util:ok(to(Base, Msg, Opts)) - } - }. 
-``` - -### decode_with_atom_test - -```erlang -decode_with_atom_test() -> - JSON = - <<""" - [ - { - "store-module": "hb_store_fs", - "name": "cache-TEST/json-test-store", - "ao-types": "store-module=\"atom\"" - } - ] - """>>, - Msg = hb_message:convert(JSON, <<"structured@1.0">>, <<"json@1.0">>, #{}), - ?assertMatch( - [#{ <<"store-module">> := hb_store_fs }|_], - hb_cache:ensure_all_loaded(Msg, #{}) -``` - ---- - -*Generated from [dev_codec_json.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_json.erl)* diff --git a/docs/book/src/dev_codec_structured.erl.md b/docs/book/src/dev_codec_structured.erl.md deleted file mode 100644 index d78e1379f..000000000 --- a/docs/book/src/dev_codec_structured.erl.md +++ /dev/null @@ -1,500 +0,0 @@ -# dev_codec_structured - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_structured.erl) - -A device implementing the codec interface (to/1, from/1) for -HyperBEAM's internal, richly typed message format. Supported rich types are: -- `integer` -- `float` -- `atom` -- `list` -Encoding to TABM can be limited to a subset of types (with other types -passing through in their rich representation) by specifying the types -that should be encoded with the `encode-types` request key. -This format mirrors HTTP Structured Fields, aside from its limitations of -compound type depths, as well as limited floating point representations. -As with all AO-Core codecs, its target format (the format it expects to -receive in the `to/1` function, and give in `from/1`) is TABM. -For more details, see the HTTP Structured Fields (RFC-9651) specification. 
- ---- - -## Exported Functions - -- `commit/3` -- `decode_ao_types/2` -- `decode_value/2` -- `encode_ao_types/2` -- `encode_value/1` -- `from/3` -- `implicit_keys/2` -- `is_list_from_ao_types/2` -- `to/3` -- `verify/3` - ---- - -### commit - -A device implementing the codec interface (to/1, from/1) for - -```erlang -commit(Msg, Req, Opts) -> dev_codec_httpsig:commit(Msg, Req, Opts). -``` - -### verify - -A device implementing the codec interface (to/1, from/1) for -Convert a rich message into a 'Type-Annotated-Binary-Message' (TABM). - -```erlang -verify(Msg, Req, Opts) -> dev_codec_httpsig:verify(Msg, Req, Opts). -``` - -### from - -A device implementing the codec interface (to/1, from/1) for -Convert a rich message into a 'Type-Annotated-Binary-Message' (TABM). - -```erlang -from(Bin, _Req, _Opts) when is_binary(Bin) -> {ok, Bin}; -``` - -### from - -A device implementing the codec interface (to/1, from/1) for -Convert a rich message into a 'Type-Annotated-Binary-Message' (TABM). - -```erlang -from(List, Req, Opts) when is_list(List) -> - % Encode the list as a map, then -- if our request indicates that we are - % encoding lists -- add the `.' key to the `ao-types' field, indicating - % that this message is a list and return. Otherwise, if the downstream - % encoding did not set its own `ao-types' field, we convert the message - % back to a list. -``` - -### from - -```erlang -from(Msg, Req, Opts) when is_map(Msg) -> - % Normalize the message, offloading links to the cache. -``` - -### from - -Find the types that should be encoded from the request and options. - -```erlang -from(Other, _Req, _Opts) -> {ok, hb_path:to_binary(Other)}. -``` - -### find_encode_types - -Find the types that should be encoded from the request and options. -Determine the type for a value. - -```erlang -find_encode_types(Req, Opts) -> - hb_maps:get(<<"encode-types">>, Req, ?SUPPORTED_TYPES, Opts). -``` - -### type - -Find the types that should be encoded from the request and options. 
-Determine the type for a value. - -```erlang -type(Int) when is_integer(Int) -> <<"integer">>; -``` - -### type - -Find the types that should be encoded from the request and options. -Determine the type for a value. - -```erlang -type(Float) when is_float(Float) -> <<"float">>; -``` - -### type - -Find the types that should be encoded from the request and options. -Determine the type for a value. - -```erlang -type(Atom) when is_atom(Atom) -> <<"atom">>; -``` - -### type - -Find the types that should be encoded from the request and options. -Determine the type for a value. - -```erlang -type(List) when is_list(List) -> <<"list">>; -``` - -### type - -Find the types that should be encoded from the request and options. -Determine the type for a value. -Discern the linkify mode from the request and the options. - -```erlang -type(Other) -> Other. -``` - -### linkify_mode - -Find the types that should be encoded from the request and options. -Determine the type for a value. -Discern the linkify mode from the request and the options. - -```erlang -linkify_mode(Req, Opts) -> - case hb_maps:get(<<"bundle">>, Req, not_found, Opts) of - not_found -> hb_opts:get(linkify_mode, offload, Opts); - true -> - % The request is asking for a bundle, so we should _not_ linkify. -``` - -### to - -Convert a TABM into a native HyperBEAM message. - -```erlang -to(Bin, _Req, _Opts) when is_binary(Bin) -> {ok, Bin}; -``` - -### to - -Convert a TABM into a native HyperBEAM message. - -```erlang -to(TABM0, Req, Opts) when is_list(TABM0) -> - % If we receive a list, we convert it to a message and run `to/3' on it. -``` - -### to - -```erlang -to(TABM0, Req, Opts) -> - Types = decode_ao_types(TABM0, Opts), - % Decode all links to their HyperBEAM-native, resolvable form. 
-``` - -### encode_ao_types - -Generate an `ao-types` structured field from a map of keys and their - -```erlang -encode_ao_types(Types, _Opts) -> - iolist_to_binary(hb_structured_fields:dictionary( - lists:map( - fun(Key) -> - {ok, Item} = hb_structured_fields:to_item(maps:get(Key, Types)), - {hb_escape:encode(Key), Item} - end, - hb_util:to_sorted_keys(Types) - ) - )). -``` - -### decode_ao_types - -Parse the `ao-types` field of a TABM if present, and return a map of - -```erlang -decode_ao_types(List, _Opts) when is_list(List) -> #{}; -``` - -### decode_ao_types - -Parse the `ao-types` field of a TABM if present, and return a map of - -```erlang -decode_ao_types(Msg, Opts) when is_map(Msg) -> - decode_ao_types(hb_maps:get(<<"ao-types">>, Msg, <<>>, Opts), Opts); -``` - -### decode_ao_types - -Parse the `ao-types` field of a TABM if present, and return a map of - -```erlang -decode_ao_types(Bin, _Opts) when is_binary(Bin) -> - hb_maps:from_list( - lists:map( - fun({Key, {item, {_, Value}, _}}) -> - {hb_escape:decode(Key), Value} - end, - hb_structured_fields:parse_dictionary(Bin) - ) - ). -``` - -### is_list_from_ao_types - -Determine if the `ao-types` field of a TABM indicates that the message - -```erlang -is_list_from_ao_types(Types, Opts) when is_binary(Types) -> - is_list_from_ao_types(decode_ao_types(Types, Opts), Opts); -``` - -### is_list_from_ao_types - -Determine if the `ao-types` field of a TABM indicates that the message - -```erlang -is_list_from_ao_types(Types, _Opts) -> - case maps:find(<<".">>, Types) of - {ok, <<"list">>} -> true; - _ -> false - end. -``` - -### implicit_keys - -Find the implicit keys of a TABM. - -```erlang -implicit_keys(Req, Opts) -> - hb_maps:keys( - hb_maps:filtermap( - fun(_Key, Val = <<"empty-", _/binary>>) -> {true, Val}; - (_Key, _Val) -> false - end, - decode_ao_types(Req, Opts), - Opts - ), - Opts - ). -``` - -### maybe_encode_value - -Encode a value if it is in the list of supported types. 
- -```erlang -maybe_encode_value(Value, EncodeTypes) -> - case lists:member(type(Value), EncodeTypes) of - true -> encode_value(Value); - false -> skip - end. -``` - -### encode_value - -Convert a term to a binary representation, emitting its type for - -```erlang -encode_value(Value) when is_integer(Value) -> - [Encoded, _] = hb_structured_fields:item({item, Value, []}), - {<<"integer">>, Encoded}; -``` - -### encode_value - -Convert a term to a binary representation, emitting its type for - -```erlang -encode_value(Value) when is_float(Value) -> - ?no_prod("Must use structured field representation for floats!"), - {<<"float">>, float_to_binary(Value)}; -``` - -### encode_value - -Convert a term to a binary representation, emitting its type for - -```erlang -encode_value(Value) when is_atom(Value) -> - EncodedIOList = - hb_structured_fields:item({item, {token, hb_util:bin(Value)}, []}), - Encoded = hb_util:bin(EncodedIOList), - {<<"atom">>, Encoded}; -``` - -### encode_value - -Convert a term to a binary representation, emitting its type for - -```erlang -encode_value(Values) when is_list(Values) -> - EncodedValues = - lists:map( - fun(Bin) when is_binary(Bin) -> {item, {string, Bin}, []}; - (Item) -> - {RawType, Encoded} = encode_value(Item), - Type = hb_ao:normalize_key(RawType), - { - item, - { - string, - << - "(ao-type-", Type/binary, ") ", - Encoded/binary - >> - }, - [] - } - end, - Values - ), - EncodedList = hb_structured_fields:list(EncodedValues), - {<<"list">>, iolist_to_binary(EncodedList)}; -``` - -### encode_value - -Convert a term to a binary representation, emitting its type for - -```erlang -encode_value(Value) when is_binary(Value) -> - {<<"binary">>, Value}; -``` - -### encode_value - -Convert a term to a binary representation, emitting its type for - -```erlang -encode_value(Value) -> - Value. -``` - -### decode_value - -Convert non-binary values to binary for serialization. 
- -```erlang -decode_value(Type, Value) when is_list(Type) -> - decode_value(list_to_binary(Type), Value); -``` - -### decode_value - -Convert non-binary values to binary for serialization. - -```erlang -decode_value(Type, Value) when is_binary(Type) -> - ?event({decoding, {type, Type}, {value, Value}}), - decode_value( - binary_to_existing_atom( - list_to_binary(string:to_lower(binary_to_list(Type))), - latin1 - ), - Value - ); -``` - -### decode_value - -Convert non-binary values to binary for serialization. - -```erlang -decode_value(integer, Value) -> - {item, Number, _} = hb_structured_fields:parse_item(Value), - Number; -``` - -### decode_value - -Convert non-binary values to binary for serialization. - -```erlang -decode_value(float, Value) -> - binary_to_float(Value); -``` - -### decode_value - -Convert non-binary values to binary for serialization. - -```erlang -decode_value(atom, Value) -> - {item, {_, AtomString}, _} = - hb_structured_fields:parse_item(Value), - hb_util:atom(AtomString); -``` - -### decode_value - -Convert non-binary values to binary for serialization. - -```erlang -decode_value(list, Value) when is_binary(Value) -> - lists:map( - fun({item, {string, <<"(ao-type-", Rest/binary>>}, _}) -> - [Type, Item] = binary:split(Rest, <<") ">>), - decode_value(Type, Item); - ({item, Item, _}) -> hb_structured_fields:from_bare_item(Item) - end, - hb_structured_fields:parse_list(iolist_to_binary(Value)) - ); -``` - -### decode_value - -Convert non-binary values to binary for serialization. - -```erlang -decode_value(list, Value) when is_map(Value) -> - hb_util:message_to_ordered_list(Value); -``` - -### decode_value - -Convert non-binary values to binary for serialization. 
- -```erlang -decode_value(map, Value) -> - hb_maps:from_list( - lists:map( - fun({Key, {item, Item, _}}) -> - ?event({decoded_item, {explicit, Key}, Item}), - {Key, hb_structured_fields:from_bare_item(Item)} - end, - hb_structured_fields:parse_dictionary(iolist_to_binary(Value)) - ) - ); -``` - -### decode_value - -Convert non-binary values to binary for serialization. - -```erlang -decode_value(BinType, Value) when is_binary(BinType) -> - decode_value( - list_to_existing_atom( - string:to_lower( - binary_to_list(BinType) - ) - ), - Value - ); -``` - -### decode_value - -Convert non-binary values to binary for serialization. - -```erlang -decode_value(OtherType, Value) -> - ?event({unexpected_type, OtherType, Value}), - throw({unexpected_type, OtherType, Value}). -``` - -### list_encoding_test - -```erlang -list_encoding_test() -> - % Test that we can encode and decode a list of integers. -``` - ---- - -*Generated from [dev_codec_structured.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_codec_structured.erl)* diff --git a/docs/book/src/dev_copycat.erl.md b/docs/book/src/dev_copycat.erl.md deleted file mode 100644 index 4a8df13c1..000000000 --- a/docs/book/src/dev_copycat.erl.md +++ /dev/null @@ -1,42 +0,0 @@ -# dev_copycat - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_copycat.erl) - -A device for orchestrating indexing of messages from foreign sources -into a HyperBEAM node's caches. -Supported sources of messages are as follows: -- A remote Arweave GraphQL endpoint. -- A remote Arweave node. -Each source is implemented as a separate engine, with `dev_copycat_[ENGINE]` -as the module name. - ---- - -## Exported Functions - -- `arweave/3` -- `graphql/3` - ---- - -### graphql - -A device for orchestrating indexing of messages from foreign sources -Fetch data from a GraphQL endpoint for replication. See - -```erlang -graphql(Base, Request, Opts) -> - dev_copycat_graphql:graphql(Base, Request, Opts). 
-``` - -### arweave - -Fetch data from an Arweave node for replication. See `dev_copycat_arweave` - -```erlang -arweave(Base, Request, Opts) -> -``` - ---- - -*Generated from [dev_copycat.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_copycat.erl)* diff --git a/docs/book/src/dev_copycat_arweave.erl.md b/docs/book/src/dev_copycat_arweave.erl.md deleted file mode 100644 index 64b304897..000000000 --- a/docs/book/src/dev_copycat_arweave.erl.md +++ /dev/null @@ -1,112 +0,0 @@ -# dev_copycat_arweave - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_copycat_arweave.erl) - -A `~copycat@1.0` engine that fetches block data from an Arweave node for -replication. This engine works in _reverse_ chronological order by default, -fetching blocks from the latest known block towards the Genesis block. The -node avoids retrieving blocks that are already present in the cache using -`~arweave@2.9-pre`'s built-in caching mechanism. - ---- - -## Exported Functions - -- `arweave/3` - ---- - -### arweave - -A `~copycat@1.0` engine that fetches block data from an Arweave node for -Fetch blocks from an Arweave node between a given range, or from the - -```erlang -arweave(_Base, Request, Opts) -> - {From, To} = parse_range(Request, Opts), - fetch_blocks(Request, From, To, Opts). -``` - -### parse_range - -Parse the range from the request. - -```erlang -parse_range(Request, Opts) -> - From = - case hb_maps:find(<<"from">>, Request, Opts) of - {ok, Height} -> Height; - error -> - {ok, LatestHeight} = - hb_ao:resolve( - <>, - Opts - ), - LatestHeight - end, - To = hb_maps:get(<<"to">>, Request, 0, Opts), - {From, To}. -``` - -### fetch_blocks - -Fetch blocks from an Arweave node between a given range. 
- -```erlang -fetch_blocks(Req, Current, Current, _Opts) -> - ?event(copycat_arweave, - {arweave_block_indexing_completed, - {reached_target, Current}, - {initial_request, Req} - } - ), - {ok, Current}; -``` - -### fetch_blocks - -Fetch blocks from an Arweave node between a given range. - -```erlang -fetch_blocks(Req, Current, To, Opts) -> - BlockRes = - hb_ao:resolve( - << - ?ARWEAVE_DEVICE/binary, - "/block=", - (hb_util:bin(Current))/binary - >>, - Opts - ), - process_block(BlockRes, Req, Current, To, Opts), - fetch_blocks(Req, Current - 1, To, Opts). -``` - -### process_block - -Process a block. - -```erlang -process_block(BlockRes, _Req, Current, To, _Opts) -> - case BlockRes of - {ok, _} -> - ?event( - copycat_short, - {arweave_block_cached, - {height, Current}, - {target, To} - } - ); - {error, not_found} -> - ?event( - copycat_short, - {arweave_block_not_found, - {height, Current}, - {target, To} - } - ) -``` - ---- - -*Generated from [dev_copycat_arweave.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_copycat_arweave.erl)* diff --git a/docs/book/src/dev_copycat_graphql.erl.md b/docs/book/src/dev_copycat_graphql.erl.md deleted file mode 100644 index 8398ab922..000000000 --- a/docs/book/src/dev_copycat_graphql.erl.md +++ /dev/null @@ -1,219 +0,0 @@ -# dev_copycat_graphql - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_copycat_graphql.erl) - -A `~copycat@1.0` engine that fetches data from a GraphQL endpoint for -replication. 
- ---- - -## Exported Functions - -- `graphql/3` - ---- - -### graphql - -A `~copycat@1.0` engine that fetches data from a GraphQL endpoint for -Takes a GraphQL query, optionally with a node address, and curses through - -```erlang -graphql(Base, Req, Opts) -> - case parse_query(Base, Req, Opts) of - {ok, Query} -> - Node = maps:get(<<"node">>, Opts, undefined), - OpName = hb_maps:get(<<"operationName">>, Req, undefined, Opts), - Vars = hb_maps:get(<<"variables">>, Req, #{}, Opts), - index_graphql(0, Query, Vars, Node, OpName, Opts); - Other -> - Other - end. -``` - -### index_graphql - -Index a GraphQL query into the node's caches. - -```erlang -index_graphql(Total, Query, Vars, Node, OpName, Opts) -> - maybe - ?event( - {graphql_run_called, - {query, {string, Query}}, - {operation, OpName}, - {variables, Vars} - } - ), - {ok, RawRes} ?= hb_gateway_client:query(Query, Vars, Node, OpName, Opts), - Res = hb_util:deep_get(<<"data/transactions">>, RawRes, #{}, Opts), - NodeStructs = hb_util:deep_get(<<"edges">>, Res, [], Opts), - ?event({graphql_request_returned_items, length(NodeStructs)}), - ?event( - {graphql_indexing_responses, - {query, {string, Query}}, - {variables, Vars}, - {result, Res} - } - ), - ParsedMsgs = - lists:filtermap( - fun(NodeStruct) -> - Struct = hb_maps:get(<<"node">>, NodeStruct, not_found, Opts), - try - {ok, ParsedMsg} = - hb_gateway_client:result_to_message( - Struct, - Opts - ), - {true, ParsedMsg} - catch - error:Reason -> - ?event( - warning, - {indexer_graphql_parse_failed, - {struct, NodeStruct}, - {reason, Reason} - } - ), - false - end - end, - NodeStructs - ), - ?event({graphql_parsed_msgs, length(ParsedMsgs)}), - WrittenMsgs = - lists:filter( - fun(ParsedMsg) -> - try - {ok, _} = hb_cache:write(ParsedMsg, Opts), - true - catch - error:Reason -> - ?event( - warning, - {indexer_graphql_write_failed, - {reason, Reason}, - {msg, ParsedMsg} - } - ), - false - end - end, - ParsedMsgs - ), - NewTotal = Total + length(WrittenMsgs), - 
?event(copycat_short, - {indexer_graphql_wrote, - {total, NewTotal}, - {batch, length(WrittenMsgs)}, - {batch_failures, length(ParsedMsgs) - length(WrittenMsgs)} - } - ), - HasNextPage = hb_util:deep_get(<<"pageInfo/hasNextPage">>, Res, false, Opts), - case HasNextPage of - true -> - % Get the last cursor from the node structures and recurse. -``` - -### parse_query - -Find or create a GraphQL query from a given base and request. We expect - -```erlang -parse_query(Base, Req, Opts) -> - % Merge the keys of the base and request maps, and remove duplicates. -``` - -### default_query - -Return a default query for a given filter type. - -```erlang -default_query(<<"tags">>, RawMessage, Opts) -> - Message = hb_cache:ensure_all_loaded(RawMessage, Opts), - BinaryPairs = - lists:map( - fun({Key, Value}) -> {hb_util:bin(Key), hb_util:bin(Value)} end, - hb_maps:to_list(Message, Opts) - ), - TagsQueryStr = - hb_util:bin( - [ - <<"{name: \"", Key/binary, "\", values: [\"", Value/binary, "\"]}">> - || - {Key, Value} <- BinaryPairs - ] - ), - ?event({tags_query, - {message, Message}, - {binary_pairs, BinaryPairs}, - {tags_query_str, {string, TagsQueryStr}} - }), - {ok, <<"query($after: String) { ", - "transactions(after: $after, tags: [", - TagsQueryStr/binary, - "]) { ", - "edges { ", (hb_gateway_client:item_spec())/binary , " } ", - "pageInfo { hasNextPage }", - "} }">>}; -``` - -### default_query - -Return a default query for a given filter type. - -```erlang -default_query(<<"tag">>, {Key, Value}, _Opts) -> - {ok, <<"query($after: String) { ", - "transactions(after: $after, tags: [", - "{name: \"", Key/binary, "\", values: [\"", Value/binary, "\"]}", - "]) { ", - "edges { ", (hb_gateway_client:item_spec())/binary , " } ", - "pageInfo { hasNextPage }", - "} }">>}; -``` - -### default_query - -Return a default query for a given filter type. 
- -```erlang -default_query(<<"recipient">>, Merged, Opts) -> - Recipient = hb_maps:get(<<"recipient">>, Merged, <<>>, Opts), - {ok, <<"query($after: String) { ", - "transactions(after: $after, recipients: [\"", Recipient/binary, "\"]) { ", - "edges { ", (hb_gateway_client:item_spec())/binary , " } ", - "pageInfo { hasNextPage }", - "} }">>}; -``` - -### default_query - -Return a default query for a given filter type. - -```erlang -default_query(<<"owner">>, Merged, Opts) -> - Owner = hb_maps:get(<<"owner">>, Merged, <<>>, Opts), - {ok, <<"query($after: String) { ", - "transactions(after: $after, owner: \"", Owner/binary, "\") { ", - "edges { ", (hb_gateway_client:item_spec())/binary , " } ", - "pageInfo { hasNextPage }", - "} }">>}; -``` - -### default_query - -Return a default query for a given filter type. - -```erlang -default_query(<<"all">>, _Merged, _Opts) -> - {ok, <<"query($after: String) { ", - "transactions(after: $after) { ", - "edges { ", (hb_gateway_client:item_spec())/binary , " } ", - "pageInfo { hasNextPage }", -``` - ---- - -*Generated from [dev_copycat_graphql.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_copycat_graphql.erl)* diff --git a/docs/book/src/dev_cron.erl.md b/docs/book/src/dev_cron.erl.md deleted file mode 100644 index ef30611d6..000000000 --- a/docs/book/src/dev_cron.erl.md +++ /dev/null @@ -1,400 +0,0 @@ -# dev_cron - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cron.erl) - -A device that inserts new messages into the schedule to allow processes -to passively 'call' themselves without user interaction. - ---- - -## Exported Functions - -- `every/3` -- `info/1` -- `info/3` -- `once/3` -- `stop/3` - ---- - -### info - -A device that inserts new messages into the schedule to allow processes -Exported function for getting device info. - -```erlang -info(_) -> - #{ exports => [info, once, every, stop] }. -``` - -### info - -Exported function for scheduling a one-time message. 
- -```erlang -info(_Msg1, _Msg2, _Opts) -> - InfoBody = #{ - <<"description">> => <<"Cron device for scheduling messages">>, - <<"version">> => <<"1.0">>, - <<"paths">> => #{ - <<"info">> => <<"Get device info">>, - <<"once">> => <<"Schedule a one-time message">>, - <<"every">> => <<"Schedule a recurring message">>, - <<"stop">> => <<"Stop a scheduled task {task}">> - } - }, - {ok, #{<<"status">> => 200, <<"body">> => InfoBody}}. -``` - -### once - -Exported function for scheduling a one-time message. - -```erlang -once(_Msg1, Msg2, Opts) -> - case hb_ao:get(<<"cron-path">>, Msg2, Opts) of - not_found -> - {error, <<"No cron path found in message.">>}; - CronPath -> - ReqMsgID = hb_message:id(Msg2, all, Opts), - % make the path specific for the end device to be used - ModifiedMsg2 = - maps:remove( - <<"cron-path">>, - maps:put(<<"path">>, CronPath, Msg2) - ), - Name = {<<"cron@1.0">>, ReqMsgID}, - Pid = spawn(fun() -> once_worker(CronPath, ModifiedMsg2, Opts) end), - hb_name:register(Name, Pid), - {ok, ReqMsgID} - end. -``` - -### once_worker - -Internal function for scheduling a one-time message. - -```erlang -once_worker(Path, Req, Opts) -> - % Directly call the meta device on the newly constructed 'singleton', just - % as hb_http_server does. -``` - -### every - -Exported function for scheduling a recurring message. 
- -```erlang -every(_Msg1, Msg2, Opts) -> - case { - hb_ao:get(<<"cron-path">>, Msg2, Opts), - hb_ao:get(<<"interval">>, Msg2, Opts) - } of - {not_found, _} -> - {error, <<"No cron path found in message.">>}; - {_, not_found} -> - {error, <<"No interval found in message.">>}; - {CronPath, IntervalString} -> - try - IntervalMillis = parse_time(IntervalString), - if IntervalMillis =< 0 -> - throw({error, invalid_interval_value}); - true -> - ok - end, - ReqMsgID = hb_message:id(Msg2, all, Opts), - ModifiedMsg2 = - maps:remove( - <<"cron-path">>, - maps:remove(<<"interval">>, Msg2) - ), - TracePID = hb_tracer:start_trace(), - Pid = - spawn( - fun() -> - every_worker_loop( - CronPath, - ModifiedMsg2, - Opts#{ trace => TracePID }, - IntervalMillis - ) - end - ), - Name = {<<"cron@1.0">>, ReqMsgID}, - hb_name:register(Name, Pid), - {ok, ReqMsgID} - catch - error:{invalid_time_unit, Unit} -> - {error, <<"Invalid time unit: ", Unit/binary>>}; - error:{invalid_interval_value} -> - {error, <<"Invalid interval value.">>}; - error:{Reason, _Stack} -> - {error, {<<"Error parsing interval">>, Reason}} - end - end. -``` - -### stop - -Exported function for stopping a scheduled task. - -```erlang -stop(_Msg1, Msg2, Opts) -> - case hb_ao:get(<<"task">>, Msg2, Opts) of - not_found -> - {error, <<"No task ID found in message.">>}; - TaskID -> - Name = {<<"cron@1.0">>, TaskID}, - case hb_name:lookup(Name) of - Pid when is_pid(Pid) -> - ?event({cron_stopping_task, {task_id, TaskID}, {pid, Pid}}), - exit(Pid, kill), - hb_name:unregister(Name), - {ok, #{<<"status">> => 200, <<"body">> => #{ - <<"message">> => <<"Task stopped successfully">>, - <<"task_id">> => TaskID - }}}; - undefined -> - {error, <<"Task not found.">>}; - Error -> - ?event({cron_stop_lookup_error, {task_id, TaskID}, {error, Error}}), - {error, #{ - <<"error">> => - <<"Failed to lookup task or unexpected result">>, - <<"details">> => Error - }} - end - end. 
-``` - -### every_worker_loop - -```erlang -every_worker_loop(CronPath, Req, Opts, IntervalMillis) -> - Req1 = Req#{<<"path">> => CronPath}, - ?event( - {cron_every_worker_executing, - {path, CronPath}, - {req_id, hb_message:id(Req, all, Opts)} - } - ), - try - dev_meta:handle(Opts, Req1), - ?event({cron_every_worker_executed, {path, CronPath}}) - catch - Class:Reason:Stack -> - ?event(cron_error, {cron_every_worker_error, - {path, CronPath}, - {error, Class, Reason, Stack}}) - end, - timer:sleep(IntervalMillis), - every_worker_loop(CronPath, Req, Opts, IntervalMillis). -``` - -### parse_time - -Parse a time string into milliseconds. - -```erlang -parse_time(BinString) -> - [AmountStr, UnitStr] = binary:split(BinString, <<"-">>), - Amount = binary_to_integer(AmountStr), - Unit = string:lowercase(binary_to_list(UnitStr)), - case Unit of - "millisecond" ++ _ -> Amount; - "second" ++ _ -> Amount * 1000; - "minute" ++ _ -> Amount * 60 * 1000; - "hour" ++ _ -> Amount * 60 * 60 * 1000; - "day" ++ _ -> Amount * 24 * 60 * 60 * 1000; - _ -> throw({error, invalid_time_unit, UnitStr}) - end. 
-``` - -### stop_once_test - -This test verifies that a recurring task can be stopped by - -```erlang -stop_once_test() -> - % Start a new node - Node = hb_http_server:start_node(), - % Set up a standard test worker (even though delay doesn't use its state) - TestWorkerPid = spawn(fun test_worker/0), - TestWorkerNameId = hb_util:human_id(crypto:strong_rand_bytes(32)), - hb_name:register({<<"test">>, TestWorkerNameId}, TestWorkerPid), - % Create a "once" task targeting the delay function - OnceUrlPath = <<"/~cron@1.0/once?test-id=", TestWorkerNameId/binary, - "&cron-path=/~test-device@1.0/delay">>, - {ok, OnceTaskID} = hb_http:get(Node, OnceUrlPath, #{}), - ?event({'cron:stop_once:test:created', {task_id, OnceTaskID}}), - % Give a short delay to ensure the task has started and called handle, - % entering the sleep - timer:sleep(200), - % Verify the once task worker process is registered and alive - OncePid = hb_name:lookup({<<"cron@1.0">>, OnceTaskID}), - ?assert(is_pid(OncePid), "Lookup did not return a PID"), - ?assert(erlang:is_process_alive(OncePid), "OnceWorker process died prematurely"), - % Call stop on the once task while it's sleeping - OnceStopPath = <<"/~cron@1.0/stop?task=", OnceTaskID/binary>>, - {ok, OnceStopResult} = hb_http:get(Node, OnceStopPath, #{}), - ?event({'cron:stop_once:test:stopped', {result, OnceStopResult}}), - % Verify success response from stop - ?assertMatch(#{<<"status">> := 200}, OnceStopResult), - % Verify name is unregistered - ?assertEqual(undefined, hb_name:lookup({<<"cron@1.0">>, OnceTaskID})), - % Allow a moment for the kill signal to be processed - timer:sleep(100), - % Verify process termination - ?assertNot(erlang:is_process_alive(OncePid), "Process not killed by stop"), - % Call stop again to verify 404 response - {error, <<"Task not found.">>} = hb_http:get(Node, OnceStopPath, #{}). 
-``` - -### stop_every_test - -This test verifies that a recurring task can be stopped by -This test verifies that a one-time task can be scheduled and executed. - -```erlang -stop_every_test() -> - % Start a new node - Node = hb_http_server:start_node(), - % Set up a test worker process to hold state (counter) - TestWorkerPid = spawn(fun test_worker/0), - TestWorkerNameId = hb_util:human_id(crypto:strong_rand_bytes(32)), - hb_name:register({<<"test">>, TestWorkerNameId}, TestWorkerPid), - % Create an "every" task that calls the test worker - EveryUrlPath = <<"/~cron@1.0/every?test-id=", TestWorkerNameId/binary, - "&interval=500-milliseconds", - "&cron-path=/~test-device@1.0/increment_counter">>, - {ok, CronTaskID} = hb_http:get(Node, EveryUrlPath, #{}), - ?event({'cron:stop_every:test:created', {task_id, CronTaskID}}), - % Verify the cron worker process was registered and is alive - CronWorkerPid = hb_name:lookup({<<"cron@1.0">>, CronTaskID}), - ?assert(is_pid(CronWorkerPid)), - ?assert(erlang:is_process_alive(CronWorkerPid)), - % Wait a bit to ensure the cron worker has run a few times - timer:sleep(1000), - % Call stop on the cron task using its ID - EveryStopPath = <<"/~cron@1.0/stop?task=", CronTaskID/binary>>, - {ok, EveryStopResult} = hb_http:get(Node, EveryStopPath, #{}), - ?event({'cron:stop_every:test:stopped', {result, EveryStopResult}}), - % Verify success response - ?assertMatch(#{<<"status">> := 200}, EveryStopResult), - % Verify the cron task name is unregistered (lookup returns undefined) - ?assertEqual(undefined, hb_name:lookup({<<"cron@1.0">>, CronTaskID})), - % Allow a moment for the process termination signal to be processed - timer:sleep(100), - % Verify the cron worker process is terminated - ?assertNot(erlang:is_process_alive(CronWorkerPid)), - % Check the counter in the original test worker was incremented - TestWorkerPid ! 
{get, self()}, - receive - {state, State = #{count := Count}} -> - ?event({'cron:stop_every:test:counter_state', {state, State}}), - ?assert(Count > 0) - after 1000 -> - throw(no_response_from_worker) - end, - % Call stop again using the same CronTaskID to verify the error - {error, <<"Task not found.">>} = hb_http:get(Node, EveryStopPath, #{}). -``` - -### once_executed_test - -This test verifies that a recurring task can be stopped by -This test verifies that a one-time task can be scheduled and executed. - -```erlang -once_executed_test() -> - % start a new node - Node = hb_http_server:start_node(), - % spawn a worker on the new node that calls test_worker/0 which inits - % test_worker/1 with a state of undefined - PID = spawn(fun test_worker/0), - % generate a random id that we can then use later to lookup the worker - ID = hb_util:human_id(crypto:strong_rand_bytes(32)), - % register the worker with the id - hb_name:register({<<"test">>, ID}, PID), - % Construct the URL path with the dynamic ID - UrlPath = <<"/~cron@1.0/once?test-id=", ID/binary, - "&cron-path=/~test-device@1.0/update_state">>, - % this should call the worker via the test device - % the test device should look up the worker via the id given - {ok, _ReqMsgId} = hb_http:get(Node, UrlPath, #{}), - % wait for the request to be processed - timer:sleep(1000), - % send a message to the worker to get the state - PID ! {get, self()}, - % receive the state from the worker - receive - {state, State} -> - ?event({once_executed_test_received_state, {state, State}}), - ?assertMatch(#{ <<"test-id">> := ID }, State) - after 1000 -> - FinalLookup = hb_name:lookup({<<"test">>, ID}), - ?event({timeout_waiting_for_worker, {pid, PID}, {lookup_result, FinalLookup}}), - throw(no_response_from_worker) - end. -``` - -### every_worker_loop_test - -This test verifies that a recurring task can be scheduled and executed. 
- -```erlang -every_worker_loop_test() -> - Node = hb_http_server:start_node(), - PID = spawn(fun test_worker/0), - ID = hb_util:human_id(crypto:strong_rand_bytes(32)), - hb_name:register({<<"test">>, ID}, PID), - UrlPath = <<"/~cron@1.0/every?test-id=", ID/binary, - "&interval=500-milliseconds", - "&cron-path=/~test-device@1.0/increment_counter">>, - ?event({'cron:every:test:sendUrl', {url_path, UrlPath}}), - {ok, ReqMsgId} = hb_http:get(Node, UrlPath, #{}), - ?event({'cron:every:test:get_done', {req_id, ReqMsgId}}), - timer:sleep(1500), - PID ! {get, self()}, - % receive the state from the worker - receive - {state, State = #{count := C}} -> - ?event({'cron:every:test:received_state', {state, State}}), - ?assert(C >= 3) - after 1000 -> - FinalLookup = hb_name:lookup({<<"test">>, ID}), - ?event({'cron:every:test:timeout', {pid, PID}, {lookup_result, FinalLookup}}), - throw({test_timeout_waiting_for_state, {id, ID}}) - end. -``` - -### test_worker - -This is a helper function that is used to test the cron device. - -```erlang -test_worker() -> test_worker(#{count => 0}). -``` - -### test_worker - -This is a helper function that is used to test the cron device. - -```erlang -test_worker(State) -> - receive - {increment} -> - NewCount = maps:get(count, State, 0) + 1, - ?event({'test_worker:incremented', {new_count, NewCount}}), - test_worker(State#{count := NewCount}); - {update, NewState} -> - ?event({'test_worker:updated', {new_state, NewState}}), - test_worker(NewState); - {get, Pid} -> - Pid ! 
{state, State}, - test_worker(State) -``` - ---- - -*Generated from [dev_cron.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cron.erl)* diff --git a/docs/book/src/dev_cu.erl.md b/docs/book/src/dev_cu.erl.md deleted file mode 100644 index 4b9f4ce28..000000000 --- a/docs/book/src/dev_cu.erl.md +++ /dev/null @@ -1,46 +0,0 @@ -# dev_cu - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cu.erl) - -## Exported Functions - -- `execute/2` -- `push/2` - ---- - -### push - -```erlang -push(Msg, S = #{ assignment := Assignment, logger := _Logger }) -> - ?event( - {pushing_message, - {assignment, hb_util:id(Assignment, unsigned)}, - {message, hb_util:id(Msg, unsigned)} - } - ), - case hb_client:compute(Assignment, Msg) of - {ok, Results} -> - ?event(computed_results), - {ok, S#{ results => Results }}; - Error -> - throw({cu_error, Error}) - end. -``` - -### execute - -```erlang -execute(CarrierMsg, S) -> - MaybeBundle = ar_bundles:hd(CarrierMsg), - Store = hb_opts:get(store), - Wallet = hb:wallet(), - {ok, Results} = - case MaybeBundle of - #tx{data = #{ <<"body">> := _Msg, <<"assignment">> := Assignment }} -> - % TODO: Execute without needing to call the SU unnecessarily. -``` - ---- - -*Generated from [dev_cu.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_cu.erl)* diff --git a/docs/book/src/dev_dedup.erl.md b/docs/book/src/dev_dedup.erl.md deleted file mode 100644 index 193fc9d17..000000000 --- a/docs/book/src/dev_dedup.erl.md +++ /dev/null @@ -1,93 +0,0 @@ -# dev_dedup - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_dedup.erl) - -A device that deduplicates messages in an evaluation stream, returning -status `skip` if the message has already been seen. -This device is typically used to ensure that a message is only executed -once, even if assigned multiple times, upon a `~process@1.0` evaluation. -It can, however, be used in many other contexts. 
-This device honors the `pass` key if it is present in the message. If so, -it will only run on the first pass. Additionally, the device supports -a `subject-key` key that allows the caller to specify the key whose ID -should be used for deduplication. If the `subject-key` key is not present, -the device will use the `body` of the request as the subject. If the key is -set to `request`, the device will use the entire request itself as the -subject. -This device runs on the first pass of the `compute` key call if executed -in a stack, and not in subsequent passes. Currently the device stores its -list of already seen items in memory, but at some point it will likely make -sense to drop them in the cache. - ---- - -## Exported Functions - -- `info/1` - ---- - -### info - -A device that deduplicates messages in an evaluation stream, returning - -```erlang -info(_M1) -> - #{ - default => fun handle/4, - exclude => [keys, set, id, commit] - }. -``` - -### handle - -Forward the keys and `set` functions to the message device, handle all - -```erlang -handle(<<"keys">>, M1, _M2, _Opts) -> - dev_message:keys(M1); -``` - -### handle - -Forward the keys and `set` functions to the message device, handle all - -```erlang -handle(<<"set">>, M1, M2, Opts) -> - dev_message:set(M1, M2, Opts); -``` - -### handle - -Forward the keys and `set` functions to the message device, handle all - -```erlang -handle(Key, M1, M2, Opts) -> - ?event({dedup_handle, {key, Key}, {msg1, M1}, {msg2, M2}}), - % Find the relevant parameters from the messages. We search for the - % `dedup-key' key in the first message, and use that value as the key to - % look for in the second message. -``` - -### dedup_test - -```erlang -dedup_test() -> - hb:init(), - % Create a stack with a dedup device and 2 devices that will append to a - % `Result' key. 
-``` - -### dedup_with_multipass_test - -```erlang -dedup_with_multipass_test() -> - % Create a stack with a dedup device and 2 devices that will append to a - % `Result' key and a `Multipass' device that will repeat the message for - % an additional pass. We want to ensure that Multipass is not hindered by - % the dedup device. -``` - ---- - -*Generated from [dev_dedup.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_dedup.erl)* diff --git a/docs/book/src/dev_delegated_compute.erl.md b/docs/book/src/dev_delegated_compute.erl.md deleted file mode 100644 index eabf786ae..000000000 --- a/docs/book/src/dev_delegated_compute.erl.md +++ /dev/null @@ -1,242 +0,0 @@ -# dev_delegated_compute - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_delegated_compute.erl) - -Simple wrapper module that enables compute on remote machines, -implementing the JSON-Iface. This can be used either as a standalone, to -bring trusted results into the local node, or as the `Execution-Device` of -an AO process. - ---- - -## Exported Functions - -- `compute/3` -- `init/3` -- `normalize/3` -- `snapshot/3` - ---- - -### init - -Simple wrapper module that enables compute on remote machines, -Initialize or normalize the compute-lite device. For now, we don't - -```erlang -init(Msg1, _Msg2, _Opts) -> - {ok, Msg1}. -``` - -### normalize - -We assume that the compute engine stores its own internal state, -Call the delegated server to compute the result. The endpoint is - -```erlang -normalize(Msg1, _Msg2, Opts) -> - hb_ao:set(Msg1, #{ <<"snapshot">> => unset }, Opts). -``` - -### compute - -We assume that the compute engine stores its own internal state, -Call the delegated server to compute the result. The endpoint is - -```erlang -compute(Msg1, Msg2, Opts) -> - OutputPrefix = dev_stack:prefix(Msg1, Msg2, Opts), - % Extract the process ID - this identifies which process to run compute - % against. 
-``` - -### do_compute - -Execute computation on a remote machine via relay and the JSON-Iface. - -```erlang -do_compute(ProcID, Msg2, Opts) -> - ?event({do_compute_msg, {req, Msg2}}), - Slot = hb_ao:get(<<"slot">>, Msg2, Opts), - {ok, AOS2 = #{ <<"body">> := Body }} = - dev_scheduler_formats:assignments_to_aos2( - ProcID, - #{ - Slot => Msg2 - }, - false, - Opts - ), - ?event({do_compute_body, {aos2, {string, Body}}}), - % Send to external CU via relay using /result endpoint - Response = - do_relay( - <<"POST">>, - <<"/result/", (hb_util:bin(Slot))/binary, "?process-id=", ProcID/binary>>, - Body, - AOS2, - Opts#{ - hashpath => ignore, - cache_control => [<<"no-store">>, <<"no-cache">>] - } - ), - extract_json_res(Response, Opts). -``` - -### do_dryrun - -Execute dry-run computation on a remote machine via relay and use - -```erlang -do_dryrun(ProcID, Msg2, Opts) -> - ?event({do_dryrun_msg, {req, Msg2}}), - % Remove commitments from the message before sending to the external CU - Body = - hb_json:encode( - dev_json_iface:message_to_json_struct( - hb_maps:without([<<"commitments">>], Msg2, Opts), - Opts - ) - ), - ?event({do_dryrun_body, {string, Body}}), - % Send to external CU via relay using /dry-run endpoint - Response = do_relay( - <<"POST">>, - <<"/dry-run?process-id=", ProcID/binary>>, - Body, - #{}, - Opts#{ - hashpath => ignore, - cache_control => [<<"no-store">>, <<"no-cache">>] - } - ), - extract_json_res(Response, Opts). -``` - -### do_relay - -```erlang -do_relay(Method, Path, Body, AOS2, Opts) -> - hb_ao:resolve( - #{ - <<"device">> => <<"relay@1.0">>, - <<"content-type">> => <<"application/json">> - }, - AOS2#{ - <<"path">> => <<"call">>, - <<"relay-method">> => Method, - <<"relay-body">> => Body, - <<"relay-path">> => Path, - <<"content-type">> => <<"application/json">> - }, - Opts - ). -``` - -### extract_json_res - -Extract the JSON response from the delegated compute response. 
- -```erlang -extract_json_res(Response, Opts) -> - case Response of - {ok, Res} -> - JSONRes = hb_ao:get(<<"body">>, Res, Opts), - ?event({ - delegated_compute_res_metadata, - {req, hb_maps:without([<<"body">>], Res, Opts)} - }), - {ok, JSONRes}; - {Err, Error} when Err == error; Err == failure -> - {error, Error} - end. -``` - -### get_process_id - -```erlang -get_process_id(Msg1, Msg2, Opts) -> - RawProcessID = dev_process:process_id(Msg1, #{}, Opts), - case RawProcessID of - not_found -> hb_ao:get(<<"process-id">>, Msg2, Opts); - ProcID -> ProcID - end. -``` - -### handle_relay_response - -Handle the response from the delegated compute server. Assumes that the - -```erlang -handle_relay_response(Msg1, Msg2, Opts, Response, OutputPrefix, ProcessID, Slot) -> - case Response of - {ok, JSONRes} -> - ?event( - {compute_lite_res, - {process_id, ProcessID}, - {slot, Slot}, - {json_res, {string, JSONRes}}, - {req, Msg2} - } - ), - {ok, Msg} = dev_json_iface:json_to_message(JSONRes, Opts), - {ok, - hb_ao:set( - Msg1, - #{ - <> => Msg, - <> => - #{ - <<"content-type">> => <<"application/json">>, - <<"body">> => JSONRes - } - }, - Opts - ) - }; - {error, Error} -> - {error, Error} - end. 
-``` - -### snapshot - -Generate a snapshot of a running computation by calling the - -```erlang -snapshot(Msg, Msg2, Opts) -> - ?event({snapshotting, {req, Msg2}}), - ProcID = dev_process:process_id(Msg, #{}, Opts), - Res = - hb_ao:resolve( - #{ - <<"device">> => <<"relay@1.0">>, - <<"content-type">> => <<"application/json">> - }, - #{ - <<"path">> => <<"call">>, - <<"relay-method">> => <<"POST">>, - <<"relay-path">> => <<"/snapshot/", ProcID/binary>>, - <<"content-type">> => <<"application/json">>, - <<"body">> => <<"{}">> - }, - Opts#{ - hashpath => ignore, - cache_control => [<<"no-store">>, <<"no-cache">>] - } - ), - ?event({snapshotting_result, Res}), - case Res of - {ok, Response} -> - {ok, Response}; - {error, Error} -> - {ok, - #{ - <<"error">> => <<"No checkpoint produced.">>, - <<"error-details">> => Error - }} -``` - ---- - -*Generated from [dev_delegated_compute.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_delegated_compute.erl)* diff --git a/docs/book/src/dev_faff.erl.md b/docs/book/src/dev_faff.erl.md deleted file mode 100644 index e09e3d64a..000000000 --- a/docs/book/src/dev_faff.erl.md +++ /dev/null @@ -1,62 +0,0 @@ -# dev_faff - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_faff.erl) - -A module that implements a 'friends and family' pricing policy. -It will allow users to process requests only if their addresses are -in the allow-list for the node. -Fundamentally against the spirit of permissionlessness, but it is useful if -you are running a node for your own purposes and would not like to allow -others to make use of it -- even for a fee. It also serves as a useful -example of how to implement a custom pricing policy, as it implements stubs -for both the pricing and ledger P4 APIs. - ---- - -## Exported Functions - -- `charge/3` -- `estimate/3` - ---- - -### estimate - -A module that implements a 'friends and family' pricing policy. 
-Decide whether or not to service a request from a given address. - -```erlang -estimate(_, Msg, NodeMsg) -> - ?event(payment, {estimate, {msg, Msg}}), - % Check if the address is in the allow-list. -``` - -### is_admissible - -Check whether all of the signers of the request are in the allow-list. - -```erlang -is_admissible(Msg, NodeMsg) -> - AllowList = hb_opts:get(faff_allow_list, [], NodeMsg), - Req = hb_ao:get(<<"request">>, Msg, NodeMsg), - Signers = hb_message:signers(Req, NodeMsg), - ?event(payment, {is_admissible, {signers, Signers}, {allow_list, AllowList}}), - lists:all( - fun(Signer) -> lists:member(Signer, AllowList) end, - Signers - ). -``` - -### charge - -Charge the user's account if the request is allowed. - -```erlang -charge(_, Req, _NodeMsg) -> - ?event(payment, {charge, Req}), - {ok, true}. -``` - ---- - -*Generated from [dev_faff.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_faff.erl)* diff --git a/docs/book/src/dev_genesis_wasm.erl.md b/docs/book/src/dev_genesis_wasm.erl.md deleted file mode 100644 index b906f31bc..000000000 --- a/docs/book/src/dev_genesis_wasm.erl.md +++ /dev/null @@ -1,756 +0,0 @@ -# dev_genesis_wasm - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_genesis_wasm.erl) - -A device that mimics an environment suitable for `legacynet` AO -processes, using HyperBEAM infrastructure. This allows existing `legacynet` -AO process definitions to be used in HyperBEAM. - ---- - -## Exported Functions - -- `compute/3` -- `init/3` -- `normalize/3` -- `snapshot/3` - ---- - -### init - -A device that mimics an environment suitable for `legacynet` AO -Initialize the device. -Normalize the device. - -```erlang -init(Msg, _Msg2, _Opts) -> {ok, Msg}. -``` - -### normalize - -A device that mimics an environment suitable for `legacynet` AO -Initialize the device. -Normalize the device. - -```erlang -normalize(Msg, Msg2, Opts) -> - dev_delegated_compute:normalize(Msg, Msg2, Opts). 
-``` - -### compute - -Genesis-wasm device compute handler. - -```erlang -compute(Msg, Msg2, Opts) -> - % Validate whether the genesis-wasm feature is enabled. -``` - -### snapshot - -Snapshot the state of the process via the `delegated-compute@1.0` device. - -```erlang -snapshot(Msg, Msg2, Opts) -> - delegate_request(Msg, Msg2, Opts). -``` - -### delegate_request - -Proxy a request to the delegated-compute@1.0 device, ensuring that - -```erlang -delegate_request(Msg, Msg2, Opts) -> - % Validate whether the genesis-wasm feature is enabled. -``` - -### do_compute - -Handle normal compute execution with state persistence (GET method). - -```erlang -do_compute(Msg, Msg2, Opts) -> - % Resolve the `delegated-compute@1.0' device. -``` - -### ensure_started - -Ensure the local `genesis-wasm@1.0` is live. If it not, start it. - -```erlang -ensure_started(Opts) -> - % Check if the `genesis-wasm@1.0' device is already running. The presence - % of the registered name implies its availability. -``` - -### is_genesis_wasm_server_running - -Check if the genesis-wasm server is running, using the cached process ID - -```erlang -is_genesis_wasm_server_running(Opts) -> - case get(genesis_wasm_pid) of - undefined -> - ?event(genesis_wasm_pinging_server), - Parent = self(), - PID = spawn( - fun() -> - ?event({genesis_wasm_get_info_endpoint, {worker, self()}}), - Parent ! {ok, self(), status(Opts)} - end - ), - receive - {ok, PID, Status} -> - put(genesis_wasm_pid, Status), - ?event({genesis_wasm_received_status, Status}), - Status - after ?STATUS_TIMEOUT -> - ?event({genesis_wasm_status_check, timeout}), - erlang:exit(PID, kill), - false - end; - _ -> true - end. 
-``` - -### status - -Check if the genesis-wasm server is running by requesting its status - -```erlang -status(Opts) -> - ServerPort = - integer_to_binary( - hb_opts:get( - genesis_wasm_port, - 6363, - Opts - ) - ), - try hb_http:get(<<"http://localhost:", ServerPort/binary, "/status">>, Opts) of - {ok, Res} -> - ?event({genesis_wasm_status_check, {res, Res}}), - true; - Err -> - ?event({genesis_wasm_status_check, {err, Err}}), - false - catch - _:Err -> - ?event({genesis_wasm_status_check, {error, Err}}), - false - end. -``` - -### collect_events - -Collect events from the port and log them. - -```erlang -collect_events(Port) -> - collect_events(Port, <<>>). -``` - -### collect_events - -```erlang -collect_events(Port, Acc) -> - receive - {Port, {data, Data}} -> - collect_events(Port, - log_server_events(<>) - ); - stop -> - port_close(Port), - ?event(genesis_wasm_stopped, {pid, self()}), - ok - end. -``` - -### log_server_events - -Log lines of output from the genesis-wasm server. - -```erlang -log_server_events(Bin) when is_binary(Bin) -> - log_server_events(binary:split(Bin, <<"\n">>, [global])); -``` - -### log_server_events - -Log lines of output from the genesis-wasm server. - -```erlang -log_server_events([Remaining]) -> Remaining; -``` - -### log_server_events - -Log lines of output from the genesis-wasm server. - -```erlang -log_server_events([Line | Rest]) -> - ?event(genesis_wasm_server, {server_logged, {string, Line}}), - log_server_events(Rest). -``` - -### test_base_process - -```erlang -test_base_process() -> - test_base_process(#{}). 
-``` - -### test_base_process - -```erlang -test_base_process(Opts) -> - Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), - Address = hb_util:human_id(ar_wallet:to_address(Wallet)), - hb_message:commit(#{ - <<"device">> => <<"process@1.0">>, - <<"scheduler-device">> => <<"scheduler@1.0">>, - <<"scheduler-location">> => Address, - <<"type">> => <<"Process">>, - <<"test-random-seed">> => rand:uniform(1337) - }, #{ priv_wallet => Wallet }). -``` - -### test_wasm_process - -```erlang -test_wasm_process(WASMImage) -> - test_wasm_process(WASMImage, #{}). -``` - -### test_wasm_process - -```erlang -test_wasm_process(WASMImage, Opts) -> - Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), - #{ <<"image">> := WASMImageID } = dev_wasm:cache_wasm_image(WASMImage, Opts), - hb_message:commit( - maps:merge( - hb_message:uncommitted(test_base_process(Opts)), - #{ - <<"execution-device">> => <<"stack@1.0">>, - <<"device-stack">> => [<<"WASM-64@1.0">>], - <<"image">> => WASMImageID - } - ), - #{ priv_wallet => Wallet } - ). -``` - -### test_wasm_stack_process - -```erlang -test_wasm_stack_process(Opts, Stack) -> - Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), - Address = hb_util:human_id(ar_wallet:to_address(Wallet)), - WASMProc = test_wasm_process(<<"test/aos-2-pure-xs.wasm">>, Opts), - hb_message:commit( - maps:merge( - hb_message:uncommitted(WASMProc), - #{ - <<"device-stack">> => Stack, - <<"execution-device">> => <<"genesis-wasm@1.0">>, - <<"scheduler-device">> => <<"scheduler@1.0">>, - <<"patch-from">> => <<"/results/outbox">>, - <<"passes">> => 2, - <<"stack-keys">> => - [ - <<"init">>, - <<"compute">>, - <<"snapshot">>, - <<"normalize">>, - <<"compute">> - ], - <<"scheduler">> => Address, - <<"authority">> => Address, - <<"module">> => <<"URgYpPQzvxxfYQtjrIQ116bl3YBfcImo3JEnNo8Hlrk">>, - <<"data-protocol">> => <<"ao">>, - <<"type">> => <<"Process">> - } - ), - #{ priv_wallet => Wallet } - ). 
-``` - -### test_genesis_wasm_process - -```erlang -test_genesis_wasm_process() -> - Opts = #{ - genesis_wasm_db_dir => "cache-mainnet-test/genesis-wasm", - genesis_wasm_checkpoints_dir => "cache-mainnet-test/genesis-wasm/checkpoints", - genesis_wasm_log_level => "error", - genesis_wasm_port => 6363, - execution_device => <<"genesis-wasm@1.0">> - }, - Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), - Address = hb_util:human_id(ar_wallet:to_address(Wallet)), - WASMProc = test_wasm_process(<<"test/aos-2-pure-xs.wasm">>, Opts), - hb_message:commit( - maps:merge( - hb_message:uncommitted(WASMProc), - #{ - <<"execution-device">> => <<"genesis-wasm@1.0">>, - <<"scheduler-device">> => <<"scheduler@1.0">>, - <<"push-device">> => <<"push@1.0">>, - <<"patch-from">> => <<"/results/outbox">>, - <<"passes">> => 1, - <<"scheduler">> => Address, - <<"authority">> => Address, - <<"module">> => <<"URgYpPQzvxxfYQtjrIQ116bl3YBfcImo3JEnNo8Hlrk">>, - <<"data-protocol">> => <<"ao">>, - <<"type">> => <<"Process">> - }), - #{ priv_wallet => Wallet } - ). -``` - -### schedule_test_message - -```erlang -schedule_test_message(Msg1, Text) -> - schedule_test_message(Msg1, Text, #{}). -``` - -### schedule_test_message - -```erlang -schedule_test_message(Msg1, Text, MsgBase) -> - Wallet = hb:wallet(), - UncommittedBase = hb_message:uncommitted(MsgBase), - Msg2 = - hb_message:commit(#{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - hb_message:commit( - UncommittedBase#{ - <<"type">> => <<"Message">>, - <<"test-label">> => Text - }, - #{ priv_wallet => Wallet } - ) - }, - #{ priv_wallet => Wallet } - ), - hb_ao:resolve(Msg1, Msg2, #{}). -``` - -### schedule_aos_call - -```erlang -schedule_aos_call(Msg1, Code) -> - schedule_aos_call(Msg1, Code, <<"Eval">>, #{}). -``` - -### schedule_aos_call - -```erlang -schedule_aos_call(Msg1, Code, Action) -> - schedule_aos_call(Msg1, Code, Action, #{}). 
-``` - -### schedule_aos_call - -```erlang -schedule_aos_call(Msg1, Code, Action, Opts) -> - Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), - ProcID = hb_message:id(Msg1, all), - Msg2 = - hb_message:commit( - #{ - <<"action">> => Action, - <<"data">> => Code, - <<"target">> => ProcID, - <<"timestamp">> => os:system_time(millisecond) - }, - #{ priv_wallet => Wallet } - ), - schedule_test_message(Msg1, <<"TEST MSG">>, Msg2). -``` - -### spawn_and_execute_slot_test_ - -```erlang -spawn_and_execute_slot_test_() -> - { timeout, 900, fun spawn_and_execute_slot/0 }. -``` - -### spawn_and_execute_slot - -```erlang -spawn_and_execute_slot() -> - application:ensure_all_started(hb), - Opts = #{ - priv_wallet => hb:wallet(), - cache_control => <<"always">>, - store => hb_opts:get(store) - }, - Msg1 = test_genesis_wasm_process(), - hb_cache:write(Msg1, Opts), - {ok, _SchedInit} = - hb_ao:resolve( - Msg1, - #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"schedule">>, - <<"body">> => Msg1 - }, - Opts - ), - {ok, _} = schedule_aos_call(Msg1, <<"return 1+1">>), - {ok, _} = schedule_aos_call(Msg1, <<"return 2+2">>), - {ok, SchedulerRes} = - hb_ao:resolve(Msg1, #{ - <<"method">> => <<"GET">>, - <<"path">> => <<"schedule">> - }, Opts), - % Verify process message is scheduled first - ?assertMatch( - <<"Process">>, - hb_ao:get(<<"assignments/0/body/type">>, SchedulerRes) - ), - % Verify messages are scheduled - ?assertMatch( - <<"return 1+1">>, - hb_ao:get(<<"assignments/1/body/data">>, SchedulerRes) - ), - ?assertMatch( - <<"return 2+2">>, - hb_ao:get(<<"assignments/2/body/data">>, SchedulerRes) - ), - {ok, Result} = hb_ao:resolve(Msg1, #{ <<"path">> => <<"now">> }, Opts), - ?assertEqual(<<"4">>, hb_ao:get(<<"results/data">>, Result)). -``` - -### compare_result_genesis_wasm_and_wasm_test_ - -```erlang -compare_result_genesis_wasm_and_wasm_test_() -> - { timeout, 900, fun compare_result_genesis_wasm_and_wasm/0 }. 
-``` - -### compare_result_genesis_wasm_and_wasm - -```erlang -compare_result_genesis_wasm_and_wasm() -> - application:ensure_all_started(hb), - Opts = #{ - priv_wallet => hb:wallet(), - cache_control => <<"always">>, - store => hb_opts:get(store) - }, - % Test with genesis-wasm - MsgGenesisWasm = test_genesis_wasm_process(), - hb_cache:write(MsgGenesisWasm, Opts), - {ok, _SchedInitGenesisWasm} = - hb_ao:resolve( - MsgGenesisWasm, - #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"schedule">>, - <<"body">> => MsgGenesisWasm - }, - Opts - ), - % Test with wasm - MsgWasm = test_wasm_stack_process(Opts, [ - <<"WASI@1.0">>, - <<"JSON-Iface@1.0">>, - <<"WASM-64@1.0">>, - <<"Multipass@1.0">> - ]), - hb_cache:write(MsgWasm, Opts), - {ok, _SchedInitWasm} = - hb_ao:resolve( - MsgWasm, - #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"schedule">>, - <<"body">> => MsgWasm - }, - Opts - ), - % Schedule messages - {ok, _} = schedule_aos_call(MsgGenesisWasm, <<"return 1+1">>), - {ok, _} = schedule_aos_call(MsgGenesisWasm, <<"return 2+2">>), - {ok, _} = schedule_aos_call(MsgWasm, <<"return 1+1">>), - {ok, _} = schedule_aos_call(MsgWasm, <<"return 2+2">>), - % Get results - {ok, ResultGenesisWasm} = - hb_ao:resolve( - MsgGenesisWasm, - #{ <<"path">> => <<"now">> }, - Opts - ), - {ok, ResultWasm} = - hb_ao:resolve( - MsgWasm, - #{ <<"path">> => <<"now">> }, - Opts - ), - ?assertEqual( - hb_ao:get(<<"results/data">>, ResultGenesisWasm), - hb_ao:get(<<"results/data">>, ResultWasm) - ). -``` - -### send_message_between_genesis_wasm_processes_test_ - -```erlang -send_message_between_genesis_wasm_processes_test_() -> - { timeout, 900, fun send_message_between_genesis_wasm_processes/0 }. 
-``` - -### send_message_between_genesis_wasm_processes - -```erlang -send_message_between_genesis_wasm_processes() -> - application:ensure_all_started(hb), - Opts = #{ - priv_wallet => hb:wallet(), - cache_control => <<"always">>, - store => hb_opts:get(store) - }, - % Create receiver process with handler - MsgReceiver = test_genesis_wasm_process(), - hb_cache:write(MsgReceiver, Opts), - ProcId = dev_process:process_id(MsgReceiver, #{}, #{}), - {ok, _SchedInitReceiver} = - hb_ao:resolve( - MsgReceiver, - #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"schedule">>, - <<"body">> => MsgReceiver - }, - Opts - ), - schedule_aos_call(MsgReceiver, <<"Number = 10">>), - schedule_aos_call(MsgReceiver, <<" - Handlers.add('foo', function(msg) - print(\"Number: \" .. Number * 2) - return Number * 2 end) - ">>), - schedule_aos_call(MsgReceiver, <<"return Number">>), - {ok, ResultReceiver} = hb_ao:resolve(MsgReceiver, <<"now">>, Opts), - ?assertEqual(<<"10">>, hb_ao:get(<<"results/data">>, ResultReceiver)), - % Create sender process to send message to receiver - MsgSender = test_genesis_wasm_process(), - hb_cache:write(MsgSender, Opts), - {ok, _SchedInitSender} = - hb_ao:resolve( - MsgSender, - #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"schedule">>, - <<"body">> => MsgSender - }, - Opts - ), - {ok, SendMsgToReceiver} = - schedule_aos_call( - MsgSender, - <<"Send({ Target = \"", ProcId/binary, "\", Action = \"foo\" })">> - ), - {ok, ResultSender} = hb_ao:resolve(MsgSender, <<"now">>, Opts), - {ok, Slot} = hb_ao:resolve(SendMsgToReceiver, <<"slot">>, Opts), - {ok, Res} = - hb_ao:resolve( - MsgSender, - #{ - <<"path">> => <<"push">>, - <<"slot">> => Slot, - <<"result-depth">> => 1 - }, - Opts - ), - % Get schedule for receiver - {ok, ScheduleReceiver} = - hb_ao:resolve( - MsgReceiver, - #{ - <<"method">> => <<"GET">>, - <<"path">> => <<"schedule">> - }, - Opts - ), - ?assertEqual( - <<"foo">>, - hb_ao:get(<<"assignments/4/body/action">>, ScheduleReceiver) - ), - {ok, 
NewResultReceiver} = hb_ao:resolve(MsgReceiver, <<"now">>, Opts), - ?assertEqual( - <<"Number: 20">>, - hb_ao:get(<<"results/data">>, NewResultReceiver) - ). -``` - -### dryrun_genesis_wasm_test_ - -```erlang -dryrun_genesis_wasm_test_() -> - { timeout, 900, fun dryrun_genesis_wasm/0 }. -``` - -### dryrun_genesis_wasm - -```erlang -dryrun_genesis_wasm() -> - application:ensure_all_started(hb), - Opts = #{ - priv_wallet => hb:wallet(), - cache_control => <<"always">>, - store => hb_opts:get(store) - }, - % Set up process with increment handler to receive messages - ProcReceiver = test_genesis_wasm_process(), - hb_cache:write(ProcReceiver, #{}), - {ok, _SchedInit1} = - hb_ao:resolve( - ProcReceiver, - #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"schedule">>, - <<"body">> => ProcReceiver - }, - Opts - ), - ProcReceiverId = dev_process:process_id(ProcReceiver, #{}, #{}), - % Initialize increment handler - {ok, _} = schedule_aos_call(ProcReceiver, <<" - Number = Number or 5 - Handlers.add('Increment', function(msg) - Number = Number + 1 - ao.send({ Target = msg.From, Data = 'The current number is ' .. Number .. '!' }) - return 'The current number is ' .. Number .. '!' 
- end) - ">>), - % Ensure Handlers were properly added - schedule_aos_call(ProcReceiver, <<"return #Handlers.list">>), - {ok, NumHandlers} = - hb_ao:resolve( - ProcReceiver, - <<"now/results/data">>, - Opts - ), - % _eval, _default, Increment - ?assertEqual(<<"3">>, NumHandlers), - schedule_aos_call(ProcReceiver, <<"return Number">>), - {ok, InitialNumber} = - hb_ao:resolve( - ProcReceiver, - <<"now/results/data">>, - Opts - ), - % Number is initialized to 5 - ?assertEqual(<<"5">>, InitialNumber), - % Set up sender process to send Action: Increment to receiver - ProcSender = test_genesis_wasm_process(), - hb_cache:write(ProcSender, #{}), - {ok, _SchedInit2} = hb_ao:resolve( - ProcSender, - #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"schedule">>, - <<"body">> => ProcSender - }, - Opts - ), - % First increment + push - {ok, ToPush} = - schedule_aos_call( - ProcSender, - << - "Send({ Target = \"", - (ProcReceiverId)/binary, - "\", Action = \"Increment\" })" - >> - ), - SlotToPush = hb_ao:get(<<"slot">>, ToPush, Opts), - ?assertEqual(1, SlotToPush), - {ok, PushRes1} = - hb_ao:resolve( - ProcSender, - #{ - <<"path">> => <<"push">>, - <<"slot">> => SlotToPush, - <<"result-depth">> => 1 - }, - Opts - ), - % Check that number incremented normally - schedule_aos_call(ProcReceiver, <<"return Number">>), - {ok, AfterIncrementResult} = - hb_ao:resolve( - ProcReceiver, - <<"now/results/data">>, - Opts - ), - ?assertEqual(<<"6">>, AfterIncrementResult), - % Send another increment and push it - {ok, ToPush2} = - schedule_aos_call( - ProcSender, - << - "Send({ Target = \"", - (ProcReceiverId)/binary, - "\", Action = \"Increment\" })" - >> - ), - SlotToPush2 = hb_ao:get(<<"slot">>, ToPush2, Opts), - ?assertEqual(3, SlotToPush2), - {ok, PushRes2} = - hb_ao:resolve( - ProcSender, - #{ - <<"path">> => <<"push">>, - <<"slot">> => SlotToPush2, - <<"result-depth">> => 1 - }, - Opts - ), - % Check that number incremented normally - schedule_aos_call(ProcReceiver, <<"return 
Number">>), - {ok, AfterIncrementResult2} = - hb_ao:resolve( - ProcReceiver, - <<"now/results/data">>, - Opts - ), - ?assertEqual(<<"7">>, AfterIncrementResult2), - % Test dryrun by calling compute with no assignment - % Should return result without changing state - DryrunMsg = - hb_message:commit( - #{ - <<"path">> => <<"as/compute">>, - <<"as-device">> => <<"execution">>, - <<"action">> => <<"Increment">>, - <<"target">> => ProcReceiverId - }, - Opts - ), - {ok, DryrunResult} = hb_ao:resolve(ProcReceiver, DryrunMsg, Opts), - {ok, DryrunData} = - hb_ao:resolve(DryrunResult, <<"results/outbox/1/Data">>, Opts), - ?assertEqual(<<"The current number is 8!">>, DryrunData), - % Ensure that number did not increment - schedule_aos_call(ProcReceiver, <<"return Number">>), - {ok, AfterDryrunResult} = - hb_ao:resolve( - ProcReceiver, - <<"now/results/data">>, - Opts - ), - ?assertEqual(<<"7">>, AfterDryrunResult). -``` - ---- - -*Generated from [dev_genesis_wasm.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_genesis_wasm.erl)* diff --git a/docs/book/src/dev_green_zone.erl.md b/docs/book/src/dev_green_zone.erl.md deleted file mode 100644 index f2e6256e5..000000000 --- a/docs/book/src/dev_green_zone.erl.md +++ /dev/null @@ -1,342 +0,0 @@ -# dev_green_zone - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_green_zone.erl) - -The green zone device, which provides secure communication and identity -management between trusted nodes. -It handles node initialization, joining existing green zones, key exchange, -and node identity cloning. All operations are protected by hardware -commitment and encryption. - ---- - -## Exported Functions - -- `become/3` -- `info/1` -- `info/3` -- `init/3` -- `is_trusted/3` -- `join/3` -- `key/3` - ---- - -### info - -The green zone device, which provides secure communication and identity -Controls which functions are exposed via the device API. 
- -```erlang -info(_) -> - #{ exports => [info, init, join, become, key, is_trusted] }. -``` - -### info - -Provides information about the green zone device and its API. -Provides the default required options for a green zone. -Replace values of <<"self">> in a configuration map with corresponding values from Opts. -Returns `true` if the request is signed by a trusted node. - -```erlang --spec replace_self_values(Config :: map(), Opts :: map()) -> map(). -replace_self_values(Config, Opts) -> - maps:map( - fun(Key, Value) -> - case Value of - <<"self">> -> - hb_opts:get(Key, not_found, Opts); - _ -> - Value - end - end, - Config - ). -``` - -```erlang -info(_Msg1, _Msg2, _Opts) -> - InfoBody = #{ - <<"description">> => - <<"Green Zone secure communication and identity management for trusted nodes">>, - <<"version">> => <<"1.0">>, - <<"api">> => #{ - <<"info">> => #{ - <<"description">> => <<"Get device info">> - }, - <<"init">> => #{ - <<"description">> => <<"Initialize the green zone">>, - <<"details">> => - <<"Sets up the node's cryptographic identity with wallet and AES key">> - }, - <<"join">> => #{ - <<"description">> => <<"Join an existing green zone">>, - <<"required_node_opts">> => #{ - <<"green_zone_peer_location">> => <<"Target peer's address">>, - <<"green_zone_peer_id">> => <<"Target peer's unique identifier">> - } - }, - <<"key">> => #{ - <<"description">> => <<"Retrieve and encrypt the node's private key">>, - <<"details">> => - <<"Returns the node's private key encrypted with the shared AES key">> - }, - <<"become">> => #{ - <<"description">> => <<"Clone the identity of a target node">>, - <<"required_node_opts">> => #{ - <<"green_zone_peer_location">> => <<"Target peer's address">>, - <<"green_zone_peer_id">> => <<"Target peer's unique identifier">> - } - } - } - }, - {ok, #{<<"status">> => 200, <<"body">> => InfoBody}}. -%% -%% -%% -%% -``` - -### is_trusted - -Provides information about the green zone device and its API. 
-Provides the default required options for a green zone. -Replace values of <<"self">> in a configuration map with corresponding values from Opts. -Returns `true` if the request is signed by a trusted node. - -```erlang --spec replace_self_values(Config :: map(), Opts :: map()) -> map(). -replace_self_values(Config, Opts) -> - maps:map( - fun(Key, Value) -> - case Value of - <<"self">> -> - hb_opts:get(Key, not_found, Opts); - _ -> - Value - end - end, - Config - ). -``` - -```erlang -is_trusted(_M1, Req, Opts) -> - Signers = hb_message:signers(Req, Opts), - {ok, - hb_util:bin( - lists:any( - fun(Signer) -> - lists:member( - Signer, - maps:keys(hb_opts:get(trusted_nodes, #{}, Opts)) - ) - end, - Signers - ) - ) - }. -``` - -### join - -Initiates the join process for a node to enter an existing green zone. - -```erlang --spec join(M1 :: term(), M2 :: term(), Opts :: map()) -> - {ok, map()} | {error, binary()}. -``` - -```erlang -join(M1, M2, Opts) -> - ?event(green_zone, {join, start}), - PeerLocation = hb_opts:get(<<"green_zone_peer_location">>, undefined, Opts), - PeerID = hb_opts:get(<<"green_zone_peer_id">>, undefined, Opts), - Identities = hb_opts:get(identities, #{}, Opts), - HasGreenZoneIdentity = maps:is_key(<<"green-zone">>, Identities), - ?event(green_zone, {join_peer, PeerLocation, PeerID, HasGreenZoneIdentity}), - if (not HasGreenZoneIdentity) andalso (PeerLocation =/= undefined) andalso (PeerID =/= undefined) -> - join_peer(PeerLocation, PeerID, M1, M2, Opts); - true -> - validate_join(M1, M2, hb_cache:ensure_all_loaded(Opts, Opts)) - end. -``` - -### key - -Encrypts and provides the node's private key for secure sharing. - -```erlang --spec key(M1 :: term(), M2 :: term(), Opts :: map()) -> - {ok, map()} | {error, binary()}. -``` - -```erlang -key(_M1, _M2, Opts) -> - ?event(green_zone, {get_key, start}), - % Retrieve the shared AES key and the node's wallet. -``` - -### become - -Clones the identity of a target node in the green zone. 
- -```erlang --spec become(M1 :: term(), M2 :: term(), Opts :: map()) -> - {ok, map()} | {error, binary()}. -``` - -```erlang -become(_M1, _M2, Opts) -> - ?event(green_zone, {become, start}), - % 1. Retrieve the target node's address from the incoming message. -``` - -### finalize_become - -```erlang -finalize_become(KeyResp, NodeLocation, NodeID, GreenZoneAES, Opts) -> - % 4. Decode the response to obtain the encrypted key and IV. -``` - -### join_peer - -Processes a join request to a specific peer node. - -```erlang --spec join_peer( - PeerLocation :: binary(), - PeerID :: binary(), - M1 :: term(), - M2 :: term(), - Opts :: map()) -> {ok, map()} | {error, map() | binary()}. -``` - -```erlang -join_peer(PeerLocation, PeerID, _M1, _M2, InitOpts) -> - % Check here if the node is already part of a green zone. -``` - -### validate_join - -Validates an incoming join request from another node. - -```erlang --spec validate_join(M1 :: term(), Req :: map(), Opts :: map()) -> - {ok, map()} | {error, binary()}. -``` - -```erlang -validate_join(M1, Req, Opts) -> - case validate_peer_opts(Req, Opts) of - true -> do_nothing; - false -> throw(invalid_join_request) - end, - ?event(green_zone, {join, start}), - % Retrieve the commitment report and address from the join request. -``` - -### add_trusted_node - -Adds a node to the trusted nodes list with its commitment report. - -```erlang --spec add_trusted_node( - NodeAddr :: binary(), - Report :: map(), - RequesterPubKey :: term(), Opts :: map()) -> ok. -``` - -```erlang -add_trusted_node(NodeAddr, Report, RequesterPubKey, Opts) -> - % Retrieve the current trusted nodes map. -``` - -### decrypt_zone_key - -Decrypts an AES key using the node's RSA private key. - -```erlang --spec decrypt_zone_key(EncZoneKey :: binary(), Opts :: map()) -> - {ok, binary()} | {error, binary()}. 
-``` - -```erlang -decrypt_zone_key(EncZoneKey, Opts) -> - % Decode if necessary - RawEncKey = case is_binary(EncZoneKey) of - true -> base64:decode(EncZoneKey); - false -> EncZoneKey - end, - % Get wallet and extract key components - {{_KeyType = {rsa, E}, Priv, Pub}, _PubKey} = - hb_opts:get(priv_wallet, #{}, Opts), - % Create RSA private key record - RSAPrivKey = #'RSAPrivateKey'{ - publicExponent = E, - modulus = crypto:bytes_to_integer(Pub), - privateExponent = crypto:bytes_to_integer(Priv) - }, - DecryptedKey = public_key:decrypt_private(RawEncKey, RSAPrivKey), - ?event(green_zone, {decrypt_zone_key, complete}), - {ok, DecryptedKey}. -``` - -### try_mount_encrypted_volume - -Attempts to mount an encrypted volume using the green zone AES key. - -```erlang -try_mount_encrypted_volume(Key, Opts) -> - ?event(debug_volume, {try_mount_encrypted_volume, start}), - % Set up options for volume mounting with default paths - VolumeOpts = Opts#{ - priv_volume_key => Key, - volume_skip_decryption => <<"true">> - }, - % Call the dev_volume:mount function to handle the complete process - case dev_volume:mount(undefined, undefined, VolumeOpts) of - {ok, Result} -> - ?event(debug_volume, {volume_mount, success, Result}), - ok; - {error, Error} -> - ?event(debug_volume, {volume_mount, error, Error}), - ok % Still return ok as this is an optional operation - end. -``` - -### rsa_wallet_integration_test - -Test RSA operations with the existing wallet structure. 
- -```erlang -rsa_wallet_integration_test() -> - % Create a new wallet using ar_wallet - Wallet = ar_wallet:new(), - {{KeyType, Priv, Pub}, {KeyType, Pub}} = Wallet, - % Create test message - PlainText = <<"HyperBEAM integration test message.">>, - % Create RSA public key record for encryption - RsaPubKey = #'RSAPublicKey'{ - publicExponent = 65537, - modulus = crypto:bytes_to_integer(Pub) - }, - % Encrypt using public key - Encrypted = public_key:encrypt_public(PlainText, RsaPubKey), - % Create RSA private key record for decryption - RSAPrivKey = #'RSAPrivateKey'{ - publicExponent = 65537, - modulus = crypto:bytes_to_integer(Pub), - privateExponent = crypto:bytes_to_integer(Priv) - }, - % Verify decryption works - Decrypted = public_key:decrypt_private(Encrypted, RSAPrivKey), - % Verify roundtrip - ?assertEqual(PlainText, Decrypted), - % Verify wallet structure -``` - ---- - -*Generated from [dev_green_zone.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_green_zone.erl)* diff --git a/docs/book/src/dev_hook.erl.md b/docs/book/src/dev_hook.erl.md deleted file mode 100644 index 041cee5f1..000000000 --- a/docs/book/src/dev_hook.erl.md +++ /dev/null @@ -1,272 +0,0 @@ -# dev_hook - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_hook.erl) - -A generalized interface for `hooking` into HyperBEAM nodes. -This module allows users to define `hooks` that are executed at various -points in the lifecycle of nodes and message evaluations. -Hooks are maintained in the `node message` options, under the key `on` -key. Each `hook` may have zero or many `handlers` which their request is -executed against. A new `handler` of a hook can be registered by simply -adding a new key to that message. If multiple hooks need to be executed for -a single event, the key's value can be set to a list of hooks. -`hook`s themselves do not need to be added explicitly. Any device can add -a hook by simply executing `dev_hook:on(HookName, Req, Opts)`. 
This -function is does not affect the hashpath of a message and is not exported on -the device's API, such that it is not possible to call it directly with -AO-Core resolution. -All handlers are expressed in the form of a message, upon which the hook's -request is evaluated: - AO(HookMsg, Req, Opts) => {Status, Result} -The `Status` and `Result` of the evaluation can be used at the `hook` caller's -discretion. If multiple handlers are to be executed for a single `hook`, the -result of each is used as the input to the next, on the assumption that the -status of the previous is `ok`. If a non-`ok` status is encountered, the -evaluation is halted and the result is returned to the caller. This means -that in most cases, hooks take the form of chainable pipelines of functions, -passing the most pertinent data in the `body` key of both the request and -result. Hook definitions can also set the `hook/result` key to `ignore`, if -the result of the execution should be discarded and the prior value (the -input to the hook) should be used instead. The `hook/commit-request` key can -also be set to `true` if the request should be committed by the node before -execution of the hook. -The default HyperBEAM node implements several useful hooks. They include: - start: Executed when the node starts. - Req/body: The node's initial configuration. - Result/body: The node's possibly updated configuration. - request: Executed when a request is received via the HTTP API. - Req/body: The sequence of messages that the node will evaluate. - Req/request: The raw, unparsed singleton request. - Result/body: The sequence of messages that the node will evaluate. - step: Executed after each message in a sequence has been evaluated. - Req/body: The result of the evaluation. - Result/body: The result of the evaluation. - response: Executed when a response is sent via the HTTP API. - Req/body: The result of the evaluation. 
- Req/request: The raw, unparsed singleton request that was used to - generate the response. - Result/body: The message to be sent in response to the request. -Additionally, this module implements a traditional device API, allowing the -node operator to register hooks to the node and find those that are -currently active. - ---- - -## Exported Functions - -- `find/2` -- `find/3` -- `info/1` -- `on/3` - ---- - -### info - -A generalized interface for `hooking` into HyperBEAM nodes. -Device API information -Execute a named hook with the provided request and options - -```erlang -info(_) -> - #{ excludes => [<<"on">>] }. -``` - -### on - -A generalized interface for `hooking` into HyperBEAM nodes. -Device API information -Execute a named hook with the provided request and options - -```erlang -on(HookName, Req, Opts) -> - ?event(hook, {attempting_execution_for_hook, HookName}), - % Get all handlers for this hook from the options - Handlers = find(HookName, Opts), - % If no handlers are found, return the original request with ok status - case Handlers of - [] -> - ?event(hook, {no_handlers_for_hook, HookName}), - {ok, Req}; - _ -> - % Execute each handler in sequence, passing the result of each to - % the next as input. -``` - -### find - -Get all handlers for a specific hook from the node message options. - -```erlang -find(HookName, Opts) -> - find(#{}, #{ <<"target">> => <<"body">>, <<"body">> => HookName }, Opts). -``` - -### find - -Get all handlers for a specific hook from the node message options. - -```erlang -find(_Base, Req, Opts) -> - HookName = maps:get(maps:get(<<"target">>, Req, <<"body">>), Req), - case maps:get(HookName, hb_opts:get(on, #{}, Opts), []) of - Handler when is_map(Handler) -> - % If a single handler is found, wrap it in a list. -``` - -### execute_handlers - -Execute a list of handlers in sequence. 
- -```erlang -execute_handlers(_HookName, [], Req, _Opts) -> - % If no handlers remain, return the final request with ok status - {ok, Req}; -``` - -### execute_handlers - -Execute a list of handlers in sequence. - -```erlang -execute_handlers(HookName, [Handler|Rest], Req, Opts) -> - % Execute the current handler - ?event(hook, {executing_handler, HookName, Handler, Req}), - % Check the status of the execution - case execute_handler(HookName, Handler, Req, Opts) of - {ok, NewReq} -> - % If status is ok, continue with the next handler - ?event(hook, {handler_executed_successfully, HookName, NewReq}), - execute_handlers(HookName, Rest, NewReq, Opts); - {Status, Res} -> - % If status is error, halt execution and return the error - {Status, Res}; - Other -> - % If status is unknown, convert to error and halt execution - ?event(hook_error, {unexpected_handler_result, HookName, Other}), - {failure, - << - "Handler for hook `", - (hb_ao:normalize_key(HookName))/binary, - "` returned unexpected result." - >> - } - end. -``` - -### execute_handler - -Execute a single handler - -```erlang -execute_handler(<<"step">>, Handler, Req, Opts = #{ on := On = #{ <<"step">> := _ }}) -> - % The `step' hook is a special case: It is executed during the course of - % a resolution, and as such, the key must be removed from the node message - % before execution of the handler. Failure to do so will result in infinite - % recursion. -``` - -### execute_handler - -```erlang -execute_handler(HookName, Handler, Req, Opts) -> - try - % Resolve the handler message, setting the path to the handler name if - % it is not already set. We ensure to ignore the hashpath such that the - % handler does not affect the hashpath of a request's output. If the - % `hook/commit` key is set to `true`, the handler request will be - % committed before execution. 
-``` - -### no_handlers_test - -Test that hooks with no handlers return the original request - -```erlang -no_handlers_test() -> - Req = #{ <<"test">> => <<"value">> }, - Opts = #{}, - {ok, Result} = on(<<"test_hook">>, Req, Opts), - ?assertEqual(Req, Result). -``` - -### single_handler_test - -Test that a single handler is executed correctly - -```erlang -single_handler_test() -> - % Create a message with a mock handler that adds a key to the request. -``` - -### multiple_handlers_test - -Test that multiple handlers form a pipeline -Test that pipeline execution halts on error - -```erlang -multiple_handlers_test() -> - % Create mock handlers that modify the request in sequence - Handler1 = #{ - <<"device">> => #{ - <<"test-hook">> => - fun(_, Req, _) -> - {ok, Req#{ <<"handler1">> => true }} - end - } - }, - Handler2 = #{ - <<"device">> => #{ - <<"test-hook">> => - fun(_, Req, _) -> - {ok, Req#{ <<"handler2">> => true }} - end - } - }, - Req = #{ <<"test">> => <<"value">> }, - Opts = #{ on => #{ <<"test-hook">> => [Handler1, Handler2] }}, - {ok, Result} = on(<<"test-hook">>, Req, Opts), - ?assertEqual(true, maps:get(<<"handler1">>, Result)), - ?assertEqual(true, maps:get(<<"handler2">>, Result)). 
-``` - -### halt_on_error_test - -Test that multiple handlers form a pipeline -Test that pipeline execution halts on error - -```erlang -halt_on_error_test() -> - % Create handlers where the second one returns an error - Handler1 = #{ - <<"device">> => #{ - <<"test-hook">> => - fun(_, Req, _) -> - {ok, Req#{ <<"handler1">> => true }} - end - } - }, - Handler2 = #{ - <<"device">> => #{ - <<"test-hook">> => - fun(_, _, _) -> - {error, <<"Error in handler2">>} - end - } - }, - Handler3 = #{ - <<"device">> => #{ - <<"test-hook">> => - fun(_, Req, _) -> - {ok, Req#{ <<"handler3">> => true }} - end - } - }, - Req = #{ <<"test">> => <<"value">> }, - Opts = #{ on => #{ <<"test-hook">> => [Handler1, Handler2, Handler3] }}, - {error, Result} = on(<<"test-hook">>, Req, Opts), -``` - ---- - -*Generated from [dev_hook.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_hook.erl)* diff --git a/docs/book/src/dev_hyperbuddy.erl.md b/docs/book/src/dev_hyperbuddy.erl.md deleted file mode 100644 index 7736eefa1..000000000 --- a/docs/book/src/dev_hyperbuddy.erl.md +++ /dev/null @@ -1,291 +0,0 @@ -# dev_hyperbuddy - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_hyperbuddy.erl) - -A device that renders a REPL-like interface for AO-Core via HTML. - ---- - -## Exported Functions - -- `events/3` -- `format/3` -- `info/0` -- `metrics/3` -- `return_error/2` -- `return_file/2` -- `throw/3` - ---- - -### info - -A device that renders a REPL-like interface for AO-Core via HTML. -Export an explicit list of files via http. 
- -```erlang -info() -> - #{ - default => fun serve/4, - routes => #{ - % Default message viewer page: - <<"index">> => <<"index.html">>, - % HyperBEAM default homepage: - <<"dashboard">> => <<"dashboard.html">>, - % Interactive REPL: - <<"console">> => <<"console.html">>, - <<"graph">> => <<"graph.html">>, - % Styling and scripts: - <<"styles.css">> => <<"styles.css">>, - <<"metrics.js">> => <<"metrics.js">>, - <<"devices.js">> => <<"devices.js">>, - <<"utils.js">> => <<"utils.js">>, - <<"dashboard.js">> => <<"dashboard.js">>, - <<"graph.js">> => <<"graph.js">>, - <<"404.html">> => <<"404.html">> - }, - excludes => [<<"return_file">>] - }. -``` - -### metrics - -The main HTML page for the REPL device. - -```erlang -metrics(_, Req, Opts) -> - case hb_opts:get(prometheus, not hb_features:test(), Opts) of - true -> - {_, HeaderList, Body} = - prometheus_http_impl:reply( - #{path => true, - headers => - fun(Name, Default) -> - hb_ao:get(Name, Req, Default, Opts) - end, - registry => prometheus_registry:exists(<<"default">>), - standalone => false} - ), - RawHeaderMap = - hb_maps:from_list( - prometheus_cowboy:to_cowboy_headers(HeaderList) - ), - Headers = - hb_maps:map( - fun(_, Value) -> hb_util:bin(Value) end, - RawHeaderMap, - Opts - ), - {ok, Headers#{ <<"body">> => Body }}; - false -> - {ok, #{ <<"body">> => <<"Prometheus metrics disabled.">> }} - end. -``` - -### events - -Return the current event counters as a message. - -```erlang -events(_, _Req, _Opts) -> - {ok, hb_event:counters()}. -``` - -### format - -Employ HyperBEAM's internal pretty printer to format a message. 
- -```erlang -format(Base, Req, Opts) -> - LoadedBase = hb_cache:ensure_all_loaded(Base, Opts), - LoadedReq = hb_cache:ensure_all_loaded(Req, Opts), - {ok, - #{ - <<"body">> => - hb_util:bin( - hb_format:message( - #{ - <<"base">> => - maps:without( - [<<"device">>], - hb_private:reset(LoadedBase)), - <<"request">> => - maps:without( - [<<"path">>], - hb_private:reset(LoadedReq) - ) - }, - Opts#{ - linkify_mode => discard, - cache_control => [<<"no-cache">>, <<"no-store">>] - } - ) - ) - } - }. -``` - -### throw - -Test key for validating the behavior of the `500` HTTP response. - -```erlang -throw(_Msg, _Req, Opts) -> - case hb_opts:get(mode, prod, Opts) of - prod -> {error, <<"Forced-throw unavailable in `prod` mode.">>}; - debug -> throw({intentional_error, Opts}) - end. -``` - -### serve - -Serve a file from the priv directory. Only serves files that are explicitly - -```erlang -serve(<<"keys">>, M1, _M2, Opts) -> dev_message:keys(M1, Opts); -``` - -### serve - -Serve a file from the priv directory. Only serves files that are explicitly - -```erlang -serve(<<"set">>, M1, M2, Opts) -> dev_message:set(M1, M2, Opts); -``` - -### serve - -Serve a file from the priv directory. Only serves files that are explicitly - -```erlang -serve(Key, _, _, Opts) -> - ?event({hyperbuddy_serving, Key}), - Routes = hb_maps:get(routes, info(), no_routes, Opts), - case hb_maps:get(Key, Routes, undefined, Opts) of - undefined -> {error, not_found}; - Filename -> return_file(Filename) - end. -``` - -### return_file - -Read a file from disk and serve it as a static HTML page. - -```erlang -return_file(Name) -> - return_file(<<"hyperbuddy@1.0">>, Name, #{}). -``` - -### return_file - -Read a file from disk and serve it as a static HTML page. - -```erlang -return_file(Device, Name) -> - return_file(Device, Name, #{}). 
-``` - -### return_file - -```erlang -return_file(Device, Name, Template) -> - Base = hb_util:bin(code:priv_dir(hb)), - Filename = <>, - ?event({hyperbuddy_serving, Filename}), - case file:read_file(Filename) of - {ok, RawBody} -> - Body = apply_template(RawBody, Template), - {ok, #{ - <<"body">> => Body, - <<"content-type">> => - case filename:extension(Filename) of - <<".html">> -> <<"text/html">>; - <<".js">> -> <<"text/javascript">>; - <<".css">> -> <<"text/css">>; - <<".png">> -> <<"image/png">>; - <<".ico">> -> <<"image/x-icon">> - end - } - }; - {error, _} -> - {error, not_found} - end. -``` - -### return_error - -Return an error page, with the `{{error}}` template variable replaced. - -```erlang -return_error(Error, Opts) when not is_map(Error) -> - return_error(#{ <<"body">> => Error }, Opts); -``` - -### return_error - -Return an error page, with the `{{error}}` template variable replaced. - -```erlang -return_error(ErrorMsg, Opts) -> - return_file( - <<"hyperbuddy@1.0">>, - <<"500.html">>, - #{ <<"error">> => hb_format:error(ErrorMsg, Opts) } - ). -``` - -### apply_template - -Apply a template to a body. - -```erlang -apply_template(Body, Template) when is_map(Template) -> - apply_template(Body, maps:to_list(Template)); -``` - -### apply_template - -Apply a template to a body. - -```erlang -apply_template(Body, []) -> - Body; -``` - -### apply_template - -Apply a template to a body. - -```erlang -apply_template(Body, [{Key, Value} | Rest]) -> - apply_template( - re:replace( - Body, - <<"\\{\\{", Key/binary, "\\}\\}">>, - hb_util:bin(Value), - [global, {return, binary}] - ), - Rest - ). 
-``` - -### return_templated_file_test - -```erlang -return_templated_file_test() -> - {ok, #{ <<"body">> := Body }} = - return_file( - <<"hyperbuddy@1.0">>, - <<"500.html">>, - #{ - <<"error">> => <<"This is an error message.">> - } - ), - ?assertNotEqual( - binary:match(Body, <<"This is an error message.">>), - nomatch -``` - ---- - -*Generated from [dev_hyperbuddy.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_hyperbuddy.erl)* diff --git a/docs/book/src/dev_json_iface.erl.md b/docs/book/src/dev_json_iface.erl.md deleted file mode 100644 index c026f2389..000000000 --- a/docs/book/src/dev_json_iface.erl.md +++ /dev/null @@ -1,685 +0,0 @@ -# dev_json_iface - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_json_iface.erl) - -A device that provides a way for WASM execution to interact with -the HyperBEAM (and AO) systems, using JSON as a shared data representation. -The interface is easy to use. It works as follows: -1. The device is given a message that contains a process definition, WASM - environment, and a message that contains the data to be processed, - including the image to be used in part of `execute{pass=1}`. -2. The device is called with `execute{pass=2}`, which reads the result of - the process execution from the WASM environment and adds it to the - message. -The device has the following requirements and interface: -
-    M1/Computed when /Pass == 1 ->
-        Assumes:
-            M1/priv/wasm/instance
-            M1/Process
-            M2/Message
-            M2/Assignment/Block-Height
-        Generates:
-            /wasm/handler
-            /wasm/params
-        Side-effects:
-            Writes the process and message as JSON representations into the
-            WASM environment.
-    M1/Computed when M2/Pass == 2 ->
-        Assumes:
-            M1/priv/wasm/instance
-            M2/Results
-            M2/Process
-        Generates:
-            /Results/Outbox
-            /Results/Data
- ---- - -## Exported Functions - -- `compute/3` -- `generate_aos_msg/2` -- `generate_stack/1` -- `generate_stack/2` -- `generate_stack/3` -- `init/3` -- `json_to_message/2` -- `message_to_json_struct/2` - ---- - -### init - -A device that provides a way for WASM execution to interact with -Initialize the device. -On first pass prepare the call, on second pass get the results. - -```erlang -init(M1, _M2, Opts) -> - {ok, hb_ao:set(M1, #{<<"function">> => <<"handle">>}, Opts)}. -``` - -### compute - -A device that provides a way for WASM execution to interact with -Initialize the device. -On first pass prepare the call, on second pass get the results. - -```erlang -compute(M1, M2, Opts) -> - case hb_ao:get(<<"pass">>, M1, Opts) of - 1 -> prep_call(M1, M2, Opts); - 2 -> results(M1, M2, Opts); - _ -> {ok, M1} - end. -``` - -### prep_call - -Prepare the WASM environment for execution by writing the process string and - -```erlang -prep_call(RawM1, RawM2, Opts) -> - M1 = hb_cache:ensure_all_loaded(RawM1, Opts), - M2 = hb_cache:ensure_all_loaded(RawM2, Opts), - ?event({prep_call, M1, M2, Opts}), - Process = hb_ao:get(<<"process">>, M1, Opts#{ hashpath => ignore }), - Message = hb_ao:get(<<"body">>, M2, Opts#{ hashpath => ignore }), - Image = hb_ao:get(<<"process/image">>, M1, Opts), - BlockHeight = hb_ao:get(<<"block-height">>, M2, Opts), - Props = message_to_json_struct(denormalize_message(Message, Opts), Opts), - MsgProps = - Props#{ - <<"Module">> => Image, - <<"Block-Height">> => BlockHeight - }, - MsgJson = hb_json:encode(MsgProps), - ProcessProps = - #{ - <<"Process">> => message_to_json_struct(Process, Opts) - }, - ProcessJson = hb_json:encode(ProcessProps), - env_write(ProcessJson, MsgJson, M1, M2, Opts). -``` - -### denormalize_message - -Normalize a message for AOS-compatibility. 
- -```erlang -denormalize_message(Message, Opts) -> - NormOwnerMsg = - case hb_message:signers(Message, Opts) of - [] -> Message; - [PrimarySigner|_] -> - {ok, _, Commitment} = hb_message:commitment(PrimarySigner, Message, Opts), - Message#{ - <<"owner">> => hb_util:human_id(PrimarySigner), - <<"signature">> => - hb_ao:get(<<"signature">>, Commitment, <<>>, Opts) - } - end, - NormOwnerMsg#{ - <<"id">> => hb_message:id(Message, all, Opts) - }. -``` - -### message_to_json_struct - -```erlang -message_to_json_struct(RawMsg, Opts) -> - message_to_json_struct(RawMsg, [owner_as_address], Opts). -``` - -### message_to_json_struct - -```erlang -message_to_json_struct(RawMsg, Features, Opts) -> - TABM = - hb_message:convert( - hb_private:reset(RawMsg), - tabm, - Opts - ), - MsgWithoutCommitments = hb_maps:without([<<"commitments">>], TABM, Opts), - ID = hb_message:id(RawMsg, all), - ?event({encoding, {id, ID}, {msg, RawMsg}}), - {Owner, Signature} = - case hb_message:signers(RawMsg, Opts) of - [] -> {<<>>, <<>>}; - [Signer|_] -> - {ok, _, Commitment} = - hb_message:commitment(Signer, RawMsg, Opts), - CommitmentSignature = - hb_ao:get(<<"signature">>, Commitment, <<>>, Opts), - case lists:member(owner_as_address, Features) of - true -> - { - hb_util:native_id(Signer), - CommitmentSignature - }; - false -> - CommitmentOwner = - hb_ao:get_first( - [ - {Commitment, <<"key">>}, - {Commitment, <<"owner">>} - ], - no_signing_public_key_found_in_commitment, - Opts - ), - {CommitmentOwner, CommitmentSignature} - end - end, - Last = - hb_ao:get( - <<"anchor">>, - {as, <<"message@1.0">>, MsgWithoutCommitments}, - <<>>, - Opts - ), - Data = - hb_ao:get( - <<"data">>, - {as, <<"message@1.0">>, MsgWithoutCommitments}, - <<>>, - Opts - ), - Target = - hb_ao:get( - <<"target">>, - {as, <<"message@1.0">>, MsgWithoutCommitments}, - <<>>, - Opts - ), - % Set "From" if From-Process is Tag or set with "Owner" address - From = - hb_ao:get( - <<"from-process">>, - {as, <<"message@1.0">>, 
MsgWithoutCommitments}, - hb_util:encode(Owner), - Opts - ), - #{ - <<"Id">> => safe_to_id(ID), - % NOTE: In Arweave TXs, these are called "last_tx" - <<"Anchor">> => Last, - % NOTE: When sent to ao "Owner" is the wallet address - <<"Owner">> => hb_util:encode(Owner), - <<"From">> => case ?IS_ID(From) of true -> safe_to_id(From); false -> From end, - <<"Tags">> => prepare_tags(TABM, Opts), - <<"Target">> => safe_to_id(Target), - <<"Data">> => Data, - <<"Signature">> => - case byte_size(Signature) of - 0 -> <<>>; - 512 -> hb_util:encode(Signature); - _ -> Signature - end - }. -``` - -### prepare_tags - -Prepare the tags of a message as a key-value list, for use in the - -```erlang -prepare_tags(Msg, Opts) -> - % Prepare an ANS-104 message for JSON-Struct construction. -``` - -### prepare_header_case_tags - -Convert a message without an `original-tags` field into a list of - -```erlang -prepare_header_case_tags(TABM, Opts) -> - % Prepare a non-ANS-104 message for JSON-Struct construction. 
-``` - -### json_to_message - -Translates a compute result -- either from a WASM execution using the - -```erlang -json_to_message(JSON, Opts) when is_binary(JSON) -> - json_to_message(hb_json:decode(JSON), Opts); -``` - -### json_to_message - -Translates a compute result -- either from a WASM execution using the - -```erlang -json_to_message(Resp, Opts) when is_map(Resp) -> - {ok, Data, Messages, Patches} = normalize_results(Resp), - Output = - #{ - <<"outbox">> => - hb_maps:from_list( - [ - {MessageNum, preprocess_results(Msg, Opts)} - || - {MessageNum, Msg} <- - lists:zip( - lists:seq(1, length(Messages)), - Messages - ) - ] - ), - <<"patches">> => lists:map(fun(Patch) -> tags_to_map(Patch, Opts) end, Patches), - <<"data">> => Data - }, - {ok, Output}; -``` - -### json_to_message - -Translates a compute result -- either from a WASM execution using the - -```erlang -json_to_message(#{ <<"ok">> := false, <<"error">> := Error }, _Opts) -> - {error, Error}; -``` - -### json_to_message - -Translates a compute result -- either from a WASM execution using the - -```erlang -json_to_message(Other, _Opts) -> - {error, - #{ - <<"error">> => <<"Invalid JSON message input.">>, - <<"received">> => Other - } - }. -``` - -### safe_to_id - -```erlang -safe_to_id(<<>>) -> <<>>; -``` - -### safe_to_id - -```erlang -safe_to_id(ID) -> hb_util:human_id(ID). -``` - -### maybe_list_to_binary - -```erlang -maybe_list_to_binary(List) when is_list(List) -> - list_to_binary(List); -``` - -### maybe_list_to_binary - -```erlang -maybe_list_to_binary(Bin) -> - Bin. -``` - -### header_case_string - -```erlang -header_case_string(Key) -> - NormKey = hb_ao:normalize_key(Key), - Words = string:lexemes(NormKey, "-"), - TitleCaseWords = - lists:map( - fun binary_to_list/1, - lists:map( - fun string:titlecase/1, - Words - ) - ), - TitleCaseKey = list_to_binary(string:join(TitleCaseWords, "-")), - TitleCaseKey. 
-``` - -### results - -Read the computed results out of the WASM environment, assuming that - -```erlang -results(M1, M2, Opts) -> - Prefix = dev_stack:prefix(M1, M2, Opts), - Type = hb_ao:get(<<"results/", Prefix/binary, "/type">>, M1, Opts), - Proc = hb_ao:get(<<"process">>, M1, Opts), - case hb_ao:normalize_key(Type) of - <<"error">> -> - {error, - hb_ao:set( - M1, - #{ - <<"outbox">> => undefined, - <<"results">> => - #{ - <<"body">> => <<"WASM execution error.">> - } - }, - Opts - ) - }; - <<"ok">> -> - {ok, Str} = env_read(M1, M2, Opts), - try hb_json:decode(Str) of - #{<<"ok">> := true, <<"response">> := Resp} -> - {ok, ProcessedResults} = json_to_message(Resp, Opts), - PostProcessed = postprocess_outbox(ProcessedResults, Proc, Opts), - Out = hb_ao:set( - M1, - <<"results">>, - PostProcessed, - Opts - ), - ?event(debug_iface, {results, {processed, ProcessedResults}, {out, Out}}), - {ok, Out} - catch - _:_ -> - ?event(error, {json_error, Str}), - {error, - hb_ao:set( - M1, - #{ - <<"results/outbox">> => undefined, - <<"results/body">> => - <<"JSON error parsing result output.">> - }, - Opts - ) - } - end - end. -``` - -### env_read - -Read the results out of the execution environment. - -```erlang -env_read(M1, M2, Opts) -> - Prefix = dev_stack:prefix(M1, M2, Opts), - Output = hb_ao:get(<<"results/", Prefix/binary, "/output">>, M1, Opts), - case hb_private:get(<>, M1, Opts) of - not_found -> - {ok, Output}; - ReadFn -> - {ok, Read} = ReadFn(Output), - {ok, Read} - end. -``` - -### env_write - -Write the message and process into the execution environment. 
- -```erlang -env_write(ProcessStr, MsgStr, Base, Req, Opts) -> - Prefix = dev_stack:prefix(Base, Req, Opts), - Params = - case hb_private:get(<>, Base, Opts) of - not_found -> - [MsgStr, ProcessStr]; - WriteFn -> - {ok, MsgJsonPtr} = WriteFn(MsgStr), - {ok, ProcessJsonPtr} = WriteFn(ProcessStr), - [MsgJsonPtr, ProcessJsonPtr] - end, - {ok, - hb_ao:set( - Base, - #{ - <<"function">> => <<"handle">>, - <<"parameters">> => Params - }, - Opts - ) - }. -``` - -### normalize_results - -Normalize the results of an evaluation. - -```erlang -normalize_results(#{ <<"Error">> := Error }) -> - {ok, Error, [], []}; -``` - -### normalize_results - -Normalize the results of an evaluation. - -```erlang -normalize_results(Msg) -> - try - Output = maps:get(<<"Output">>, Msg, #{}), - Data = maps:get(<<"data">>, Output, maps:get(<<"Data">>, Msg, <<>>)), - {ok, - Data, - maps:get(<<"Messages">>, Msg, []), - maps:get(<<"patches">>, Msg, []) - } - catch - _:_ -> - {ok, <<>>, [], []} - end. -``` - -### preprocess_results - -After the process returns messages from an evaluation, the - -```erlang -preprocess_results(Msg, Opts) -> - Tags = tags_to_map(Msg, Opts), - FilteredMsg = - hb_maps:without( - [<<"from-process">>, <<"from-image">>, <<"anchor">>, <<"tags">>], - Msg, - Opts - ), - hb_maps:merge( - hb_maps:from_list( - lists:map( - fun({Key, Value}) -> - {hb_ao:normalize_key(Key), Value} - end, - hb_maps:to_list(FilteredMsg, Opts) - ) - ), - Tags, - Opts - ). -``` - -### tags_to_map - -Convert a message with tags into a map of their key-value pairs. - -```erlang -tags_to_map(Msg, Opts) -> - NormMsg = hb_util:lower_case_key_map( - hb_ao:normalize_keys(Msg, Opts), - Opts), - RawTags = hb_maps:get(<<"tags">>, NormMsg, [], Opts), - TagList = - [ - {hb_maps:get(<<"name">>, Tag, Opts), hb_maps:get(<<"value">>, Tag, Opts)} - || - Tag <- RawTags - ], - hb_maps:from_list(TagList). 
-``` - -### postprocess_outbox - -Post-process messages in the outbox to add the correct `from-process` - -```erlang -postprocess_outbox(Msg, Proc, Opts) -> - AdjustedOutbox = - hb_maps:map( - fun(_Key, XMsg) -> - XMsg#{ - <<"from-process">> => hb_ao:get(id, Proc, Opts), - <<"from-image">> => hb_ao:get(<<"image">>, Proc, Opts) - } - end, - hb_ao:get(<<"outbox">>, Msg, #{}, Opts), - Opts - ), - hb_ao:set(Msg, <<"outbox">>, AdjustedOutbox, Opts). -%%% Tests -``` - -### normalize_test_opts - -Post-process messages in the outbox to add the correct `from-process` - -```erlang -normalize_test_opts(Opts) -> - Opts#{ - priv_wallet => hb_opts:get(priv_wallet, hb:wallet(), Opts) - }. -``` - -### test_init - -```erlang -test_init() -> - application:ensure_all_started(hb). -``` - -### generate_stack - -```erlang -generate_stack(File) -> - generate_stack(File, <<"WASM">>). -``` - -### generate_stack - -```erlang -generate_stack(File, Mode) -> - generate_stack(File, Mode, #{}). -``` - -### generate_stack - -```erlang -generate_stack(File, _Mode, RawOpts) -> - Opts = normalize_test_opts(RawOpts), - test_init(), - Msg0 = dev_wasm:cache_wasm_image(File, Opts), - Image = hb_ao:get(<<"image">>, Msg0, Opts), - Msg1 = Msg0#{ - <<"device">> => <<"stack@1.0">>, - <<"device-stack">> => - [ - <<"wasi@1.0">>, - <<"json-iface@1.0">>, - <<"wasm-64@1.0">>, - <<"multipass@1.0">> - ], - <<"input-prefix">> => <<"process">>, - <<"output-prefix">> => <<"wasm">>, - <<"passes">> => 2, - <<"stack-keys">> => [<<"init">>, <<"compute">>], - <<"process">> => - hb_message:commit(#{ - <<"type">> => <<"Process">>, - <<"image">> => Image, - <<"scheduler">> => hb:address(), - <<"authority">> => hb:address() - }, Opts) - }, - {ok, Msg2} = hb_ao:resolve(Msg1, <<"init">>, Opts), - Msg2. -``` - -### generate_aos_msg - -```erlang -generate_aos_msg(ProcID, Code) -> - generate_aos_msg(ProcID, Code, #{}). 
-``` - -### generate_aos_msg - -```erlang -generate_aos_msg(ProcID, Code, RawOpts) -> - Opts = normalize_test_opts(RawOpts), - hb_message:commit(#{ - <<"path">> => <<"compute">>, - <<"body">> => - hb_message:commit(#{ - <<"action">> => <<"Eval">>, - <<"data">> => Code, - <<"target">> => ProcID - }, Opts), - <<"block-height">> => 1 - }, Opts). -``` - -### basic_aos_call_test_ - -```erlang -basic_aos_call_test_() -> - {timeout, 20, fun() -> - Msg = generate_stack("test/aos-2-pure-xs.wasm"), - Proc = hb_ao:get(<<"process">>, Msg, #{ hashpath => ignore }), - ProcID = hb_message:id(Proc, all), - {ok, Msg3} = - hb_ao:resolve( - Msg, - generate_aos_msg(ProcID, <<"return 1+1">>), - #{} - ), - ?event({res, Msg3}), - Data = hb_ao:get(<<"results/data">>, Msg3, #{}), - ?assertEqual(<<"2">>, Data) - end}. -``` - -### aos_stack_benchmark_test_ - -```erlang -aos_stack_benchmark_test_() -> - {timeout, 20, fun() -> - BenchTime = 5, - Opts = #{ store => hb_test_utils:test_store() }, - RawWASMMsg = generate_stack("test/aos-2-pure-xs.wasm", <<"WASM">>, Opts), - Proc = hb_ao:get(<<"process">>, RawWASMMsg, Opts#{ hashpath => ignore }), - ProcID = hb_ao:get(id, Proc, Opts), - Msg = generate_aos_msg(ProcID, <<"return 1">>, Opts), - {ok, Initialized} = - hb_ao:resolve( - RawWASMMsg, - Msg, - Opts - ), - Msg2 = generate_aos_msg(ProcID, <<"return 1+1">>, Opts), - Iterations = - hb_test_utils:benchmark( - fun() -> hb_ao:resolve(Initialized, Msg2, Opts) end, - BenchTime - ), - hb_test_utils:benchmark_print( - <<"(Minimal AOS stack:) Evaluated">>, - <<"messages">>, - Iterations, - BenchTime - ), - ?assert(Iterations >= 10), - ok -``` - ---- - -*Generated from [dev_json_iface.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_json_iface.erl)* diff --git a/docs/book/src/dev_local_name.erl.md b/docs/book/src/dev_local_name.erl.md deleted file mode 100644 index 2efc2a199..000000000 --- a/docs/book/src/dev_local_name.erl.md +++ /dev/null @@ -1,258 +0,0 @@ -# dev_local_name - -[View source 
on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_local_name.erl) - -A device for registering and looking up local names. This device uses -the node message to store a local cache of its known names, and the typical -non-volatile storage of the node message to store the names long-term. - ---- - -## Exported Functions - -- `direct_register/2` -- `info/1` -- `lookup/3` -- `register/3` - ---- - -### info - -A device for registering and looking up local names. This device uses -Export only the `lookup` and `register` functions. - -```erlang -info(_Opts) -> - #{ - excludes => [<<"direct_register">>, <<"keys">>, <<"set">>], - default => fun default_lookup/4 - }. -``` - -### lookup - -Takes a `key` argument and returns the value of the name, if it exists. - -```erlang -lookup(_, Req, Opts) -> - Key = hb_ao:get(<<"key">>, Req, no_key_specified, Opts), - ?event(local_name, {lookup, Key}), - hb_ao:resolve( - find_names(Opts), - Key, - Opts - ). -``` - -### default_lookup - -Handle all other requests by delegating to the lookup function. -Takes a `key` and `value` argument and registers the name. The caller - -```erlang -default_lookup(Key, _, Req, Opts) -> - lookup(Key, Req#{ <<"key">> => Key }, Opts). -``` - -### register - -Handle all other requests by delegating to the lookup function. -Takes a `key` and `value` argument and registers the name. The caller - -```erlang -register(_, Req, Opts) -> - case dev_meta:is(admin, Req, Opts) of - false -> - {error, - #{ - <<"status">> => 403, - <<"message">> => <<"Unauthorized.">> - } - }; - true -> - direct_register(Req, Opts) - end. -``` - -### direct_register - -Register a name without checking if the caller is an operator. 
Exported - -```erlang -direct_register(Req, Opts) -> - case hb_cache:write(hb_ao:get(<<"value">>, Req, Opts), Opts) of - {ok, MsgPath} -> - NormKey = hb_ao:normalize_key(hb_ao:get(<<"key">>, Req, Opts)), - hb_cache:link( - MsgPath, - LinkPath = << ?DEV_CACHE/binary, "/", NormKey/binary >>, - Opts - ), - load_names(Opts), - ?event( - local_name, - {registered, - {key, NormKey}, - {msg, MsgPath}, - {path, LinkPath} - } - ), - {ok, <<"Registered.">>}; - {error, _} -> - not_found - end. -``` - -### find_names - -Returns a message containing all known names. - -```erlang -find_names(Opts) -> - case hb_opts:get(local_names, not_found, Opts#{ only => local }) of - not_found -> - find_names(load_names(Opts)); - LocalNames -> - LocalNames - end. -``` - -### load_names - -Loads all known names from the cache and returns the new `node message` - -```erlang -load_names(Opts) -> - LocalNames = - maps:from_list(lists:map( - fun(Key) -> - NormKey = hb_ao:normalize_key(Key), - Path = << ?DEV_CACHE/binary, "/", NormKey/binary >>, - ?event(local_name, {loading, Path}), - case hb_cache:read(Path, Opts) of - {ok, Value} -> - {Key, Value}; - _ -> - {Key, not_found} - end - end, - hb_cache:list(?DEV_CACHE, Opts) - )), - ?event(local_name, {found_cache_keys, LocalNames}), - update_names(LocalNames, Opts). -``` - -### update_names - -Updates the node message with the new names. Further HTTP requests will - -```erlang -update_names(LocalNames, Opts) -> - hb_http_server:set_opts(NewOpts = Opts#{ local_names => LocalNames }), - NewOpts. -``` - -### generate_test_opts - -```erlang -generate_test_opts() -> - Opts = #{ - priv_wallet => ar_wallet:new() - }, - Opts. -``` - -### no_names_test - -```erlang -no_names_test() -> - ?assertEqual( - {error, not_found}, - lookup(#{}, #{ <<"key">> => <<"name1">> }, #{}) - ). 
-``` - -### lookup_opts_name_test - -```erlang -lookup_opts_name_test() -> - ?assertEqual( - {ok, <<"value1">>}, - lookup( - #{}, - #{ <<"key">> => <<"name1">> }, - #{ local_names => #{ <<"name1">> => <<"value1">>} } - ) - ). -``` - -### register_test - -```erlang -register_test() -> - TestName = <<"TEST-", (integer_to_binary(os:system_time(millisecond)))/binary>>, - Value = <<"TEST-VALUE-", (integer_to_binary(os:system_time(millisecond)))/binary>>, - Opts = generate_test_opts(), - ?assertEqual( - {ok, <<"Registered.">>}, - register( - #{}, - hb_message:commit( - #{ <<"key">> => TestName, <<"value">> => Value }, - Opts - ), - Opts - ) - ), - ?assertEqual( - {ok, Value}, - lookup(#{}, #{ <<"key">> => TestName, <<"load">> => false }, Opts) - ). -``` - -### unauthorized_test - -```erlang -unauthorized_test() -> - Opts = generate_test_opts(), - ?assertEqual( - {error, #{ <<"status">> => 403, <<"message">> => <<"Unauthorized.">> }}, - register( - #{}, - hb_message:commit( - #{ <<"key">> => <<"name1">>, <<"value">> => <<"value1">> }, - Opts#{ priv_wallet => ar_wallet:new() } - ), - Opts - ) - ). 
-``` - -### http_test - -```erlang -http_test() -> - Opts = generate_test_opts(), - Node = hb_http_server:start_node(Opts), - hb_http:post( - Node, - <<"/~local-name@1.0/register">>, - hb_message:commit( - #{ <<"key">> => <<"name1">>, <<"value">> => <<"value1">> }, - Opts - ), - Opts - ), - ?assertEqual( - {ok, <<"value1">>}, - hb_http:get( - Node, - <<"/~local-name@1.0/lookup?key=name1">>, - Opts - ) -``` - ---- - -*Generated from [dev_local_name.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_local_name.erl)* diff --git a/docs/book/src/dev_lookup.erl.md b/docs/book/src/dev_lookup.erl.md deleted file mode 100644 index 0feb4d247..000000000 --- a/docs/book/src/dev_lookup.erl.md +++ /dev/null @@ -1,91 +0,0 @@ -# dev_lookup - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lookup.erl) - -A device that looks up an ID from a local store and returns it, honoring -the `accept` key to return the correct format. - ---- - -## Exported Functions - -- `read/3` - ---- - -### read - -A device that looks up an ID from a local store and returns it, honoring -Fetch a resource from the cache using "target" ID extracted from the message - -```erlang -read(_M1, M2, Opts) -> - ID = hb_ao:get(<<"target">>, M2, Opts), - ?event({lookup, {id, ID}, {opts, Opts}}), - case hb_cache:read(ID, Opts) of - {ok, RawRes} -> - % We are sending the result over the wire, so make sure it is - % fully loaded, to save the recipient latency. -``` - -### binary_lookup_test - -```erlang -binary_lookup_test() -> - Bin = <<"Simple unsigned data item">>, - {ok, ID} = hb_cache:write(Bin, #{}), - {ok, RetrievedBin} = read(#{}, #{ <<"target">> => ID }, #{}), - ?assertEqual(Bin, RetrievedBin). 
-``` - -### message_lookup_test - -```erlang -message_lookup_test() -> - Msg = #{ <<"test-key">> => <<"test-value">>, <<"data">> => <<"test-data">> }, - {ok, ID} = hb_cache:write(Msg, #{}), - {ok, RetrievedMsg} = read(#{}, #{ <<"target">> => ID }, #{}), - ?assert(hb_message:match(Msg, RetrievedMsg)). -``` - -### aos2_message_lookup_test - -```erlang -aos2_message_lookup_test() -> - Msg = #{ <<"test-key">> => <<"test-value">>, <<"data">> => <<"test-data">> }, - {ok, ID} = hb_cache:write(Msg, #{}), - {ok, RetrievedMsg} = - read( - #{}, - #{ <<"target">> => ID, <<"accept">> => <<"application/aos-2">> }, - #{} - ), - {ok, Decoded} = dev_json_iface:json_to_message(hb_ao:get(<<"body">>, RetrievedMsg, #{}), #{}), - ?assertEqual(<<"test-data">>, hb_ao:get(<<"data">>, Decoded, #{})). -``` - -### http_lookup_test - -```erlang -http_lookup_test() -> - Store = #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-mainnet">> - }, - Opts = #{ store => [Store] }, - Msg = #{ <<"test-key">> => <<"test-value">>, <<"data">> => <<"test-data">> }, - {ok, ID} = hb_cache:write(Msg, Opts), - Node = hb_http_server:start_node(Opts), - Wallet = hb:wallet(), - Req = hb_message:commit(#{ - <<"path">> => <<"/~lookup@1.0/read?target=", ID/binary>>, - <<"device">> => <<"lookup@1.0">>, - <<"accept">> => <<"application/aos-2">> - }, Wallet), - {ok, Res} = hb_http:post(Node, Req, Opts), - {ok, Decoded} = dev_json_iface:json_to_message(hb_ao:get(<<"body">>, Res, Opts), Opts), -``` - ---- - -*Generated from [dev_lookup.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lookup.erl)* diff --git a/docs/book/src/dev_lua.erl.md b/docs/book/src/dev_lua.erl.md deleted file mode 100644 index 507879f99..000000000 --- a/docs/book/src/dev_lua.erl.md +++ /dev/null @@ -1,977 +0,0 @@ -# dev_lua - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua.erl) - -A device that calls a Lua module upon a request and returns the result. 
- ---- - -## Exported Functions - -- `decode/2` -- `encode/2` -- `functions/3` -- `info/1` -- `init/3` -- `normalize/3` -- `pure_lua_process_benchmark/1` -- `snapshot/3` - ---- - -### info - -A device that calls a Lua module upon a request and returns the result. -All keys that are not directly available in the base message are - -```erlang -info(Base) -> - #{ - default => fun compute/4, - excludes => - [<<"keys">>, <<"set">>, <<"encode">>, <<"decode">>] - ++ maps:keys(Base) - }. -``` - -### init - -Initialize the device state, loading the script into memory if it is - -```erlang -init(Base, Req, Opts) -> - ensure_initialized(Base, Req, Opts). -``` - -### ensure_initialized - -Initialize the Lua VM if it is not already initialized. Optionally takes - -```erlang -ensure_initialized(Base, _Req, Opts) -> - case hb_private:from_message(Base) of - #{<<"state">> := _} -> - ?event(debug_lua, lua_state_already_initialized), - {ok, Base}; - _ -> - ?event(debug_lua, initializing_lua_state), - case find_modules(Base, Opts) of - {ok, Modules} -> - initialize(Base, Modules, Opts); - Error -> - Error - end - end. -``` - -### find_modules - -Find the script in the base message, either by ID or by string. - -```erlang -find_modules(Base, Opts) -> - case hb_ao:get(<<"module">>, {as, <<"message@1.0">>, Base}, Opts) of - not_found -> - {error, <<"no-modules-found">>}; - Module when is_binary(Module) -> - find_modules(Base#{ <<"module">> => [Module] }, Opts); - Module when is_map(Module) -> - % If the module is a map, check its content type to see if it is - % a literal Lua module, or a map of modules with content types. -``` - -### load_modules - -Load a list of modules for installation into the Lua VM. - -```erlang -load_modules(Modules, Opts) -> load_modules(Modules, Opts, []). -``` - -### load_modules - -Load a list of modules for installation into the Lua VM. 
- -```erlang -load_modules([], _Opts, Acc) -> - {ok, lists:reverse(Acc)}; -``` - -### load_modules - -Load a list of modules for installation into the Lua VM. - -```erlang -load_modules([ModuleID | Rest], Opts, Acc) when ?IS_ID(ModuleID) -> - case hb_cache:read(ModuleID, Opts) of - {ok, Module} when is_binary(Module) -> - % The ID referred to a binary module item, so we add it to the list - % as-is. -``` - -### load_modules - -```erlang -load_modules([Module | Rest], Opts, Acc) when is_map(Module) -> - % We have found a message with a Lua module inside. Search for the binary - % of the program in the body and the data. -``` - -### initialize - -Initialize a new Lua state with a given base message and module. - -```erlang -initialize(Base, Modules, Opts) -> - State0 = luerl:init(), - % Load each script into the Lua state. -``` - -### functions - -Return a list of all functions in the Lua environment. - -```erlang -functions(Base, _Req, Opts) -> - case hb_private:get(<<"state">>, Base, Opts) of - not_found -> - {error, not_found}; - State -> - {ok, [Res], _S2} = - luerl:do_dec( - << - """ - local __tests = {} - for k, v in pairs(_G) do - if type(v) == "function" then - table.insert(__tests, k) - end - end - return __tests - """ - >>, - State - ), - {ok, hb_util:message_to_ordered_list(decode(Res, Opts))} - end. -``` - -### sandbox - -Sandbox (render inoperable) a set of Lua functions. Each function is - -```erlang -sandbox(State, Map, Opts) when is_map(Map) -> - sandbox(State, maps:to_list(Map), Opts); -``` - -### sandbox - -Sandbox (render inoperable) a set of Lua functions. Each function is - -```erlang -sandbox(State, [], _Opts) -> - State; -``` - -### sandbox - -Sandbox (render inoperable) a set of Lua functions. Each function is - -```erlang -sandbox(State, [{Path, Value} | Rest], Opts) -> - {ok, NextState} = luerl:set_table_keys_dec(Path, Value, State), - sandbox(NextState, Rest, Opts); -``` - -### sandbox - -Sandbox (render inoperable) a set of Lua functions. 
Each function is - -```erlang -sandbox(State, [Path | Rest], Opts) -> - {ok, NextState} = luerl:set_table_keys_dec(Path, <<"sandboxed">>, State), - sandbox(NextState, Rest, Opts). -``` - -### compute - -Call the Lua script with the given arguments. - -```erlang -compute(Key, RawBase, Req, Opts) -> - ?event(debug_lua, compute_called), - {ok, Base} = ensure_initialized(RawBase, Req, Opts), - ?event(debug_lua, ensure_initialized_done), - % Get the state from the base message's private element. -``` - -### process_response - -Process a response to a Luerl invocation. Returns the typical AO-Core - -```erlang -process_response({ok, [Result], NewState}, Priv, Opts) -> - process_response({ok, [<<"ok">>, Result], NewState}, Priv, Opts); -``` - -### process_response - -Process a response to a Luerl invocation. Returns the typical AO-Core - -```erlang -process_response({ok, [Status, MsgResult], NewState}, Priv, Opts) -> - % If the result is a HyperBEAM device return (`{Status, Msg}'), decode it - % and add the previous `priv' element back into the resulting message. -``` - -### process_response - -```erlang -process_response({lua_error, RawError, State}, _Priv, Opts) -> - % An error occurred while calling the Lua function. Parse the stack trace - % and return it. -``` - -### process_response - -```erlang -process_response({error, Reason, Trace}, _Priv, _Opts) -> - % An Erlang error occurred while calling the Lua function. Return it. -``` - -### snapshot - -Snapshot the Lua state from a live computation. Normalizes its `priv` - -```erlang -snapshot(Base, _Req, Opts) -> - case hb_private:get(<<"state">>, Base, Opts) of - not_found -> - {error, <<"Cannot snapshot Lua state: state not initialized.">>}; - State -> - {ok, #{ <<"body">> => term_to_binary(luerl:externalize(State)) }} - end. -``` - -### normalize - -Restore the Lua state from a snapshot, if it exists. 
- -```erlang -normalize(Base, _Req, RawOpts) -> - Opts = RawOpts#{ hashpath => ignore }, - case hb_private:get(<<"state">>, Base, Opts) of - not_found -> - DeviceKey = - case hb_ao:get(<<"device-key">>, {as, <<"message@1.0">>, Base}, Opts) of - not_found -> []; - Key -> [Key] - end, - ?event(snapshot, - {attempting_to_restore_lua_state, - {msg1, Base}, {device_key, DeviceKey} - } - ), - SerializedState = - hb_ao:get( - [<<"snapshot">>] ++ DeviceKey ++ [<<"body">>], - {as, dev_message, Base}, - Opts - ), - case SerializedState of - not_found -> throw({error, no_lua_state_snapshot_found}); - State -> - ExternalizedState = binary_to_term(State), - InternalizedState = luerl:internalize(ExternalizedState), - ?event(snapshot, loaded_state_from_snapshot), - {ok, hb_private:set(Base, <<"state">>, InternalizedState, Opts)} - end; - _ -> - ?event(snapshot, state_already_initialized), - {ok, Base} - end. -``` - -### decode - -Decode a Lua result into a HyperBEAM `structured@1.0` message. - -```erlang -decode(EncMsg, _Opts) when is_list(EncMsg) andalso length(EncMsg) == 0 -> - % The value is an empty table, so we assume it is a message rather than - % a list. -``` - -### decode - -```erlang -decode(EncMsg = [{_K, _V} | _], Opts) when is_list(EncMsg) -> - decode( - maps:map( - fun(_, V) -> decode(V, Opts) end, - maps:from_list(EncMsg) - ), - Opts - ); -``` - -### decode - -```erlang -decode(Msg, Opts) when is_map(Msg) -> - % If the message is an ordered list encoded as a map, decode it to a list. -``` - -### decode - -```erlang -decode(Other, _Opts) -> - Other. -``` - -### encode - -Encode a HyperBEAM `structured@1.0` message into a Lua term. 
- -```erlang -encode(Map, Opts) when is_map(Map) -> - hb_cache:ensure_all_loaded( - case hb_util:is_ordered_list(Map, Opts) of - true -> encode(hb_util:message_to_ordered_list(Map), Opts); - false -> maps:to_list(maps:map(fun(_, V) -> encode(V, Opts) end, Map)) - end, - Opts - ); -``` - -### encode - -Encode a HyperBEAM `structured@1.0` message into a Lua term. - -```erlang -encode(List, Opts) when is_list(List) -> - hb_cache:ensure_all_loaded( - lists:map(fun(V) -> encode(V, Opts) end, List), - Opts - ); -``` - -### encode - -Encode a HyperBEAM `structured@1.0` message into a Lua term. - -```erlang -encode(Atom, _Opts) when is_atom(Atom) and (Atom /= false) and (Atom /= true)-> - hb_util:bin(Atom); -``` - -### encode - -Encode a HyperBEAM `structured@1.0` message into a Lua term. - -```erlang -encode(Other, _Opts) -> - Other. -``` - -### decode_stacktrace - -Parse a Lua stack trace into a list of messages. - -```erlang -decode_stacktrace(StackTrace, State0, Opts) -> - decode_stacktrace(StackTrace, State0, [], Opts). 
-``` - -### decode_stacktrace - -```erlang -decode_stacktrace([], _State, Acc, _Opts) -> - lists:reverse(Acc); -``` - -### decode_stacktrace - -```erlang -decode_stacktrace([{FuncBin, ParamRefs, FileInfo} | Rest], State0, Acc, Opts) -> - %% Decode all the Lua table refs into Erlang terms - DecodedParams = decode_params(ParamRefs, State0, Opts), - %% Pull out the line number - Line = proplists:get_value(line, FileInfo), - File = proplists:get_value(file, FileInfo, undefined), - ?event(debug_lua_stack, {stack_file, FileInfo}), - %% Build our message‐map - Entry = #{ - <<"function">> => FuncBin, - <<"parameters">> => hb_util:list_to_numbered_message(DecodedParams) - }, - MaybeLine = - if is_binary(File) andalso is_integer(Line) -> - #{ - <<"line">> => - iolist_to_binary( - io_lib:format("~s:~p", [File, Line]) - ) - }; - is_integer(Line) -> - #{ <<"line">> => Line }; - true -> - #{} - end, - decode_stacktrace(Rest, State0, [maps:merge(Entry, MaybeLine)|Acc], Opts). -``` - -### decode_params - -Decode a list of Lua references, as found in a stack trace, into a - -```erlang -decode_params([], _State, _Opts) -> []; -``` - -### decode_params - -Decode a list of Lua references, as found in a stack trace, into a - -```erlang -decode_params([Tref|Rest], State, Opts) -> - Decoded = decode(luerl:decode(Tref, State), Opts), - [Decoded|decode_params(Rest, State, Opts)]. -``` - -### simple_invocation_test - -```erlang -simple_invocation_test() -> - {ok, Script} = file:read_file("test/test.lua"), - Base = #{ - <<"device">> => <<"lua@5.3a">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => Script - }, - <<"parameters">> => [] - }, - ?assertEqual(2, hb_ao:get(<<"assoctable/b">>, Base, #{})). -``` - -### load_modules_by_id_test_ - -```erlang -load_modules_by_id_test_() -> - {timeout, 30, fun load_modules_by_id/0}. -``` - -### load_modules_by_id - -```erlang -load_modules_by_id() -> - % Start a node to ensure the HTTP services are available. 
-``` - -### multiple_modules_test - -```erlang -multiple_modules_test() -> - {ok, Module} = file:read_file("test/test.lua"), - Module2 = - << - """ - function test_second_script() - return 4 - end - """ - >>, - Base = #{ - <<"device">> => <<"lua@5.3a">>, - <<"module">> => [ - #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => Module - }, - #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => Module2 - } - ], - <<"parameters">> => [] - }, - ?assertEqual(2, hb_ao:get(<<"assoctable/b">>, Base, #{})), - ?assertEqual(4, hb_ao:get(<<"test_second_script">>, Base, #{})). -``` - -### error_response_test - -```erlang -error_response_test() -> - {ok, Module} = file:read_file("test/test.lua"), - Base = #{ - <<"device">> => <<"lua@5.3a">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => Module - }, - <<"parameters">> => [] - }, - ?assertEqual( - {error, <<"Very bad, but Lua caught it.">>}, - hb_ao:resolve(Base, <<"error_response">>, #{}) - ). -``` - -### sandboxed_failure_test - -Run an AO-Core resolution from the Lua environment. - -```erlang -sandboxed_failure_test() -> - {ok, Module} = file:read_file("test/test.lua"), - Base = #{ - <<"device">> => <<"lua@5.3a">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => Module - }, - <<"parameters">> => [], - <<"sandbox">> => true - }, - ?assertMatch({error, _}, hb_ao:resolve(Base, <<"sandboxed_fail">>, #{})). -``` - -### ao_core_sandbox_test - -Run an AO-Core resolution from the Lua environment. -Run an AO-Core resolution from the Lua environment. 
- -```erlang -ao_core_sandbox_test() -> - {ok, Module} = file:read_file("test/test.lua"), - Base = #{ - <<"device">> => <<"lua@5.3a">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => Module - }, - <<"parameters">> => [], - <<"device-sandbox">> => [<<"message@1.0">>] - }, - ?assertMatch({error, _}, hb_ao:resolve(Base, <<"ao_relay">>, #{})), - ?assertMatch({ok, _}, hb_ao:resolve(Base, <<"ao_resolve">>, #{})). -``` - -### ao_core_resolution_from_lua_test - -Run an AO-Core resolution from the Lua environment. -Run an AO-Core resolution from the Lua environment. -Benchmark the performance of Lua executions. - -```erlang -ao_core_resolution_from_lua_test() -> - {ok, Module} = file:read_file("test/test.lua"), - Base = #{ - <<"device">> => <<"lua@5.3a">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => Module - }, - <<"parameters">> => [] - }, - {ok, Res} = hb_ao:resolve(Base, <<"ao_resolve">>, #{}), - ?assertEqual(<<"Hello, AO world!">>, Res). -``` - -### direct_benchmark_test - -Run an AO-Core resolution from the Lua environment. -Run an AO-Core resolution from the Lua environment. -Benchmark the performance of Lua executions. - -```erlang -direct_benchmark_test() -> - BenchTime = 3, - {ok, Module} = file:read_file("test/test.lua"), - Base = #{ - <<"device">> => <<"lua@5.3a">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => Module - }, - <<"parameters">> => [] - }, - Iterations = hb_test_utils:benchmark( - fun(X) -> - {ok, _} = hb_ao:resolve(Base, <<"assoctable">>, #{}), - ?event({iteration, X}) - end, - BenchTime - ), - ?event({iterations, Iterations}), - hb_test_utils:benchmark_print( - <<"Direct Lua:">>, - <<"executions">>, - Iterations, - BenchTime - ), - ?assert(Iterations > 10). -``` - -### invoke_non_compute_key_test - -Call a non-compute key on a Lua device message and ensure that the -Use a Lua module as a hook on the HTTP server via `~meta@1.0`. 
- -```erlang -invoke_non_compute_key_test() -> - {ok, Module} = file:read_file("test/test.lua"), - Base = #{ - <<"device">> => <<"lua@5.3a">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => Module - }, - <<"test-value">> => 42 - }, - {ok, Result1} = hb_ao:resolve(Base, <<"hello">>, #{}), - ?event({result1, Result1}), - ?assertEqual(42, hb_ao:get(<<"test-value">>, Result1, #{})), - ?assertEqual(<<"world">>, hb_ao:get(<<"hello">>, Result1, #{})), - {ok, Result2} = - hb_ao:resolve( - Base, - #{<<"path">> => <<"hello">>, <<"name">> => <<"Alice">>}, - #{} - ), - ?event({result2, Result2}), - ?assertEqual(<<"Alice">>, hb_ao:get(<<"hello">>, Result2, #{})). -``` - -### lua_http_hook_test - -Call a non-compute key on a Lua device message and ensure that the -Use a Lua module as a hook on the HTTP server via `~meta@1.0`. -Call a process whose `execution-device` is set to `lua@5.3a`. - -```erlang -lua_http_hook_test() -> - {ok, Module} = file:read_file("test/test.lua"), - Node = hb_http_server:start_node( - #{ - priv_wallet => ar_wallet:new(), - on => #{ - <<"request">> => - #{ - <<"device">> => <<"lua@5.3a">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => Module - } - } - } - }), - {ok, Res} = hb_http:get(Node, <<"/hello?hello=world">>, #{}), - ?assertMatch(#{ <<"body">> := <<"i like turtles">> }, Res). -``` - -### pure_lua_process_test - -Call a non-compute key on a Lua device message and ensure that the -Use a Lua module as a hook on the HTTP server via `~meta@1.0`. -Call a process whose `execution-device` is set to `lua@5.3a`. -Call a process whose `execution-device` is set to `lua@5.3a`. 
- -```erlang -pure_lua_process_test() -> - Process = generate_lua_process("test/test.lua", #{}), - {ok, _} = hb_cache:write(Process, #{}), - Message = generate_test_message(Process, #{}), - {ok, _} = hb_ao:resolve(Process, Message, #{ hashpath => ignore }), - {ok, Results} = hb_ao:resolve(Process, <<"now">>, #{}), - ?assertEqual(42, hb_ao:get(<<"results/output/body">>, Results, #{})). -``` - -### pure_lua_restore_test - -Call a non-compute key on a Lua device message and ensure that the -Use a Lua module as a hook on the HTTP server via `~meta@1.0`. -Call a process whose `execution-device` is set to `lua@5.3a`. -Call a process whose `execution-device` is set to `lua@5.3a`. - -```erlang -pure_lua_restore_test() -> - Opts = #{ process_cache_frequency => 1 }, - Process = generate_lua_process("test/test.lua", Opts), - {ok, _} = hb_cache:write(Process, Opts), - Message = generate_test_message(Process, Opts, #{ <<"path">> => <<"inc">>}), - {ok, _} = hb_ao:resolve(Process, Message, Opts#{ hashpath => ignore }), - {ok, Count1} = hb_ao:resolve(Process, <<"now/count">>, Opts), - ?assertEqual(1, Count1), - hb_ao:resolve( - Process, - generate_test_message(Process, #{}, #{ <<"path">> => <<"inc">>}), - Opts - ), - {ok, Count2} = hb_ao:resolve(Process, <<"now/count">>, Opts), - ?assertEqual(2, Count2). -``` - -### pure_lua_process_benchmark_test_ - -```erlang -pure_lua_process_benchmark_test_() -> - {timeout, - 30, - fun() -> - pure_lua_process_benchmark(#{ - process_snapshot_slots => 50 - }) - end}. 
-``` - -### pure_lua_process_benchmark - -```erlang -pure_lua_process_benchmark(Opts) -> - BenchMsgs = 50, - hb:init(), - Process = generate_lua_process("test/test.lua", Opts), - {ok, _} = hb_cache:write(Process, Opts), - Message = generate_test_message(Process, Opts), - lists:foreach( - fun(X) -> - hb_ao:resolve(Process, Message, Opts#{ hashpath => ignore }), - ?event(debug_lua, {scheduled, X}) - end, - lists:seq(1, BenchMsgs) - ), - ?event(debug_lua, {executing, BenchMsgs}), - BeforeExec = os:system_time(millisecond), - {ok, _} = hb_ao:resolve(Process, <<"now">>, Opts), - AfterExec = os:system_time(millisecond), - hb_test_utils:benchmark_print( - <<"Pure Lua process: Computed">>, - <<"slots">>, - BenchMsgs, - (AfterExec - BeforeExec) / 1000 - ). -``` - -### invoke_aos_test - -```erlang -invoke_aos_test() -> - Opts = #{ priv_wallet => hb:wallet() }, - Process = generate_lua_process("test/hyper-aos.lua", Opts), - {ok, _Proc} = hb_cache:write(Process, Opts), - Message = generate_test_message(Process, Opts), - {ok, _Assignment} = hb_ao:resolve(Process, Message, Opts#{ hashpath => ignore }), - {ok, Results} = hb_ao:resolve(Process, <<"now/results/output">>, Opts), - ?assertEqual(<<"1">>, hb_ao:get(<<"data">>, Results, #{})), - ?assertEqual(<<"aos> ">>, hb_ao:get(<<"prompt">>, Results, #{})). -``` - -### aos_authority_not_trusted_test - -Benchmark the performance of Lua executions. 
- -```erlang -aos_authority_not_trusted_test() -> - Opts = #{ priv_wallet => ar_wallet:new() }, - Process = generate_lua_process("test/hyper-aos.lua", Opts), - ProcID = hb_message:id(Process, all), - {ok, _} = hb_cache:write(Process, Opts), - Message = hb_message:commit( - #{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - hb_message:commit( - #{ - <<"target">> => ProcID, - <<"type">> => <<"Message">>, - <<"data">> => <<"1 + 1">>, - <<"random-seed">> => rand:uniform(1337), - <<"action">> => <<"Eval">>, - <<"from-process">> => <<"1234">> - }, - Opts - ) - }, - Opts - ), - ?event({message, Message}), - {ok, _} = hb_ao:resolve(Process, Message, Opts#{ hashpath => ignore }), - {ok, Results} = hb_ao:resolve(Process, <<"now/results/output/data">>, Opts), - ?assertEqual(<<"Message is not trusted.">>, Results). -``` - -### aos_process_benchmark_test_ - -Benchmark the performance of Lua executions. - -```erlang -aos_process_benchmark_test_() -> - {timeout, 30, fun() -> - BenchMsgs = 10, - Opts = #{ - process_async_cache => true, - hashpath => ignore, - process_snapshot_slots => 50 - }, - Process = generate_lua_process("test/hyper-aos.lua", Opts), - Message = generate_test_message(Process, Opts), - lists:foreach( - fun(X) -> - hb_ao:resolve(Process, Message, Opts), - ?event(debug_lua, {scheduled, X}) - end, - lists:seq(1, BenchMsgs) - ), - ?event(debug_lua, {executing, BenchMsgs}), - BeforeExec = os:system_time(millisecond), - {ok, _} = hb_ao:resolve( - Process, - <<"now">>, - Opts - ), - AfterExec = os:system_time(millisecond), - hb_test_utils:benchmark_print( - <<"HyperAOS process: Computed">>, - <<"slots">>, - BenchMsgs, - (AfterExec - BeforeExec) / 1000 - ) - end}. -``` - -### generate_lua_process - -Generate a Lua process message. 
- -```erlang -generate_lua_process(File, Opts) -> - NormOpts = Opts#{ priv_wallet => hb_opts:get(priv_wallet, hb:wallet(), Opts) }, - Wallet = hb_opts:get(priv_wallet, hb:wallet(), NormOpts), - Address = hb_util:human_id(ar_wallet:to_address(Wallet)), - {ok, Module} = file:read_file(File), - hb_message:commit( - #{ - <<"device">> => <<"process@1.0">>, - <<"type">> => <<"Process">>, - <<"scheduler-device">> => <<"scheduler@1.0">>, - <<"execution-device">> => <<"lua@5.3a">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => Module - }, - <<"authority">> => [ - Address, - <<"E3FJ53E6xtAzcftBpaw2E1H4ZM9h6qy6xz9NXh5lhEQ">> - ], - <<"scheduler-location">> => - hb_util:human_id(ar_wallet:to_address(Wallet)), - <<"test-random-seed">> => rand:uniform(1337) - }, - NormOpts - ). -``` - -### generate_test_message - -Generate a test message for a Lua process. - -```erlang -generate_test_message(Process, Opts) -> - generate_test_message( - Process, - Opts, - <<""" - Count = 0 - function add() - Send({Target = 'Foo', Data = 'Bar' }); - Count = Count + 1 - end - add() - return Count - """>> - ). -``` - -### generate_test_message - -```erlang -generate_test_message(Process, Opts, ToEval) when is_binary(ToEval) -> - generate_test_message( - Process, - Opts, - #{ - <<"action">> => <<"Eval">>, - <<"body">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => hb_util:bin(ToEval) - } - } - ); -``` - -### generate_test_message - -```erlang -generate_test_message(Process, Opts, MsgBase) -> - ProcID = hb_message:id(Process, all), - NormOpts = Opts#{ priv_wallet => hb_opts:get(priv_wallet, hb:wallet(), Opts) }, - hb_message:commit(#{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - hb_message:commit( - MsgBase#{ - <<"target">> => ProcID, - <<"type">> => <<"Message">>, - <<"random-seed">> => rand:uniform(1337) - }, - NormOpts - ) - }, - NormOpts - ). 
-``` - -### generate_stack - -Generate a stack message for the Lua process. - -```erlang -generate_stack(File) -> - Wallet = hb:wallet(), - {ok, Module} = file:read_file(File), - Msg1 = #{ - <<"device">> => <<"stack@1.0">>, - <<"device-stack">> => - [ - <<"json-iface@1.0">>, - <<"lua@5.3a">>, - <<"multipass@1.0">> - ], - <<"function">> => <<"json_result">>, - <<"passes">> => 2, - <<"stack-keys">> => [<<"init">>, <<"compute">>], - <<"module">> => Module, - <<"process">> => - hb_message:commit(#{ - <<"type">> => <<"Process">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => Module - }, - <<"scheduler">> => hb:address(), - <<"authority">> => hb:address() - }, Wallet) - }, - {ok, Msg2} = hb_ao:resolve(Msg1, <<"init">>, #{}), - Msg2. -``` - ---- - -*Generated from [dev_lua.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua.erl)* diff --git a/docs/book/src/dev_lua_lib.erl.md b/docs/book/src/dev_lua_lib.erl.md deleted file mode 100644 index 7ebfcdb96..000000000 --- a/docs/book/src/dev_lua_lib.erl.md +++ /dev/null @@ -1,199 +0,0 @@ -# dev_lua_lib - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua_lib.erl) - -A module for providing AO library functions to the Lua environment. -This module contains the implementation of the functions, each by the name -that should be used in the `ao` table in the Lua environment. Every export -is imported into the Lua environment. -Each function adheres closely to the Luerl calling convention, adding the -appropriate node message as a third argument: - fun(Args, State, NodeMsg) -> {ResultTerms, NewState} -As Lua allows for multiple return values, each function returns a list of -terms to grant to the caller. Matching the tuple convention used by AO-Core, -the first term is typically the status, and the second term is the result. 
- ---- - -## Exported Functions - -- `event/3` -- `get/3` -- `install/3` -- `resolve/3` -- `set/3` - ---- - -### install - -A module for providing AO library functions to the Lua environment. -Install the library into the given Lua environment. - -```erlang -install(Base, State, Opts) -> - % Calculate and set the new `preloaded_devices' option. -``` - -### return - -Helper function for returning a result from a Lua function. - -```erlang -return(Result, ExecState, Opts) -> - ?event(lua_import, {import_returning, {result, Result}}), - TableEncoded = dev_lua:encode(hb_cache:ensure_all_loaded(Result, Opts), Opts), - {ReturnParams, ResultingState} = - lists:foldr( - fun(LuaEncoded, {Params, StateIn}) -> - {NewParam, NewState} = luerl:encode(LuaEncoded, StateIn), - {[NewParam | Params], NewState} - end, - {[], ExecState}, - TableEncoded - ), - ?event({lua_encoded, ReturnParams}), - {ReturnParams, ResultingState}. -``` - -### resolve - -A wrapper function for performing AO-Core resolutions. Offers both the - -```erlang -resolve([SingletonMsg], ExecState, ExecOpts) -> - ?event({ao_core_resolver, {msg, SingletonMsg}}), - ParsedMsgs = hb_singleton:from(SingletonMsg, ExecOpts), - ?event({parsed_msgs_to_resolve, ParsedMsgs}), - resolve({many, ParsedMsgs}, ExecState, ExecOpts); -``` - -### resolve - -A wrapper function for performing AO-Core resolutions. Offers both the - -```erlang -resolve([Base, Path], ExecState, ExecOpts) when is_binary(Path) -> - PathParts = hb_path:term_to_path_parts(Path, ExecOpts), - resolve({many, [Base] ++ PathParts}, ExecState, ExecOpts); -``` - -### resolve - -A wrapper function for performing AO-Core resolutions. Offers both the - -```erlang -resolve(Msgs, ExecState, ExecOpts) when is_list(Msgs) -> - resolve({many, Msgs}, ExecState, ExecOpts); -``` - -### resolve - -A wrapper function for performing AO-Core resolutions. 
Offers both the - -```erlang -resolve({many, Msgs}, ExecState, ExecOpts) -> - MaybeAsMsgs = lists:map(fun convert_as/1, Msgs), - try hb_ao:resolve_many(MaybeAsMsgs, ExecOpts) of - {Status, Res} -> - ?event({resolved_msgs, {status, Status}, {res, Res}, {exec_opts, ExecOpts}}), - {[Status, Res], ExecState} - catch - Error -> - ?event(lua_error, {ao_core_resolver_error, Error}), - {[<<"error">>, Error], ExecState} - end. -``` - -### get - -A wrapper for `hb_ao`'s `get` functionality. - -```erlang -get([Key, Base], ExecState, ExecOpts) -> - ?event({ao_core_get, {base, Base}, {key, Key}}), - NewRes = hb_ao:get(convert_as(Key), convert_as(Base), ExecOpts), - ?event({ao_core_get_result, {result, NewRes}}), - {[NewRes], ExecState}. -``` - -### convert_as - -Converts any `as` terms from Lua to their HyperBEAM equivalents. - -```erlang -convert_as([<<"as">>, Device, RawMsg]) -> - {as, Device, RawMsg}; -``` - -### convert_as - -Converts any `as` terms from Lua to their HyperBEAM equivalents. - -```erlang -convert_as(Other) -> - Other. -``` - -### set - -Wrapper for `hb_ao`'s `set` functionality. - -```erlang -set([Base, Key, Value], ExecState, ExecOpts) -> - ?event({ao_core_set, {base, Base}, {key, Key}, {value, Value}}), - NewRes = hb_ao:set(Base, Key, Value, ExecOpts), - ?event({ao_core_set_result, {result, NewRes}}), - {[NewRes], ExecState}; -``` - -### set - -Wrapper for `hb_ao`'s `set` functionality. - -```erlang -set([Base, NewValues], ExecState, ExecOpts) -> - ?event({ao_core_set, {base, Base}, {new_values, NewValues}}), - NewRes = hb_ao:set(Base, NewValues, ExecOpts), - ?event({ao_core_set_result, {result, NewRes}}), - {[NewRes], ExecState}. 
-``` - -### event - -Allows Lua scripts to signal events using the HyperBEAM hosts internal - -```erlang -event([Event], ExecState, Opts) -> - ?event({recalling_event, Event}), - event([global, Event], ExecState, Opts); -``` - -### event - -Allows Lua scripts to signal events using the HyperBEAM hosts internal - -```erlang -event([Group, Event], State, Opts) when is_list(Event) -> - event([Group, list_to_tuple(Event)], State, Opts); -``` - -### event - -Allows Lua scripts to signal events using the HyperBEAM hosts internal - -```erlang -event([Group, Event], ExecState, Opts) -> - ?event( - lua_event, - {event, - {group, Group}, - {event, Event} - } - ), - ?event(Group, Event), -``` - ---- - -*Generated from [dev_lua_lib.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua_lib.erl)* diff --git a/docs/book/src/dev_lua_test.erl.md b/docs/book/src/dev_lua_test.erl.md deleted file mode 100644 index acc6f7145..000000000 --- a/docs/book/src/dev_lua_test.erl.md +++ /dev/null @@ -1,166 +0,0 @@ -# dev_lua_test - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua_test.erl) - -A wrapper module for generating and executing EUnit tests for all Lua modules. -When executed with `rebar3 lua-test`, this module will be invoked and scan the -`scripts` directory for all Lua files, and generate an EUnit test suite for -each one. By default, an individual test is generated for each function in -the global `_G` table that ends in `_test`. -In order to specify other tests to run instead, the user may employ the -`LUA_TESTS` and `LUA_SCRIPTS` environment variables. The syntax for these -variables is described in the function documentation for `parse_spec`. 
- ---- - -## Exported Functions - -- `parse_spec/1` - ---- - -### parse_spec - -Parse a string representation of test descriptions received from the - -```erlang -parse_spec(Str) when is_list(Str) -> - parse_spec(hb_util:bin(Str)); -``` - -### parse_spec - -Parse a string representation of test descriptions received from the - -```erlang -parse_spec(tests) -> - % The user has not given a test spec, so we default to running all tests in - % the `LUA_SCRIPTS' directory (defaulting to `scripts/'). -``` - -### parse_spec - -```erlang -parse_spec(Str) -> - lists:map( - fun(ModDef) -> - [ModName|TestDefs] = binary:split(ModDef, <<":">>, [global, trim_all]), - ScriptDir = hb_util:bin(hb_opts:get(lua_scripts)), - File = - case terminates_with(ModName, <<".lua">>) of - true -> ModName; - false -> << ScriptDir/binary, "/", ModName/binary, ".lua" >> - end, - Tests = - case TestDefs of - [] -> tests; - TestDefs -> TestDefs - end, - {File, Tests} - end, - binary:split(Str, <<",">>, [global, trim_all]) - ). -``` - -### exec_test_ - -Main entrypoint for Lua tests. - -```erlang -exec_test_() -> - ScriptDefs = hb_opts:get(lua_tests), - lists:map( - fun({File, Funcs}) -> suite(File, Funcs) end, - ScriptDefs - ). -``` - -### suite - -Generate an EUnit test suite for a given Lua script. If the `Funcs` is - -```erlang -suite(File, Funcs) -> - {ok, State} = new_state(File), - {foreach, - fun() -> ok end, - fun(_) -> ok end, - lists:map( - fun(FuncName) -> - { - hb_util:list(File) ++ ":" ++ hb_util:list(FuncName), - fun() -> exec_test(State, FuncName) end - } - end, - case Funcs of - tests -> - lists:filter( - fun(FuncName) -> - terminates_with(FuncName, <<"_test">>) - end, - hb_ao:get(<<"functions">>, State, #{}) - ); - FuncNames -> FuncNames - end - ) - }. -``` - -### new_state - -Create a new Lua environment for a given script. 
- -```erlang -new_state(File) -> - ?event(debug_lua_test, {generating_state_for, File}), - {ok, Module} = file:read_file(hb_util:list(File)), - {ok, _} = - hb_ao:resolve( - #{ - <<"device">> => <<"lua@5.3a">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"name">> => File, - <<"body">> => Module - } - }, - <<"init">>, - #{} - ). -``` - -### exec_test - -Generate an EUnit test for a given function. - -```erlang -exec_test(State, Function) -> - {Status, Result} = - hb_ao:resolve( - State, - #{ <<"path">> => Function, <<"parameters">> => [] }, - #{} - ), - case Status of - ok -> ok; - error -> - hb_format:print(Result, <<"Lua">>, Function, 1), - ?assertEqual( - ok, - Status - ) - end. -``` - -### terminates_with - -Check if a string terminates with a given suffix. - -```erlang -terminates_with(String, Suffix) -> - binary:longest_common_suffix(lists:map(fun hb_util:bin/1, [String, Suffix])) -``` - ---- - -*Generated from [dev_lua_test.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua_test.erl)* diff --git a/docs/book/src/dev_lua_test_ledgers.erl.md b/docs/book/src/dev_lua_test_ledgers.erl.md deleted file mode 100644 index 795f864d8..000000000 --- a/docs/book/src/dev_lua_test_ledgers.erl.md +++ /dev/null @@ -1,887 +0,0 @@ -# dev_lua_test_ledgers - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua_test_ledgers.erl) - -A collection of Eunit tests for the `lua@5.3a` device, and the -`hyper-token.lua` script. These tests are designed to validate the -functionality of both of these components, and to provide examples -of how to use the `lua@5.3a` device. -The module is split into four components: -1. A simple ledger client library. -2. Assertion functions that verify specific invariants about the state - of ledgers in a test environment. -3. Utility functions for normalizing the state of a test environment. -4. Test cases that generate and manipulate ledger networks in test - environments. 
-Many client and utility functions in this module handle the conversion of -wallet IDs to human-readable addresses when found in transfers, balances, -and other fields. This is done to make the test cases more readable and -easier to understand -- be careful if following their patterns in other -contexts to either mimic a similar pattern, or to ensure you pass addresses -in these contexts rather that full wallet objects. - ---- - -### ledger - -Generate a Lua process definition message. - -```erlang -ledger(Script, Opts) -> - ledger(Script, #{}, Opts). -``` - -### ledger - -```erlang -ledger(Script, Extra, Opts) -> - % If the `balance' key is set in the `Extra' map, ensure that any wallets - % given as keys in the message are converted to human-readable addresses. -``` - -### lua_script - -Generate a Lua `script` key from a file or list of files. - -```erlang -lua_script(Files) when is_list(Files) -> - [ - #{ - <<"content-type">> => <<"application/lua">>, - <<"module">> => File, - <<"body">> => - hb_util:ok( - file:read_file( - if is_binary(File) -> binary_to_list(File); - true -> File - end - ) - ) - } - || - File <- Files - ]; -``` - -### lua_script - -Generate a Lua `script` key from a file or list of files. - -```erlang -lua_script(File) when is_binary(File) -> - hd(lua_script([File])). -``` - -### subledger - -Generate a test sub-ledger process definition message. - -```erlang -subledger(Root, Opts) -> - subledger(Root, #{}, Opts). -``` - -### subledger - -```erlang -subledger(Root, Extra, Opts) -> - BareRoot = - maps:without( - [<<"token">>, <<"balance">>], - hb_message:uncommitted(Root) - ), - Proc = - hb_message:commit( - maps:merge( - BareRoot#{ - <<"token">> => hb_message:id(Root, all) - }, - Extra - ), - hb_opts:get(priv_wallet, hb:wallet(), Opts) - ), - hb_cache:write(Proc, Opts), - Proc. -``` - -### transfer - -Generate a test transfer message. 
- -```erlang -transfer(ProcMsg, Sender, Recipient, Quantity, Opts) -> - transfer(ProcMsg, Sender, Recipient, Quantity, undefined, Opts). -``` - -### transfer - -```erlang -transfer(ProcMsg, Sender, Recipient, Quantity, Route, Opts) -> - MaybeRoute = - if Route == undefined -> #{}; - true -> - #{ - <<"route">> => - if is_map(Route) -> hb_message:id(Route, all); - true -> Route - end - } - end, - Xfer = - hb_message:commit(#{ - <<"path">> => <<"push">>, - <<"body">> => - hb_message:commit(MaybeRoute#{ - <<"action">> => <<"Transfer">>, - <<"target">> => hb_message:id(ProcMsg, all), - <<"recipient">> => hb_util:human_id(Recipient), - <<"quantity">> => Quantity - }, - Sender - ) - }, - Sender - ), - hb_ao:resolve( - ProcMsg, - Xfer, - Opts#{ priv_wallet => hb_opts:get(priv_wallet, hb:wallet(), Opts) } - ). -``` - -### register - -Request that a peer register with a without sub-ledger. - -```erlang -register(ProcMsg, Peer, Opts) when is_map(Peer) -> - register(ProcMsg, hb_message:id(Peer, all), Opts); -``` - -### register - -Request that a peer register with a without sub-ledger. - -```erlang -register(ProcMsg, PeerID, RawOpts) -> - Opts = - RawOpts#{ - priv_wallet => hb_opts:get(priv_wallet, hb:wallet(), RawOpts) - }, - Reg = - hb_message:commit( - #{ - <<"path">> => <<"push">>, - <<"body">> => - hb_message:commit( - #{ - <<"action">> => <<"register-remote">>, - <<"target">> => hb_message:id(ProcMsg, all), - <<"peer">> => PeerID - }, - Opts - ) - }, - Opts - ), - hb_ao:resolve( - ProcMsg, - Reg, - Opts - ). -``` - -### balance - -Retreive a single balance from the ledger. - -```erlang -balance(ProcMsg, User, Opts) when not ?IS_ID(User) -> - balance(ProcMsg, hb_util:human_id(ar_wallet:to_address(User)), Opts); -``` - -### balance - -Retreive a single balance from the ledger. -Get the total balance for an ID across all ledgers in a set. - -```erlang -balance(ProcMsg, ID, Opts) -> - hb_ao:get(<<"now/balance/", ID/binary>>, ProcMsg, 0, Opts). 
-``` - -### balance_total - -Retreive a single balance from the ledger. -Get the total balance for an ID across all ledgers in a set. - -```erlang -balance_total(Procs, ID, Opts) -> - lists:sum( - lists:map( - fun(Proc) -> balance(Proc, ID, Opts) end, - maps:values(normalize_env(Procs)) - ) - ). -``` - -### balances - -Get the balances of a ledger. - -```erlang -balances(ProcMsg, Opts) -> - balances(now, ProcMsg, Opts). -``` - -### balances - -```erlang -balances(initial, ProcMsg, Opts) -> - balances(<<"">>, ProcMsg, Opts); -``` - -### balances - -```erlang -balances(Mode, ProcMsg, Opts) when is_atom(Mode) -> - balances(hb_util:bin(Mode), ProcMsg, Opts); -``` - -### balances - -```erlang -balances(Prefix, ProcMsg, Opts) -> - Balances = hb_ao:get(<>, ProcMsg, #{}, Opts), - hb_private:reset(hb_cache:ensure_all_loaded(Balances, Opts)). -``` - -### supply - -Get the supply of a ledger, either `now` or `initial`. - -```erlang -supply(ProcMsg, Opts) -> - supply(now, ProcMsg, Opts). -``` - -### supply - -```erlang -supply(Mode, ProcMsg, Opts) -> - lists:sum(maps:values(balances(Mode, ProcMsg, Opts))). -``` - -### subledger_supply - -Calculate the supply of tokens in all sub-ledgers, from the balances of - -```erlang -subledger_supply(RootProc, AllProcs, Opts) -> - supply(now, RootProc, Opts) - user_supply(RootProc, AllProcs, Opts). -``` - -### user_supply - -Calculate the supply of tokens held by users on a ledger, excluding - -```erlang -user_supply(Proc, AllProcs, Opts) -> - NormProcs = normalize_without_root(Proc, AllProcs), - SubledgerIDs = maps:keys(NormProcs), - lists:sum( - maps:values( - maps:without( - SubledgerIDs, - balances(now, Proc, Opts) - ) - ) - ). -``` - -### ledgers - -Get the local expectation of a ledger's balances with peer ledgers. - -```erlang -ledgers(ProcMsg, Opts) -> - case hb_cache:ensure_all_loaded( - hb_ao:get(<<"now/ledgers">>, ProcMsg, #{}, Opts), - Opts - ) of - Msg when is_map(Msg) -> hb_private:reset(Msg); - [] -> #{} - end. 
-``` - -### map - -Generate a complete overview of the test environment's balances and - -```erlang -map(Procs, Opts) -> - NormProcs = normalize_env(Procs), - maps:merge_with( - fun(Key, Balances, Ledgers) -> - MaybeRoot = - case maps:get(Key, NormProcs, #{}) of - #{ <<"token">> := _ } -> #{}; - _ -> #{ root => true } - end, - MaybeRoot#{ - balances => Balances, - ledgers => Ledgers - } - end, - maps:map(fun(_, Proc) -> balances(Proc, Opts) end, NormProcs), - maps:map(fun(_, Proc) -> ledgers(Proc, Opts) end, NormProcs) - ). -``` - -### map - -```erlang -map(Procs, EnvNames, Opts) -> - apply_names(map(Procs, Opts), EnvNames, Opts). -``` - -### apply_names - -Apply a map of environment names to elements in either a map or list. - -```erlang -apply_names(Map, EnvNames, Opts) -> - IDs = - maps:from_list( - lists:filtermap( - fun({Key, V}) -> - try {true, {hb_util:human_id(Key), V}} - catch _:_ -> - try {true, {hb_message:id(Key, all), V}} - catch _:_ -> false - end - end - end, - maps:to_list(EnvNames) - ) - ), - do_apply_names(Map, maps:merge(IDs, EnvNames), Opts). -``` - -### do_apply_names - -```erlang -do_apply_names(Map, EnvNames, Opts) when is_map(Map) -> - maps:from_list( - lists:map( - fun({Key, Proc}) -> - { - apply_names(Key, EnvNames, Opts), - apply_names(Proc, EnvNames, Opts) - } - end, - maps:to_list(Map) - ) - ); -``` - -### do_apply_names - -```erlang -do_apply_names(List, EnvNames, Opts) when is_list(List) -> - lists:map( - fun(Proc) -> - apply_names(Proc, EnvNames, Opts) - end, - List - ); -``` - -### do_apply_names - -```erlang -do_apply_names(Item, Names, _Opts) when is_map_key(Item, Names) -> - maps:get(Item, Names); -``` - -### do_apply_names - -```erlang -do_apply_names(Item, Names, _Opts) -> - try maps:get(hb_util:human_id(Item), Names, Item) - catch _:_ -> Item - end. -``` - -### verify_net - -Execute all invariant checks for a pair of root ledger and sub-ledgers. 
- -```erlang -verify_net(RootProc, AllProcs, Opts) -> - verify_net_supply(RootProc, AllProcs, Opts), - verify_net_peer_balances(AllProcs, Opts). -``` - -### verify_root_supply - -Verify that the initial supply of tokens on the root ledger is the same - -```erlang -verify_root_supply(RootProc, Opts) -> - ?assert( - supply(initial, RootProc, Opts) == - supply(now, RootProc, Opts) + - lists:sum(maps:values(ledgers(RootProc, Opts))) - ). -``` - -### verify_net_supply - -Verify that the sum of all spendable balances held by ledgers in a - -```erlang -verify_net_supply(RootProc, AllProcs, Opts) -> - verify_root_supply(RootProc, Opts), - StartingRootSupply = supply(initial, RootProc, Opts), - NormProcsWithoutRoot = normalize_without_root(RootProc, AllProcs), - SubledgerIDs = maps:keys(NormProcsWithoutRoot), - RootUserSupply = user_supply(RootProc, NormProcsWithoutRoot, Opts), - SubledgerSupply = subledger_supply(RootProc, AllProcs, Opts), - ?event({verify_net_supply, {root, RootUserSupply}, {subledger, SubledgerSupply}}), - ?assert( - StartingRootSupply == - RootUserSupply + SubledgerSupply - ). -``` - -### verify_net_peer_balances - -Verify the consistency of all expected ledger balances with their peer - -```erlang -verify_net_peer_balances(AllProcs, Opts) -> - NormProcs = normalize_env(AllProcs), - maps:map( - fun(ValidateProc, _) -> - verify_peer_balances(ValidateProc, NormProcs, Opts) - end, - NormProcs - ). -``` - -### verify_peer_balances - -Verify that a ledger's expectation of its balances with peer ledgers - -```erlang -verify_peer_balances(ValidateProc, AllProcs, Opts) -> - Ledgers = ledgers(ValidateProc, Opts), - NormProcs = normalize_env(AllProcs), - maps:map( - fun(PeerID, ExpectedBalance) -> - ?assertEqual( - ExpectedBalance, - balance(ValidateProc, - maps:get(PeerID, NormProcs), - Opts - ) - ) - end, - Ledgers - ). 
-``` - -### normalize_env - -Normalize a set of processes, representing ledgers in a test environment, - -```erlang -normalize_env(Procs) when is_map(Procs) -> - normalize_env(maps:values(Procs)); -``` - -### normalize_env - -Normalize a set of processes, representing ledgers in a test environment, - -```erlang -normalize_env(Procs) when is_list(Procs) -> - maps:from_list( - lists:map( - fun(Proc) -> - {hb_message:id(Proc, all), Proc} - end, - Procs - ) - ). -``` - -### normalize_without_root - -Return the normalized environment without the root ledger. - -```erlang -normalize_without_root(RootProc, Procs) -> - maps:without([hb_message:id(RootProc, all)], normalize_env(Procs)). -``` - -### test_opts - -Create a node message for the test that avoids looking up unknown - -```erlang -test_opts() -> - hb:init(), - #{}. -``` - -### transfer_test_ - -Test the `transfer` function. - -```erlang -transfer_test_() -> {timeout, 30, fun transfer/0}. -``` - -### transfer - -Test the `transfer` function. - -```erlang -transfer() -> - Opts = test_opts(), - Alice = ar_wallet:new(), - Bob = ar_wallet:new(), - Proc = - ledger( - <<"scripts/hyper-token.lua">>, - #{ <<"balance">> => #{ Alice => 100 } }, - Opts - ), - ?assertEqual(100, supply(Proc, Opts)), - transfer(Proc, Alice, Bob, 1, Opts), - ?assertEqual(99, balance(Proc, Alice, Opts)), - ?assertEqual(1, balance(Proc, Bob, Opts)), - ?assertEqual(100, supply(Proc, Opts)). -``` - -### transfer_unauthorized_test_ - -User's must not be able to send tokens they do not own. We test three - -```erlang -transfer_unauthorized_test_() -> {timeout, 30, fun transfer_unauthorized/0}. -``` - -### transfer_unauthorized - -User's must not be able to send tokens they do not own. We test three - -```erlang -transfer_unauthorized() -> - Opts = test_opts(), - Alice = ar_wallet:new(), - Bob = ar_wallet:new(), - Proc = - ledger( - <<"scripts/hyper-token.lua">>, - #{ <<"balance">> => #{ Alice => 100 } }, - Opts - ), - % 1. 
Transferring a token when the sender has no tokens. -``` - -### subledger_deposit_test_ - -Verify that a user can deposit tokens into a sub-ledger. - -```erlang -subledger_deposit_test_() -> {timeout, 30, fun subledger_deposit/0}. -``` - -### subledger_deposit - -Verify that a user can deposit tokens into a sub-ledger. - -```erlang -subledger_deposit() -> - Opts = test_opts(), - Alice = ar_wallet:new(), - Proc = - ledger( - <<"scripts/hyper-token.lua">>, - #{ <<"balance">> => #{ Alice => 100 } }, - Opts - ), - SubLedger = subledger(Proc, Opts), - % 1. Alice has tokens on the root ledger. -``` - -### subledger_transfer_test_ - -Simulate inter-ledger payments between users on a single sub-ledger: - -```erlang -subledger_transfer_test_() -> {timeout, 10, fun subledger_transfer/0}. -``` - -### subledger_transfer - -Simulate inter-ledger payments between users on a single sub-ledger: - -```erlang -subledger_transfer() -> - Opts = test_opts(), - Alice = ar_wallet:new(), - Bob = ar_wallet:new(), - RootLedger = - ledger( - <<"scripts/hyper-token.lua">>, - #{ <<"balance">> => #{ Alice => 100 } }, - Opts - ), - SubLedger = subledger(RootLedger, Opts), - EnvNames = #{ - Alice => alice, - Bob => bob, - RootLedger => root, - SubLedger => subledger - }, - % 1. Alice has tokens on the root ledger. -``` - -### subledger_registration_test_disabled - -Verify that peer ledgers on the same token are able to register mutually - -```erlang -subledger_registration_test_disabled() -> - Opts = test_opts(), - Alice = ar_wallet:new(), - RootLedger = - ledger( - <<"scripts/hyper-token.lua">>, - #{ <<"balance">> => #{ Alice => 100 } }, - Opts - ), - SubLedger1 = subledger(RootLedger, Opts), - SubLedger2 = subledger(RootLedger, Opts), - Names = #{ - SubLedger1 => subledger1, - SubLedger2 => subledger2 - }, - ?event(debug, - {subledger, - {sl1, hb_message:id(SubLedger1, none)}, - {sl2, hb_message:id(SubLedger2, none)} - } - ), - % There are no registered peers on either sub-ledger. 
-``` - -### single_subledger_to_subledger_test_ - -```erlang -single_subledger_to_subledger_test_() -> {timeout, 30, fun single_subledger_to_subledger/0}. -``` - -### single_subledger_to_subledger - -```erlang -single_subledger_to_subledger() -> - Opts = test_opts(), - Alice = ar_wallet:new(), - Bob = ar_wallet:new(), - RootLedger = - ledger( - <<"scripts/hyper-token.lua">>, - #{ <<"balance">> => #{ Alice => 100 } }, - Opts - ), - SubLedger1 = subledger(RootLedger, Opts), - SL1ID = hb_message:id(SubLedger1, signed, Opts), - ?event({sl1ID, SL1ID}), - SubLedger2 = subledger(RootLedger, Opts), - SL2ID = hb_message:id(SubLedger2, signed, Opts), - ?event({sl2ID, SL2ID}), - Names = #{ - Alice => alice, - Bob => bob, - RootLedger => root, - SubLedger1 => subledger1, - SubLedger2 => subledger2 - }, - ?event({root_ledger, RootLedger}), - ?event({sl1, SubLedger1}), - ?event({sl2, SubLedger2}), - ?assertEqual(100, balance(RootLedger, Alice, Opts)), - % 2. Alice sends 90 tokens to herself on SubLedger1. -``` - -### subledger_to_subledger_test_ - -Verify that registered sub-ledgers are able to send tokens to each other - -```erlang -subledger_to_subledger_test_() -> {timeout, 30, fun subledger_to_subledger/0}. -``` - -### subledger_to_subledger - -Verify that registered sub-ledgers are able to send tokens to each other - -```erlang -subledger_to_subledger() -> - Opts = test_opts(), - Alice = ar_wallet:new(), - Bob = ar_wallet:new(), - RootLedger = - ledger( - <<"scripts/hyper-token.lua">>, - #{ <<"balance">> => #{ Alice => 100 } }, - Opts - ), - SubLedger1 = subledger(RootLedger, Opts), - SubLedger2 = subledger(RootLedger, Opts), - Names = #{ - Alice => alice, - Bob => bob, - RootLedger => root, - SubLedger1 => subledger1, - SubLedger2 => subledger2 - }, - % 1. Alice has tokens on the root ledger. 
-``` - -### unregistered_peer_transfer_test_ - -Verify that a ledger can send tokens to a peer ledger that is not - -```erlang -unregistered_peer_transfer_test_() -> {timeout, 30, fun unregistered_peer_transfer/0}. -``` - -### unregistered_peer_transfer - -Verify that a ledger can send tokens to a peer ledger that is not - -```erlang -unregistered_peer_transfer() -> - Opts = #{}, - Alice = ar_wallet:new(), - Bob = ar_wallet:new(), - RootLedger = - ledger( - <<"scripts/hyper-token.lua">>, - #{ <<"balance">> => #{ Alice => 100 } }, - Opts - ), - SubLedgers = [ subledger(RootLedger, Opts) || _ <- lists:seq(1, 3) ], - SubLedger1 = lists:nth(1, SubLedgers), - SubLedger2 = lists:nth(2, SubLedgers), - SubLedger3 = lists:nth(3, SubLedgers), - Names = #{ - Alice => alice, - Bob => bob, - RootLedger => root, - SubLedger1 => subledger1, - SubLedger2 => subledger2, - SubLedger3 => subledger3 - }, - % 1. Alice has tokens on the root ledger. -``` - -### multischeduler_test_disabled - -Verify that sub-ledgers can request and enforce multiple scheduler - -```erlang -multischeduler_test_disabled() -> {timeout, 30, fun multischeduler/0}. -``` - -### multischeduler - -Verify that sub-ledgers can request and enforce multiple scheduler - -```erlang -multischeduler() -> - BaseOpts = test_opts(), - NodeWallet = ar_wallet:new(), - Scheduler2 = ar_wallet:new(), - Scheduler3 = ar_wallet:new(), - Opts = BaseOpts#{ - priv_wallet => NodeWallet, - identities => #{ - <<"extra-scheduler">> => #{ - priv_wallet => Scheduler2 - } - } - }, - Alice = ar_wallet:new(), - Bob = ar_wallet:new(), - RootLedger = - ledger( - <<"scripts/hyper-token.lua">>, - ProcExtra = - #{ - <<"balance">> => #{ Alice => 100 }, - <<"scheduler">> => - [ - hb_util:human_id(NodeWallet), - hb_util:human_id(Scheduler2) - ], - <<"scheduler-required">> => - [ - hb_util:human_id(NodeWallet) - ] - }, - Opts - ), - % Alice has tokens on the root ledger. She moves them to Bob. 
-``` - -### comma_separated_scheduler_list_test - -Ensure that the `hyper-token.lua` script can parse comma-separated - -```erlang -comma_separated_scheduler_list_test() -> - NodeWallet = hb:wallet(), - Scheduler2 = ar_wallet:new(), - Alice = ar_wallet:new(), - Bob = ar_wallet:new(), - Opts = (test_opts())#{ priv_wallet => NodeWallet, identities => #{ - <<"extra-scheduler">> => #{ - priv_wallet => Scheduler2 - } - } }, - Ledger = - ledger( - <<"scripts/hyper-token.lua">>, - ProcExtra = - #{ - <<"balance">> => #{ Alice => 100 }, - <<"scheduler">> => - iolist_to_binary( - [ - <<"\"">>, - hb_util:human_id(NodeWallet), - <<"\",\"">>, - hb_util:human_id(Scheduler2), - <<"\"">> - ] - ), - <<"scheduler-required">> => - [ - hb_util:human_id(NodeWallet) - ] - }, - Opts - ), - % Alice has tokens on the root ledger. She moves them to Bob. -``` - ---- - -*Generated from [dev_lua_test_ledgers.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_lua_test_ledgers.erl)* diff --git a/docs/book/src/dev_manifest.erl.md b/docs/book/src/dev_manifest.erl.md deleted file mode 100644 index 0f89076b1..000000000 --- a/docs/book/src/dev_manifest.erl.md +++ /dev/null @@ -1,188 +0,0 @@ -# dev_manifest - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_manifest.erl) - -An Arweave path manifest resolution device. Follows the v1 schema: -https://specs.ar.io/?tx=lXLd0OPwo-dJLB_Amz5jgIeDhiOkjXuM3-r0H_aiNj0 - ---- - -## Exported Functions - -- `index/3` -- `info/0` - ---- - -### info - -An Arweave path manifest resolution device. Follows the v1 schema: -Use the `route/4` function as the handler for all requests, aside - -```erlang -info() -> - #{ - default => fun route/4, - excludes => [keys, set, committers] - }. -``` - -### index - -Return the fallback index page when the manifest itself is requested. 
- -```erlang -index(M1, M2, Opts) -> - ?event({manifest_index_request, M1, M2}), - case route(<<"index">>, M1, M2, Opts) of - {ok, Index} -> - ?event({manifest_index_returned, Index}), - {ok, Index}; - {error, not_found} -> - {error, not_found} - end. -``` - -### route - -Route a request to the associated data via its manifest. - -```erlang -route(<<"index">>, M1, M2, Opts) -> - ?event({manifest_index, M1, M2}), - case manifest(M1, M2, Opts) of - {ok, JSONStruct} -> - ?event({manifest_json_struct, JSONStruct}), - % Get the path to the index page from the manifest. We make - % sure to use `hb_maps:get/4' to ensure that we do not recurse - % on the `index' key with an `ao' resolve. -``` - -### route - -```erlang -route(Key, M1, M2, Opts) -> - ?event({manifest_lookup, Key}), - {ok, Manifest} = manifest(M1, M2, Opts), - {ok, - hb_ao:get( - <<"paths/", Key/binary>>, - {as, <<"message@1.0">>, Manifest}, - Opts - ) - }. -``` - -### manifest - -Find and deserialize a manifest from the given base. - -```erlang -manifest(Base, _Req, Opts) -> - JSON = - hb_ao:get_first( - [ - {{as, <<"message@1.0">>, Base}, [<<"data">>]}, - {{as, <<"message@1.0">>, Base}, [<<"body">>]} - ], - Opts - ), - ?event({manifest_json, JSON}), - Structured = - hb_cache:ensure_all_loaded( - hb_message:convert(JSON, <<"structured@1.0">>, <<"json@1.0">>, Opts), - Opts - ), - ?event({manifest_structured, {explicit, Structured}}), - Linkified = linkify(Structured, Opts), - ?event({manifest_linkified, {explicit, Linkified}}), - {ok, Linkified}. 
-``` - -### linkify - -Generate a nested message of links to content from a parsed (and - -```erlang -linkify(#{ <<"id">> := ID }, Opts) -> - LinkOptsBase = (maps:with([store], Opts))#{ scope => [local, remote]}, - {link, ID, LinkOptsBase#{ <<"type">> => <<"link">>, <<"lazy">> => false }}; -``` - -### linkify - -Generate a nested message of links to content from a parsed (and - -```erlang -linkify(Manifest, Opts) when is_map(Manifest) -> - hb_maps:map( - fun(_Key, Val) -> linkify(Val, Opts) end, - Manifest, - Opts - ); -``` - -### linkify - -Generate a nested message of links to content from a parsed (and - -```erlang -linkify(Manifest, Opts) when is_list(Manifest) -> - lists:map( - fun(Item) -> linkify(Item, Opts) end, - Manifest - ); -``` - -### linkify - -Generate a nested message of links to content from a parsed (and - -```erlang -linkify(Manifest, _Opts) -> - Manifest. -``` - -### resolve_test - -```erlang -resolve_test() -> - Opts = #{ store => hb_opts:get(store, no_viable_store, #{}) }, - IndexPage = #{ - <<"content-type">> => <<"text/html">>, - <<"body">> => <<"Page 1">> - }, - {ok, IndexID} = hb_cache:write(IndexPage, Opts), - Page2 = #{ - <<"content-type">> => <<"text/html">>, - <<"body">> => <<"Page 2">> - }, - {ok, Page2ID} = hb_cache:write(Page2, Opts), - Manifest = #{ - <<"paths">> => #{ - <<"nested">> => #{ <<"page2">> => #{ <<"id">> => Page2ID } }, - <<"page1">> => #{ <<"id">> => IndexID } - }, - <<"index">> => #{ <<"path">> => <<"page1">> } - }, - JSON = hb_message:convert(Manifest, <<"json@1.0">>, <<"structured@1.0">>, Opts), - ManifestMsg = - #{ - <<"device">> => <<"manifest@1.0">>, - <<"body">> => JSON - }, - {ok, ManifestID} = hb_cache:write(ManifestMsg, Opts), - ?event({manifest_id, ManifestID}), - Node = hb_http_server:start_node(Opts), - ?assertMatch( - {ok, #{ <<"body">> := <<"Page 1">> }}, - hb_http:get(Node, << ManifestID/binary, "/index" >>, Opts) - ), - {ok, Res} = hb_http:get(Node, << ManifestID/binary, "/nested/page2" >>, Opts), - 
?event({manifest_resolve_test, Res}), - ?assertEqual(<<"Page 2">>, hb_maps:get(<<"body">>, Res, <<"NO BODY">>, Opts)), -``` - ---- - -*Generated from [dev_manifest.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_manifest.erl)* diff --git a/docs/book/src/dev_message.erl.md b/docs/book/src/dev_message.erl.md deleted file mode 100644 index 94e4ec92e..000000000 --- a/docs/book/src/dev_message.erl.md +++ /dev/null @@ -1,625 +0,0 @@ -# dev_message - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_message.erl) - -The identity device: For non-reserved keys, it simply returns a key -from the message as it is found in the message's underlying Erlang map. -Private keys (`priv[.*]`) are not included. -Reserved keys are: `id`, `commitments`, `committers`, `keys`, `path`, -`set`, `remove`, `get`, and `verify`. Their function comments describe the -behaviour of the device when these keys are set. - ---- - -## Exported Functions - -- `commit/3` -- `committed/3` -- `committers/1` -- `committers/2` -- `committers/3` -- `get/3` -- `get/4` -- `id/1` -- `id/2` -- `id/3` -- `index/3` -- `info/0` -- `keys/1` -- `keys/2` -- `remove/2` -- `remove/3` -- `set_path/3` -- `set/3` -- `verify/3` - ---- - -### info - -The identity device: For non-reserved keys, it simply returns a key -Return the info for the identity device. - -```erlang -info() -> - #{ - default => fun dev_message:get/4 - }. 
-``` - -### index - -Generate an index page for a message, in the event that the `body` and - -```erlang -index(Msg, Req, Opts) -> - case hb_opts:get(default_index, not_found, Opts) of - not_found -> - {error, <<"No default index message set.">>}; - DefaultIndex -> - hb_ao:resolve( - case is_map(DefaultIndex) of - true -> maps:merge(Msg, DefaultIndex); - false -> {as, DefaultIndex, Msg} - end, - Req#{ - <<"path">> => - case hb_maps:find(<<"path">>, DefaultIndex, Opts) of - {ok, Path} -> Path; - _ -> - hb_opts:get(default_index_path, <<"index">>, Opts) - end - }, - Opts - ) - end. -``` - -### id - -Return the ID of a message, using the `committers` list if it exists. - -```erlang -id(Base) -> id(Base, #{}). -``` - -### id - -Return the ID of a message, using the `committers` list if it exists. - -```erlang -id(Base, Req) -> id(Base, Req, #{}). -``` - -### id - -Return the ID of a message, using the `committers` list if it exists. - -```erlang -id(Base, _, NodeOpts) when is_binary(Base) -> - % Return the hashpath of the message in native format, to match the native - % format of the message ID return. -``` - -### id - -```erlang -id(RawBase, Req, NodeOpts) -> - % Ensure that the base message is a normalized before proceeding. -``` - -### calculate_id - -```erlang -calculate_id(Base, Req, NodeOpts) -> - % Find the ID device for the message. -``` - -### id_device - -Locate the ID device of a message. The ID device is determined the - -```erlang -id_device(#{ <<"commitments">> := Commitments }, Opts) -> - % Get the device from the first commitment. -``` - -### id_device - -```erlang -id_device(_, _) -> - {ok, ?DEFAULT_ID_DEVICE}. -``` - -### committers - -Return the committers of a message that are present in the given request. - -```erlang -committers(Base) -> committers(Base, #{}). -``` - -### committers - -Return the committers of a message that are present in the given request. - -```erlang -committers(Base, Req) -> committers(Base, Req, #{}). 
-``` - -### committers - -Return the committers of a message that are present in the given request. - -```erlang -committers(#{ <<"commitments">> := Commitments }, _, NodeOpts) -> - ?event(debug_commitments, {calculating_committers, {commitments, Commitments}}), - {ok, - hb_maps:values( - hb_maps:filtermap( - fun(_ID, Commitment) -> - Committer = maps:get(<<"committer">>, Commitment, undefined), - ?event(debug_commitments, {committers, {committer, Committer}}), - case Committer of - undefined -> false; - Committer -> {true, Committer} - end - end, - Commitments, - NodeOpts - ), - NodeOpts - ) - }; -``` - -### committers - -Return the committers of a message that are present in the given request. - -```erlang -committers(_, _, _) -> - {ok, []}. -``` - -### commit - -Commit to a message, using the `commitment-device` key to specify the - -```erlang -commit(Self, Req, Opts) -> - {ok, Base} = hb_message:find_target(Self, Req, Opts), - AttDev = - case hb_maps:get(<<"commitment-device">>, Req, not_specified, Opts) of - not_specified -> - hb_opts:get(commitment_device, no_viable_commitment_device, Opts); - Dev -> Dev - end, - % We _do not_ set the `device' key in the message, as the device will be - % part of the commitment. Instead, we find the device module's `commit' - % function and apply it. -``` - -### verify - -Verify a message. By default, all commitments are verified. The - -```erlang -verify(Self, Req, Opts) -> - % Get the target message of the verification request. 
-``` - -### verify_commitment - -Execute a function for a single commitment in the context of its - -```erlang -verify_commitment(Base, Commitment, Opts) -> - ?event(verify, {verifying_commitment, {commitment, Commitment}, {msg, Base}}), - AttDev = - hb_maps:get( - <<"commitment-device">>, - Commitment, - ?DEFAULT_ATT_DEVICE, - Opts - ), - AttMod = - hb_ao:message_to_device( - #{ <<"device">> => AttDev }, - Opts - ), - {ok, AttFun} = - hb_ao:find_exported_function( - Base, - AttMod, - verify, - 3, - Opts - ), - apply(AttFun, [Base, Commitment, Opts]). -``` - -### committed - -Return the list of committed keys from a message. - -```erlang -committed(Self, Req, Opts) -> - % Get the target message of the verification request and ensure its - % commitments are loaded. -``` - -### with_relevant_commitments - -Return a message with only the relevant commitments for a given request. -Implements a standardized form of specifying commitment IDs for a - -```erlang -with_relevant_commitments(Base, Req, Opts) -> - Commitments = maps:get(<<"commitments">>, Base, #{}), - CommitmentIDs = commitment_ids_from_request(Base, Req, Opts), - Base#{ <<"commitments">> => maps:with(CommitmentIDs, Commitments) }. -``` - -### commitment_ids_from_request - -Return a message with only the relevant commitments for a given request. 
-Implements a standardized form of specifying commitment IDs for a - -```erlang -commitment_ids_from_request(Base, Req, Opts) -> - Commitments = maps:get(<<"commitments">>, Base, #{}), - ReqCommitters = - case maps:get(<<"committers">>, Req, <<"none">>) of - X when is_list(X) -> X; - CommitterDescriptor -> hb_ao:normalize_key(CommitterDescriptor) - end, - RawReqCommitments = maps:get(<<"commitments">>, Req, <<"none">>), - ReqCommitments = - case RawReqCommitments of - X2 when is_list(X2) -> X2; - CommitmentDescriptor -> hb_ao:normalize_key(CommitmentDescriptor) - end, - ?event(debug_commitments, - {commitment_ids_from_request, - {req_commitments, ReqCommitments}, - {req_committers, ReqCommitters}} - ), - % Get the commitments to verify. -``` - -### ensure_commitments_loaded - -Ensure that the `commitments` submessage of a base message is fully - -```erlang -ensure_commitments_loaded(NonRelevant, _Opts) when not is_map(NonRelevant) -> - NonRelevant; -``` - -### ensure_commitments_loaded - -Ensure that the `commitments` submessage of a base message is fully - -```erlang -ensure_commitments_loaded(M = #{ <<"commitments">> := Link}, Opts) when ?IS_LINK(Link) -> - M#{ - <<"commitments">> => hb_cache:ensure_all_loaded(Link, Opts) - }; -``` - -### ensure_commitments_loaded - -Ensure that the `commitments` submessage of a base message is fully - -```erlang -ensure_commitments_loaded(M, _Opts) -> - M. -``` - -### commitment_ids_from_committers - -Returns a list of commitment IDs in a commitments map that are relevant - -```erlang -commitment_ids_from_committers(CommitterAddrs, Commitments, Opts) -> - % Get the IDs of all commitments for each committer. -``` - -### set - -Deep merge keys in a message. Takes a map of key-value pairs and sets - -```erlang -set(Message1, NewValuesMsg, Opts) -> - OriginalPriv = hb_private:from_message(Message1), - % Filter keys that are in the default device (this one). -``` - -### set_path - -Special case of `set/3` for setting the `path` key. 
This cannot be set - -```erlang -set_path(Base, #{ <<"value">> := Value }, Opts) -> - set_path(Base, Value, Opts); -``` - -### set_path - -Special case of `set/3` for setting the `path` key. This cannot be set - -```erlang -set_path(Base, Value, Opts) when not is_map(Value) -> - % Determine whether the `path' key is committed. If it is, we remove the - % commitment if the new value is different. We try to minimize work by - % doing the `hb_maps:get` first, as it is far cheaper than calculating - % the committed keys. -``` - -### remove - -Remove a key or keys from a message. - -```erlang -remove(Message1, Key) -> - remove(Message1, Key, #{}). -``` - -### remove - -```erlang -remove(Message1, #{ <<"item">> := Key }, Opts) -> - remove(Message1, #{ <<"items">> => [Key] }, Opts); -``` - -### remove - -```erlang -remove(Message1, #{ <<"items">> := Keys }, Opts) -> - { ok, hb_maps:without(Keys, Message1, Opts) }. -``` - -### keys - -Get the public keys of a message. - -```erlang -keys(Msg) -> - keys(Msg, #{}). -``` - -### keys - -```erlang -keys(Msg, Opts) when not is_map(Msg) -> - case hb_ao:normalize_keys(Msg, Opts) of - NormMsg when is_map(NormMsg) -> keys(NormMsg, Opts); - _ -> throw(badarg) - end; -``` - -### keys - -```erlang -keys(Msg, Opts) -> - { - ok, - lists:filter( - fun(Key) -> not hb_private:is_private(Key) end, - hb_maps:keys(Msg, Opts) - ) - }. -``` - -### get - -Return the value associated with the key as it exists in the message's - -```erlang -get(Key, Msg, Opts) -> get(Key, Msg, #{ <<"path">> => <<"get">> }, Opts). -``` - -### get - -Return the value associated with the key as it exists in the message's - -```erlang -get(Key, Msg, _Msg2, Opts) -> - case hb_private:is_private(Key) of - true -> {error, not_found}; - false -> - case hb_maps:get(Key, Msg, not_found, Opts) of - not_found -> case_insensitive_get(Key, Msg, Opts); - Value -> {ok, Value} - end - end. 
-``` - -### case_insensitive_get - -Key matching should be case insensitive, following RFC-9110, so we - -```erlang -case_insensitive_get(Key, Msg, Opts) -> - NormKey = hb_util:to_lower(hb_util:bin(Key)), - NormMsg = hb_ao:normalize_keys(Msg, Opts), - case hb_maps:get(NormKey, NormMsg, not_found, Opts) of - not_found -> {error, not_found}; - Value -> {ok, Value} - end. -``` - -### get_keys_mod_test - -```erlang -get_keys_mod_test() -> - ?assertEqual([a], hb_maps:keys(#{a => 1}, #{})). -``` - -### is_private_mod_test - -```erlang -is_private_mod_test() -> - ?assertEqual(true, hb_private:is_private(<<"private">>)), - ?assertEqual(true, hb_private:is_private(<<"private.foo">>)), - ?assertEqual(false, hb_private:is_private(<<"a">>)). -%%% Device functionality tests: -``` - -### keys_from_device_test - -```erlang -keys_from_device_test() -> - ?assertEqual({ok, [<<"a">>]}, hb_ao:resolve(#{ <<"a">> => 1 }, keys, #{})). -``` - -### case_insensitive_get_test - -```erlang -case_insensitive_get_test() -> - ?assertEqual({ok, 1}, case_insensitive_get(<<"a">>, #{ <<"a">> => 1 }, #{})), -% ?assertEqual({ok, 1}, case_insensitive_get(<<"a">>, #{ <<"A">> => 1 }, #{})), - ?assertEqual({ok, 1}, case_insensitive_get(<<"A">>, #{ <<"a">> => 1 }, #{})). - %?assertEqual({ok, 1}, case_insensitive_get(<<"A">>, #{ <<"A">> => 1 }, #{})). -``` - -### private_keys_are_filtered_test - -```erlang -private_keys_are_filtered_test() -> - ?assertEqual( - {ok, [<<"a">>]}, - hb_ao:resolve(#{ <<"a">> => 1, <<"private">> => 2 }, keys, #{}) - ), - ?assertEqual( - {ok, [<<"a">>]}, - hb_ao:resolve(#{ <<"a">> => 1, <<"priv_foo">> => 4 }, keys, #{}) - ). -``` - -### cannot_get_private_keys_test - -```erlang -cannot_get_private_keys_test() -> - ?assertEqual( - {error, not_found}, - hb_ao:resolve( - #{ <<"a">> => 1, <<"private_key">> => 2 }, - <<"private_key">>, - #{ hashpath => ignore } - ) - ). 
-``` - -### key_from_device_test - -```erlang -key_from_device_test() -> - ?assertEqual({ok, 1}, hb_ao:resolve(#{ <<"a">> => 1 }, <<"a">>, #{})). -``` - -### remove_test - -```erlang -remove_test() -> - Msg = #{ <<"key1">> => <<"Value1">>, <<"key2">> => <<"Value2">> }, - ?assertMatch({ok, #{ <<"key2">> := <<"Value2">> }}, - hb_ao:resolve( - Msg, - #{ <<"path">> => <<"remove">>, <<"item">> => <<"key1">> }, - #{ hashpath => ignore } - ) - ), - ?assertMatch({ok, #{}}, - hb_ao:resolve( - Msg, - #{ <<"path">> => <<"remove">>, <<"items">> => [<<"key1">>, <<"key2">>] }, - #{ hashpath => ignore } - ) - ). -``` - -### set_conflicting_keys_test - -```erlang -set_conflicting_keys_test() -> - Msg1 = #{ <<"dangerous">> => <<"Value1">> }, - Msg2 = #{ <<"path">> => <<"set">>, <<"dangerous">> => <<"Value2">> }, - ?assertMatch({ok, #{ <<"dangerous">> := <<"Value2">> }}, - hb_ao:resolve(Msg1, Msg2, #{})). -``` - -### unset_with_set_test - -```erlang -unset_with_set_test() -> - Msg1 = #{ <<"dangerous">> => <<"Value1">> }, - Msg2 = #{ <<"path">> => <<"set">>, <<"dangerous">> => unset }, - ?assertMatch({ok, Msg3} when ?IS_EMPTY_MESSAGE(Msg3), - hb_ao:resolve(Msg1, Msg2, #{ hashpath => ignore })). -``` - -### deep_unset_test - -```erlang -deep_unset_test() -> - Opts = #{ hashpath => ignore }, - Msg1 = #{ - <<"test-key1">> => <<"Value1">>, - <<"deep">> => #{ - <<"test-key2">> => <<"Value2">>, - <<"test-key3">> => <<"Value3">> - } - }, - Msg2 = hb_ao:set(Msg1, #{ <<"deep/test-key2">> => unset }, Opts), - ?assertEqual(#{ - <<"test-key1">> => <<"Value1">>, - <<"deep">> => #{ <<"test-key3">> => <<"Value3">> } - }, - Msg2 - ), - Msg3 = hb_ao:set(Msg2, <<"deep/test-key3">>, unset, Opts), - ?assertEqual(#{ - <<"test-key1">> => <<"Value1">>, - <<"deep">> => #{} - }, - Msg3 - ), - Msg4 = hb_ao:set(Msg3, #{ <<"deep">> => unset }, Opts), - ?assertEqual(#{ <<"test-key1">> => <<"Value1">> }, Msg4). 
-``` - -### set_ignore_undefined_test - -```erlang -set_ignore_undefined_test() -> - Msg1 = #{ <<"test-key">> => <<"Value1">> }, - Msg2 = #{ <<"path">> => <<"set">>, <<"test-key">> => undefined }, - ?assertEqual(#{ <<"test-key">> => <<"Value1">> }, - hb_private:reset(hb_util:ok(set(Msg1, Msg2, #{ hashpath => ignore })))). -``` - -### verify_test - -```erlang -verify_test() -> - Unsigned = #{ <<"a">> => <<"b">> }, - Signed = hb_message:commit(Unsigned, hb:wallet()), - ?event({signed, Signed}), - BadSigned = Signed#{ <<"a">> => <<"c">> }, - ?event({bad_signed, BadSigned}), - ?assertEqual(false, hb_message:verify(BadSigned)), - ?assertEqual({ok, true}, - hb_ao:resolve( - #{ <<"device">> => <<"message@1.0">> }, - #{ <<"path">> => <<"verify">>, <<"body">> => Signed }, - #{ hashpath => ignore } - ) - ), - % Test that we can verify a message without specifying the device explicitly. -``` - ---- - -*Generated from [dev_message.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_message.erl)* diff --git a/docs/book/src/dev_meta.erl.md b/docs/book/src/dev_meta.erl.md deleted file mode 100644 index 98475b034..000000000 --- a/docs/book/src/dev_meta.erl.md +++ /dev/null @@ -1,981 +0,0 @@ -# dev_meta - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_meta.erl) - -The hyperbeam meta device, which is the default entry point -for all messages processed by the machine. This device executes a -AO-Core singleton request, after first applying the node's -pre-processor, if set. The pre-processor can halt the request by -returning an error, or return a modified version if it deems necessary -- -the result of the pre-processor is used as the request for the AO-Core -resolver. Additionally, a post-processor can be set, which is executed after -the AO-Core resolver has returned a result. 
- ---- - -## Exported Functions - -- `adopt_node_message/2` -- `build/3` -- `handle/2` -- `info/1` -- `info/3` -- `is_operator/2` -- `is/2` -- `is/3` - ---- - -### info - -The hyperbeam meta device, which is the default entry point -Ensure that the helper function `adopt_node_message/2` is not exported. -Utility function for determining if a request is from the `operator` of - -```erlang -info(_) -> #{ exports => [info, build] }. -``` - -### is_operator - -The hyperbeam meta device, which is the default entry point -Ensure that the helper function `adopt_node_message/2` is not exported. -Utility function for determining if a request is from the `operator` of - -```erlang -is_operator(Request, NodeMsg) -> - RequestSigners = hb_message:signers(Request, NodeMsg), - Operator = - hb_opts:get( - operator, - case hb_opts:get(priv_wallet, no_viable_wallet, NodeMsg) of - no_viable_wallet -> unclaimed; - Wallet -> ar_wallet:to_address(Wallet) - end, - NodeMsg - ), - EncOperator = - case Operator of - unclaimed -> unclaimed; - NativeAddress -> hb_util:human_id(NativeAddress) - end, - EncOperator == unclaimed orelse lists:member(EncOperator, RequestSigners). -``` - -### build - -Emits the version number and commit hash of the HyperBEAM node source, - -```erlang -build(_, _, _NodeMsg) -> - {ok, - #{ - <<"node">> => <<"HyperBEAM">>, - <<"version">> => ?HYPERBEAM_VERSION, - <<"source">> => ?HB_BUILD_SOURCE, - <<"source-short">> => ?HB_BUILD_SOURCE_SHORT, - <<"build-time">> => ?HB_BUILD_TIME - } - }. -``` - -### handle - -Normalize and route messages downstream based on their path. 
Messages - -```erlang -handle(NodeMsg, RawRequest) -> - ?event({singleton_tabm_request, RawRequest}), - NormRequest = hb_singleton:from(RawRequest, NodeMsg), - ?event( - http, - {request, - hb_cache:ensure_all_loaded( - hb_ao:normalize_keys(NormRequest, NodeMsg), - NodeMsg - ) - } - ), - case hb_opts:get(initialized, false, NodeMsg) of - false -> - Res = - embed_status( - hb_ao:force_message( - handle_initialize(NormRequest, NodeMsg), - NodeMsg - ), - NodeMsg - ), - Res; - _ -> handle_resolve(RawRequest, NormRequest, NodeMsg) - end. -``` - -### handle_initialize - -```erlang -handle_initialize([Base = #{ <<"device">> := Dev}, Req = #{ <<"path">> := Path }|_], NodeMsg) -> - ?event({got, {device, Dev}, {path, Path}}), - case {Dev, Path} of - {<<"meta@1.0">>, <<"info">>} -> info(Base, Req, NodeMsg); - _ -> {error, <<"Node must be initialized before use.">>} - end; -``` - -### handle_initialize - -```erlang -handle_initialize([{as, <<"meta@1.0">>, _}|Rest], NodeMsg) -> - handle_initialize([#{ <<"device">> => <<"meta@1.0">>}|Rest], NodeMsg); -``` - -### handle_initialize - -```erlang -handle_initialize([_|Rest], NodeMsg) -> - handle_initialize(Rest, NodeMsg); -``` - -### handle_initialize - -Get/set the node message. If the request is a `POST`, we check that the - -```erlang -handle_initialize([], _NodeMsg) -> - {error, <<"Node must be initialized before use.">>}. -``` - -### info - -Get/set the node message. If the request is a `POST`, we check that the - -```erlang -info(_, Request, NodeMsg) -> - case hb_ao:get(<<"method">>, Request, NodeMsg) of - <<"POST">> -> - case hb_ao:get(<<"initialized">>, NodeMsg, not_found, NodeMsg) of - permanent -> - embed_status( - {error, - <<"The node message of this machine is already " - "permanent. 
It cannot be changed.">> - }, - NodeMsg - ); - _ -> - update_node_message(Request, NodeMsg) - end; - _ -> - ?event({get_config_req, Request, NodeMsg}), - DynamicKeys = add_dynamic_keys(NodeMsg), - embed_status({ok, filter_node_msg(DynamicKeys, NodeMsg)}, NodeMsg) - end. -``` - -### filter_node_msg - -Remove items from the node message that are not encodable into a - -```erlang -filter_node_msg(Msg, NodeMsg) when is_map(Msg) -> - hb_maps:map(fun(_, Value) -> filter_node_msg(Value, NodeMsg) end, hb_private:reset(Msg), NodeMsg); -``` - -### filter_node_msg - -Remove items from the node message that are not encodable into a - -```erlang -filter_node_msg(Msg, NodeMsg) when is_list(Msg) -> - lists:map(fun(Item) -> filter_node_msg(Item, NodeMsg) end, Msg); -``` - -### filter_node_msg - -Remove items from the node message that are not encodable into a - -```erlang -filter_node_msg(Tuple, _NodeMsg) when is_tuple(Tuple) -> - <<"Unencodable value.">>; -``` - -### filter_node_msg - -Remove items from the node message that are not encodable into a - -```erlang -filter_node_msg(Other, _NodeMsg) -> - Other. -``` - -### add_dynamic_keys - -Add dynamic keys to the node message. - -```erlang -add_dynamic_keys(NodeMsg) -> - UpdatedNodeMsg = - case hb_opts:get(priv_wallet, no_viable_wallet, NodeMsg) of - no_viable_wallet -> - NodeMsg; - Wallet -> - %% Create a new map with address and merge it (overwriting existing) - Address = hb_util:id(ar_wallet:to_address(Wallet)), - NodeMsg#{ address => Address, <<"address">> => Address } - end, - add_identity_addresses(UpdatedNodeMsg). 
-``` - -### add_identity_addresses - -Validate that the request is signed by the operator of the node, then - -```erlang -add_identity_addresses(NodeMsg) -> - Identities = hb_opts:get(identities, #{}, NodeMsg), - NewIdentities = maps:map(fun(_, Identity) -> - Identity#{ - <<"address">> => hb_util:human_id( - hb_opts:get(priv_wallet, hb:wallet(), Identity) - ) - } - end, Identities), - NodeMsg#{ <<"identities">> => NewIdentities }. -``` - -### update_node_message - -Validate that the request is signed by the operator of the node, then - -```erlang -update_node_message(Request, NodeMsg) -> - case is(admin, Request, NodeMsg) of - false -> - ?event({set_node_message_fail, Request}), - embed_status({error, <<"Unauthorized">>}, NodeMsg); - true -> - case adopt_node_message(Request, NodeMsg) of - {ok, NewNodeMsg} -> - NewH = hb_opts:get(node_history, [], NewNodeMsg), - embed_status( - {ok, - #{ - <<"body">> => - iolist_to_binary( - io_lib:format( - "Node message updated. History: ~p" - "updates.", - [length(NewH)] - ) - ), - <<"history-length">> => length(NewH) - } - }, - NodeMsg - ); - {error, Reason} -> - ?event({set_node_message_fail, Request, Reason}), - embed_status({error, Reason}, NodeMsg) - end - end. -``` - -### adopt_node_message - -Attempt to adopt changes to a node message. - -```erlang -adopt_node_message(Request, NodeMsg) -> - ?event({set_node_message_success, Request}), - % Ensure that the node history is updated and the http_server ID is - % not overridden. -``` - -### handle_resolve - -Handle an AO-Core request, which is a list of messages. We apply - -```erlang -handle_resolve(Req, Msgs, NodeMsg) -> - TracePID = hb_opts:get(trace, no_tracer_set, NodeMsg), - % Apply the pre-processor to the request. -``` - -### resolve_hook - -Execute a hook from the node message upon the user's request. 
The - -```erlang -resolve_hook(HookName, InitiatingRequest, Body, NodeMsg) -> - HookReq = - #{ - <<"request">> => InitiatingRequest, - <<"body">> => Body - }, - ?event(hook, {resolve_hook, HookName, HookReq}), - case dev_hook:on(HookName, HookReq, NodeMsg) of - {ok, #{ <<"body">> := ResponseBody }} -> - ?event(hook, - {resolve_hook_success, - {name, HookName}, - {response_body, ResponseBody} - } - ), - {ok, ResponseBody}; - {error, _} = Error -> - ?event(hook, - {resolve_hook_error, - {name, HookName}, - {error, Error} - } - ), - Error; - Other -> - {error, Other} - end. -``` - -### embed_status - -Wrap the result of a device call in a status. - -```erlang -embed_status({ErlStatus, Res}, NodeMsg) when is_map(Res) -> - case lists:member(<<"status">>, hb_message:committed(Res, all, NodeMsg)) of - false -> - HTTPCode = status_code({ErlStatus, Res}, NodeMsg), - {ok, Res#{ <<"status">> => HTTPCode }}; - true -> - {ok, Res} - end; -``` - -### embed_status - -Wrap the result of a device call in a status. -Calculate the appropriate HTTP status code for an AO-Core result. - -```erlang -embed_status({ErlStatus, Res}, NodeMsg) -> - HTTPCode = status_code({ErlStatus, Res}, NodeMsg), - {ok, #{ <<"status">> => HTTPCode, <<"body">> => Res }}. -``` - -### status_code - -Wrap the result of a device call in a status. -Calculate the appropriate HTTP status code for an AO-Core result. - -```erlang -status_code({ErlStatus, Msg}, NodeMsg) -> - case message_to_status(Msg, NodeMsg) of - default -> status_code(ErlStatus, NodeMsg); - RawStatus -> RawStatus - end; -``` - -### status_code - -Wrap the result of a device call in a status. -Calculate the appropriate HTTP status code for an AO-Core result. - -```erlang -status_code(ok, _NodeMsg) -> 200; -``` - -### status_code - -Wrap the result of a device call in a status. -Calculate the appropriate HTTP status code for an AO-Core result. 
- -```erlang -status_code(error, _NodeMsg) -> 400; -``` - -### status_code - -Wrap the result of a device call in a status. -Calculate the appropriate HTTP status code for an AO-Core result. - -```erlang -status_code(created, _NodeMsg) -> 201; -``` - -### status_code - -Wrap the result of a device call in a status. -Calculate the appropriate HTTP status code for an AO-Core result. - -```erlang -status_code(not_found, _NodeMsg) -> 404; -``` - -### status_code - -Wrap the result of a device call in a status. -Calculate the appropriate HTTP status code for an AO-Core result. - -```erlang -status_code(failure, _NodeMsg) -> 500; -``` - -### status_code - -Wrap the result of a device call in a status. -Calculate the appropriate HTTP status code for an AO-Core result. - -```erlang -status_code(unavailable, _NodeMsg) -> 503; -``` - -### status_code - -Wrap the result of a device call in a status. -Calculate the appropriate HTTP status code for an AO-Core result. - -```erlang -status_code(unauthorized, _NodeMsg) -> 401; -``` - -### status_code - -Wrap the result of a device call in a status. -Calculate the appropriate HTTP status code for an AO-Core result. - -```erlang -status_code(forbidden, _NodeMsg) -> 403; -``` - -### status_code - -Wrap the result of a device call in a status. -Calculate the appropriate HTTP status code for an AO-Core result. -Get the HTTP status code from a transaction (if it exists). - -```erlang -status_code(_, _NodeMsg) -> 200. -``` - -### message_to_status - -Wrap the result of a device call in a status. -Calculate the appropriate HTTP status code for an AO-Core result. -Get the HTTP status code from a transaction (if it exists). - -```erlang -message_to_status(#{ <<"body">> := Status }, NodeMsg) when is_atom(Status) -> - status_code(Status, NodeMsg); -``` - -### message_to_status - -Wrap the result of a device call in a status. -Calculate the appropriate HTTP status code for an AO-Core result. 
-Get the HTTP status code from a transaction (if it exists). - -```erlang -message_to_status(Item, NodeMsg) when is_map(Item) -> - % Note: We use `dev_message' directly here, such that we do not cause - % additional AO-Core calls for every request. This is particularly important - % if a remote server is being used for all AO-Core requests by a node. -``` - -### message_to_status - -```erlang -message_to_status(Item, NodeMsg) when is_atom(Item) -> - status_code(Item, NodeMsg); -``` - -### message_to_status - -```erlang -message_to_status(_Item, _NodeMsg) -> - default. -``` - -### maybe_sign - -Sign the result of a device call if the node is configured to do so. - -```erlang -maybe_sign({Status, Res}, NodeMsg) -> - {Status, maybe_sign(Res, NodeMsg)}; -``` - -### maybe_sign - -Sign the result of a device call if the node is configured to do so. - -```erlang -maybe_sign(Res, NodeMsg) -> - ?event({maybe_sign, Res}), - case hb_opts:get(force_signed, false, NodeMsg) of - true -> - case hb_message:signers(Res, NodeMsg) of - [] -> hb_message:commit(Res, NodeMsg); - _ -> Res - end; - false -> Res - end. -``` - -### is - -Check if the request in question is signed by a given `role` on the node. - -```erlang -is(Request, NodeMsg) -> - is(operator, Request, NodeMsg). -``` - -### is - -```erlang -is(admin, Request, NodeMsg) -> - % Does the caller have the right to change the node message? 
- RequestSigners = hb_message:signers(Request, NodeMsg), - ValidOperator = - hb_util:bin( - hb_opts:get( - operator, - case hb_opts:get(priv_wallet, no_viable_wallet, NodeMsg) of - no_viable_wallet -> unclaimed; - Wallet -> ar_wallet:to_address(Wallet) - end, - NodeMsg - ) - ), - EncOperator = - case ValidOperator of - <<"unclaimed">> -> unclaimed; - NativeAddress -> hb_util:human_id(NativeAddress) - end, - ?event({is, - {operator, - {valid_operator, ValidOperator}, - {encoded_operator, EncOperator}, - {request_signers, RequestSigners} - } - }), - EncOperator == unclaimed orelse lists:member(EncOperator, RequestSigners); -``` - -### is - -```erlang -is(operator, Req, NodeMsg) -> - % Is the caller explicitly set to be the operator? - % Get the operator from the node message - Operator = hb_opts:get(operator, unclaimed, NodeMsg), - % Get the request signers - RequestSigners = hb_message:signers(Req, NodeMsg), - % Ensure the operator is present in the request - lists:member(Operator, RequestSigners); -``` - -### is - -```erlang -is(initiator, Request, NodeMsg) -> - % Is the caller the first identity that configured the node message? - NodeHistory = hb_opts:get(node_history, [], NodeMsg), - % Check if node_history exists and is not empty - case NodeHistory of - [] -> - ?event(green_zone, {init, node_history, empty}), - false; - [InitializationRequest | _] -> - % Extract signature from first entry - InitializationRequestSigners = hb_message:signers(InitializationRequest, NodeMsg), - % Get request signers - RequestSigners = hb_message:signers(Request, NodeMsg), - % Ensure all signers of the initalization request are present in the - % request. -``` - -### config_test - -Test that we can get the node message. -Test that we can't get the node message if the requested key is private. 
- -```erlang -config_test() -> - StoreOpts = #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST">> - }, - Node = hb_http_server:start_node(Opts = #{ test_config_item => <<"test">>, store => StoreOpts }), - {ok, Res} = hb_http:get(Node, <<"/~meta@1.0/info">>, Opts), - ?assertEqual(<<"test">>, hb_ao:get(<<"test_config_item">>, Res, Opts)). -``` - -### priv_inaccessible_test - -Test that we can get the node message. -Test that we can't get the node message if the requested key is private. -Test that we can't set the node message if the request is not signed by - -```erlang -priv_inaccessible_test() -> - Node = hb_http_server:start_node( - #{ - test_config_item => <<"test">>, - priv_key => <<"BAD">> - } - ), - {ok, Res} = hb_http:get(Node, <<"/~meta@1.0/info">>, #{}), - ?event({res, Res}), - ?assertEqual(<<"test">>, hb_ao:get(<<"test_config_item">>, Res, #{})), - ?assertEqual(not_found, hb_ao:get(<<"priv_key">>, Res, #{})). -``` - -### unauthorized_set_node_msg_fails_test - -Test that we can get the node message. -Test that we can't get the node message if the requested key is private. -Test that we can't set the node message if the request is not signed by -Test that we can set the node message if the request is signed by the - -```erlang -unauthorized_set_node_msg_fails_test() -> - StoreOpts = #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST">> - }, - Node = hb_http_server:start_node(Opts = #{ store => StoreOpts, priv_wallet => ar_wallet:new() }), - {error, _} = - hb_http:post( - Node, - hb_message:commit( - #{ - <<"path">> => <<"/~meta@1.0/info">>, - <<"evil_config_item">> => <<"BAD">> - }, - Opts#{ priv_wallet => ar_wallet:new() } - ), - #{} - ), - {ok, Res} = hb_http:get(Node, <<"/~meta@1.0/info">>, Opts), - ?assertEqual(not_found, hb_ao:get(<<"evil_config_item">>, Res, Opts)), - ?assertEqual(0, length(hb_ao:get(<<"node_history">>, Res, [], Opts))). 
-``` - -### authorized_set_node_msg_succeeds_test - -Test that we can get the node message. -Test that we can't get the node message if the requested key is private. -Test that we can't set the node message if the request is not signed by -Test that we can set the node message if the request is signed by the -Test that an uninitialized node will not run computation. - -```erlang -authorized_set_node_msg_succeeds_test() -> - StoreOpts = #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST">> - }, - Owner = ar_wallet:new(), - Node = hb_http_server:start_node( - Opts = #{ - operator => hb_util:human_id(ar_wallet:to_address(Owner)), - test_config_item => <<"test">>, - store => StoreOpts - } - ), - {ok, SetRes} = - hb_http:post( - Node, - hb_message:commit( - #{ - <<"path">> => <<"/~meta@1.0/info">>, - <<"test_config_item">> => <<"test2">> - }, - Opts#{ priv_wallet => Owner } - ), - Opts - ), - ?event({res, SetRes}), - {ok, Res} = hb_http:get(Node, <<"/~meta@1.0/info">>, Opts), - ?event({res, Res}), - ?assertEqual(<<"test2">>, hb_ao:get(<<"test_config_item">>, Res, Opts)), - ?assertEqual(1, length(hb_ao:get(<<"node_history">>, Res, [], Opts))). -``` - -### uninitialized_node_test - -Test that we can get the node message. -Test that we can't get the node message if the requested key is private. -Test that we can't set the node message if the request is not signed by -Test that we can set the node message if the request is signed by the -Test that an uninitialized node will not run computation. -Test that a permanent node message cannot be changed. - -```erlang -uninitialized_node_test() -> - Node = hb_http_server:start_node(#{ initialized => false }), - {error, Res} = hb_http:get(Node, <<"/key1?1.key1=value1">>, #{}), - ?event({res, Res}), - ?assertEqual(<<"Node must be initialized before use.">>, Res). -``` - -### permanent_node_message_test - -Test that we can get the node message. 
-Test that we can't get the node message if the requested key is private. -Test that we can't set the node message if the request is not signed by -Test that we can set the node message if the request is signed by the -Test that an uninitialized node will not run computation. -Test that a permanent node message cannot be changed. -Test that we can claim the node correctly and set the node message after. - -```erlang -permanent_node_message_test() -> - StoreOpts = #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST">> - }, - Owner = ar_wallet:new(), - Node = hb_http_server:start_node( - Opts =#{ - operator => <<"unclaimed">>, - initialized => false, - test_config_item => <<"test">>, - store => StoreOpts - } - ), - {ok, SetRes1} = - hb_http:post( - Node, - hb_message:commit( - #{ - <<"path">> => <<"/~meta@1.0/info">>, - <<"test_config_item">> => <<"test2">>, - initialized => <<"permanent">> - }, - Opts#{ priv_wallet => Owner } - ), - Opts - ), - ?event({set_res, SetRes1}), - {ok, Res} = hb_http:get(Node, #{ <<"path">> => <<"/~meta@1.0/info">> }, Opts), - ?event({get_res, Res}), - ?assertEqual(<<"test2">>, hb_ao:get(<<"test_config_item">>, Res, Opts)), - {error, SetRes2} = - hb_http:post( - Node, - hb_message:commit( - #{ - <<"path">> => <<"/~meta@1.0/info">>, - <<"test_config_item">> => <<"bad_value">> - }, - Opts#{ priv_wallet => Owner } - ), - Opts - ), - ?event({set_res, SetRes2}), - {ok, Res2} = hb_http:get(Node, #{ <<"path">> => <<"/~meta@1.0/info">> }, Opts), - ?event({get_res, Res2}), - ?assertEqual(<<"test2">>, hb_ao:get(<<"test_config_item">>, Res2, Opts)), - ?assertEqual(1, length(hb_ao:get(<<"node_history">>, Res2, [], Opts))). -``` - -### claim_node_test - -Test that we can get the node message. -Test that we can't get the node message if the requested key is private. 
-Test that we can't set the node message if the request is not signed by -Test that we can set the node message if the request is signed by the -Test that an uninitialized node will not run computation. -Test that a permanent node message cannot be changed. -Test that we can claim the node correctly and set the node message after. - -```erlang -claim_node_test() -> - StoreOpts = #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST">> - }, - Owner = ar_wallet:new(), - Address = ar_wallet:to_address(Owner), - Node = hb_http_server:start_node( - Opts = #{ - operator => unclaimed, - test_config_item => <<"test">>, - store => StoreOpts - } - ), - {ok, SetRes} = - hb_http:post( - Node, - hb_message:commit( - #{ - <<"path">> => <<"/~meta@1.0/info">>, - <<"operator">> => hb_util:human_id(Address) - }, - Opts#{ priv_wallet => Owner} - ), - Opts - ), - ?event({res, SetRes}), - {ok, Res} = hb_http:get(Node, <<"/~meta@1.0/info">>, Opts), - ?event({res, Res}), - ?assertEqual(hb_util:human_id(Address), hb_ao:get(<<"operator">>, Res, Opts)), - {ok, SetRes2} = - hb_http:post( - Node, - hb_message:commit( - #{ - <<"path">> => <<"/~meta@1.0/info">>, - <<"test_config_item">> => <<"test2">> - }, - Opts#{ priv_wallet => Owner } - ), - Opts - ), - ?event({res, SetRes2}), - {ok, Res2} = hb_http:get(Node, <<"/~meta@1.0/info">>, Opts), - ?event({res, Res2}), - ?assertEqual(<<"test2">>, hb_ao:get(<<"test_config_item">>, Res2, Opts)), - ?assertEqual(2, length(hb_ao:get(<<"node_history">>, Res2, [], Opts))). -%% Test that we can use a hook upon a request. -``` - -### request_response_hooks_test - -```erlang -request_response_hooks_test() -> - Parent = self(), - Node = hb_http_server:start_node( - #{ - on => - #{ - <<"request">> => - #{ - <<"device">> => #{ - <<"request">> => - fun(_, #{ <<"body">> := Msgs }, _) -> - Parent ! 
{hook, request}, - {ok, #{ <<"body">> => Msgs} } - end - } - }, - <<"response">> => - #{ - <<"device">> => #{ - <<"response">> => - fun(_, #{ <<"body">> := Msgs }, _) -> - Parent ! {hook, response}, - {ok, #{ <<"body">> => Msgs} } - end - } - } - }, - http_extra_opts => #{ - <<"cache-control">> => [<<"no-store">>, <<"no-cache">>] - } - }), - hb_http:get(Node, <<"/~meta@1.0/info">>, #{}), - % Receive both of the responses from the hooks, if possible. -``` - -### halt_request_test - -Test that we can halt a request if the hook returns an error. -Test that a hook can modify a request. - -```erlang -halt_request_test() -> - Node = hb_http_server:start_node( - #{ - on => - #{ - <<"request">> => - #{ - <<"device">> => #{ - <<"request">> => - fun(_, _, _) -> - {error, <<"Bad">>} - end - } - } - } - }), - {error, Res} = hb_http:get(Node, <<"/~meta@1.0/info">>, #{}), - ?assertEqual(<<"Bad">>, Res). -``` - -### modify_request_test - -Test that we can halt a request if the hook returns an error. -Test that a hook can modify a request. -Test that version information is available and returned correctly. - -```erlang -modify_request_test() -> - Node = hb_http_server:start_node( - #{ - on => - #{ - <<"request">> => - #{ - <<"device">> => #{ - <<"request">> => - fun(_, #{ <<"body">> := [M|Ms] }, _) -> - { - ok, - #{ - <<"body">> => - [ - M#{ - <<"added">> => - <<"value">> - } - | - Ms - ] - } - } - end - } - } - } - }), - {ok, Res} = hb_http:get(Node, <<"/added">>, #{}), - ?assertEqual(<<"value">>, Res). -``` - -### buildinfo_test - -Test that we can halt a request if the hook returns an error. -Test that a hook can modify a request. -Test that version information is available and returned correctly. 
- -```erlang -buildinfo_test() -> - Node = hb_http_server:start_node(#{}), - ?assertEqual( - {ok, <<"HyperBEAM">>}, - hb_http:get(Node, <<"/~meta@1.0/build/node">>, #{}) - ), - ?assertEqual( - {ok, ?HYPERBEAM_VERSION}, - hb_http:get(Node, <<"/~meta@1.0/build/version">>, #{}) - ), - ?assertEqual( - {ok, ?HB_BUILD_SOURCE}, - hb_http:get(Node, <<"/~meta@1.0/build/source">>, #{}) - ), - ?assertEqual( - {ok, ?HB_BUILD_SOURCE_SHORT}, - hb_http:get(Node, <<"/~meta@1.0/build/source-short">>, #{}) - ), - ?assertEqual( - {ok, ?HB_BUILD_TIME}, - hb_http:get(Node, <<"/~meta@1.0/build/build-time">>, #{}) - ). -``` - ---- - -*Generated from [dev_meta.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_meta.erl)* diff --git a/docs/book/src/dev_monitor.erl.md b/docs/book/src/dev_monitor.erl.md deleted file mode 100644 index 72bfdb2ab..000000000 --- a/docs/book/src/dev_monitor.erl.md +++ /dev/null @@ -1,75 +0,0 @@ -# dev_monitor - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_monitor.erl) - -A simple device that allows flexible monitoring of a process execution. -Adding a dev_monitor device to a process will cause the listed functions -to be called with the current process state during each pass. The monitor -functions must not mutate state. - ---- - -## Exported Functions - -- `add_monitor/2` -- `end_of_schedule/1` -- `execute/2` -- `init/3` -- `uses/0` - ---- - -### init - -```erlang -init(State, _, InitState) -> - {ok, State#{ <<"monitors">> => InitState }}. -``` - -### execute - -```erlang -execute(Message, State = #{ <<"pass">> := Pass, <<"passes">> := Passes }) when Pass == Passes -> - signal(State, {message, Message}); -``` - -### execute - -```erlang -execute(_, S) -> {ok, S}. -``` - -### add_monitor - -```erlang -add_monitor(Mon, State = #{ <<"monitors">> := Monitors }) -> - {ok, State#{ <<"monitors">> => [Mon | Monitors] }}. -``` - -### end_of_schedule - -```erlang -end_of_schedule(State) -> signal(State, end_of_schedule). 
-``` - -### signal - -```erlang -signal(State = #{ <<"monitors">> := StartingMonitors }, Signal) -> - RemainingMonitors = - lists:filter( - fun(Mon) -> - case Mon(State, Signal) of - done -> false; - _ -> true - end - end, - StartingMonitors - ), - ?event({remaining_monitors, length(RemainingMonitors)}), - {ok, State#{ <<"monitors">> := RemainingMonitors }}. -``` - ---- - -*Generated from [dev_monitor.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_monitor.erl)* diff --git a/docs/book/src/dev_multipass.erl.md b/docs/book/src/dev_multipass.erl.md deleted file mode 100644 index 9c6278798..000000000 --- a/docs/book/src/dev_multipass.erl.md +++ /dev/null @@ -1,77 +0,0 @@ -# dev_multipass - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_multipass.erl) - -A device that triggers repass events until a certain counter has been -reached. This is useful for certain types of stacks that need various -execution passes to be completed in sequence across devices. - ---- - -## Exported Functions - -- `info/1` - ---- - -### info - -A device that triggers repass events until a certain counter has been - -```erlang -info(_M1) -> - #{ - handler => fun handle/4 - }. -``` - -### handle - -Forward the keys function to the message device, handle all others - -```erlang -handle(<<"keys">>, M1, _M2, Opts) -> - dev_message:keys(M1, Opts); -``` - -### handle - -Forward the keys function to the message device, handle all others - -```erlang -handle(<<"set">>, M1, M2, Opts) -> - dev_message:set(M1, M2, Opts); -``` - -### handle - -Forward the keys function to the message device, handle all others - -```erlang -handle(_Key, M1, _M2, Opts) -> - Passes = hb_ao:get(<<"passes">>, {as, dev_message, M1}, 1, Opts), - Pass = hb_ao:get(<<"pass">>, {as, dev_message, M1}, 1, Opts), - case Pass < Passes of - true -> {pass, M1}; - false -> {ok, M1} - end. 
-``` - -### basic_multipass_test - -```erlang -basic_multipass_test() -> - Msg1 = - #{ - <<"device">> => <<"multipass@1.0">>, - <<"passes">> => 2, - <<"pass">> => 1 - }, - Msg2 = Msg1#{ <<"pass">> => 2 }, - ?assertMatch({pass, _}, hb_ao:resolve(Msg1, <<"Compute">>, #{})), - ?event(alive), -``` - ---- - -*Generated from [dev_multipass.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_multipass.erl)* diff --git a/docs/book/src/dev_name.erl.md b/docs/book/src/dev_name.erl.md deleted file mode 100644 index 5d540157e..000000000 --- a/docs/book/src/dev_name.erl.md +++ /dev/null @@ -1,200 +0,0 @@ -# dev_name - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_name.erl) - -A device for resolving names to their corresponding values, through the -use of a `resolver` interface. Each `resolver` is a message that can be -given a `key` and returns an associated value. The device will attempt to -match the key against each resolver in turn, and return the value of the -first resolver that matches. - ---- - -## Exported Functions - -- `info/1` - ---- - -### info - -A device for resolving names to their corresponding values, through the -Configure the `default` key to proxy to the `resolver/4` function. - -```erlang -info(_) -> - #{ - default => fun resolve/4, - excludes => [<<"keys">>, <<"set">>] - }. -``` - -### resolve - -Resolve a name to its corresponding value. The name is given by the key - -```erlang -resolve(Key, _, Req, Opts) -> - Resolvers = hb_opts:get(name_resolvers, [], Opts), - ?event({resolvers, Resolvers}), - case match_resolver(Key, Resolvers, Opts) of - {ok, Resolved} -> - case hb_util:atom(hb_ao:get(<<"load">>, Req, true, Opts)) of - false -> - {ok, Resolved}; - true -> - hb_cache:read(Resolved, Opts) - end; - not_found -> - not_found - end. -``` - -### match_resolver - -Find the first resolver that matches the key and return its value. 
- -```erlang -match_resolver(_Key, [], _Opts) -> - not_found; -``` - -### match_resolver - -Find the first resolver that matches the key and return its value. - -```erlang -match_resolver(Key, [Resolver | Resolvers], Opts) -> - case execute_resolver(Key, Resolver, Opts) of - {ok, Value} -> - ?event({resolver_found, {key, Key}, {value, Value}}), - {ok, Value}; - _ -> - match_resolver(Key, Resolvers, Opts) - end. -``` - -### execute_resolver - -Execute a resolver with the given key and return its value. - -```erlang -execute_resolver(Key, Resolver, Opts) -> - ?event({executing, {key, Key}, {resolver, Resolver}}), - hb_ao:resolve( - Resolver, - #{ <<"path">> => <<"lookup">>, <<"key">> => Key }, - Opts - ). -``` - -### no_resolvers_test - -```erlang -no_resolvers_test() -> - ?assertEqual( - not_found, - resolve(<<"hello">>, #{}, #{}, #{ only => local }) - ). -``` - -### message_lookup_device_resolver - -```erlang -message_lookup_device_resolver(Msg) -> - #{ - <<"device">> => #{ - <<"lookup">> => fun(_, Req, Opts) -> - Key = hb_ao:get(<<"key">>, Req, Opts), - ?event({test_resolver_executing, {key, Key}, {req, Req}, {msg, Msg}}), - case maps:get(Key, Msg, not_found) of - not_found -> - ?event({test_resolver_not_found, {key, Key}, {msg, Msg}}), - {error, not_found}; - Value -> - ?event({test_resolver_found, {key, Key}, {value, Value}}), - {ok, Value} - end - end - } - }. -``` - -### single_resolver_test - -```erlang -single_resolver_test() -> - ?assertEqual( - {ok, <<"world">>}, - resolve( - <<"hello">>, - #{}, - #{ <<"load">> => false }, - #{ - name_resolvers => [ - message_lookup_device_resolver( - #{<<"hello">> => <<"world">>} - ) - ] - } - ) - ). 
-``` - -### multiple_resolvers_test - -```erlang -multiple_resolvers_test() -> - ?assertEqual( - {ok, <<"bigger-world">>}, - resolve( - <<"hello">>, - #{}, - #{ <<"load">> => false }, - #{ - name_resolvers => [ - message_lookup_device_resolver( - #{<<"irrelevant">> => <<"world">>} - ), - message_lookup_device_resolver( - #{<<"hello">> => <<"bigger-world">>} - ) - ] - } - ) - ). -``` - -### load_and_execute_test - -Test that we can resolve messages from a name loaded with the device. - -```erlang -load_and_execute_test() -> - TestKey = <<"test-key", (hb_util:bin(erlang:system_time(millisecond)))/binary>>, - {ok, ID} = hb_cache:write( - #{ - <<"deep">> => <<"PING">> - }, - #{} - ), - ?assertEqual( - {ok, <<"PING">>}, - hb_ao:resolve_many( - [ - #{ <<"device">> => <<"name@1.0">> }, - #{ <<"path">> => TestKey }, - #{ <<"path">> => <<"deep">> } - ], - #{ - name_resolvers => [ - message_lookup_device_resolver(#{ <<"irrelevant">> => ID }), - message_lookup_device_resolver(#{ TestKey => ID }) - ] - } - ) -``` - ---- - -*Generated from [dev_name.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_name.erl)* diff --git a/docs/book/src/dev_node_process.erl.md b/docs/book/src/dev_node_process.erl.md deleted file mode 100644 index 34382b371..000000000 --- a/docs/book/src/dev_node_process.erl.md +++ /dev/null @@ -1,204 +0,0 @@ -# dev_node_process - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_node_process.erl) - -A device that implements the singleton pattern for processes specific -to an individual node. This device uses the `local-name@1.0` device to -register processes with names locally, persistenting them across reboots. -Definitions of singleton processes are expected to be found with their -names in the `node_processes` section of the node message. - ---- - -## Exported Functions - -- `info/1` - ---- - -### info - -A device that implements the singleton pattern for processes specific -Register a default handler for the device. 
Inherits `keys` and `set` - -```erlang -info(_Opts) -> - #{ - default => fun lookup/4, - excludes => [<<"set">>, <<"keys">>] - }. -``` - -### lookup - -Lookup a process by name. - -```erlang -lookup(Name, _Base, Req, Opts) -> - ?event(node_process, {lookup, {name, Name}}), - LookupRes = - hb_ao:resolve( - #{ <<"device">> => <<"local-name@1.0">> }, - #{ <<"path">> => <<"lookup">>, <<"key">> => Name, <<"load">> => true }, - Opts - ), - case LookupRes of - {ok, ProcessID} -> - hb_cache:read(ProcessID, Opts); - {error, not_found} -> - case hb_ao:get(<<"spawn">>, Req, true, Opts) of - true -> - spawn_register(Name, Opts); - false -> - {error, not_found} - end - end. -``` - -### spawn_register - -Spawn a new process according to the process definition found in the - -```erlang -spawn_register(Name, Opts) -> - case hb_opts:get(node_processes, #{}, Opts) of - #{ Name := BaseDef } -> - % We have found the base process definition. Augment it with the - % node's address as necessary, then commit to the result. -``` - -### augment_definition - -Augment the given process definition with the node's address. - -```erlang -augment_definition(BaseDef, Opts) -> - Address = - hb_util:human_id( - ar_wallet:to_address( - hb_opts:get(priv_wallet, no_viable_wallet, Opts) - ) - ), - SchedulersFromBase = - hb_util:binary_to_addresses( - hb_ao:get(<<"scheduler">>, BaseDef, <<>>, Opts) - ), - AuthoritiesFromBase = - hb_util:binary_to_addresses( - hb_ao:get(<<"authority">>, BaseDef, <<>>, Opts) - ), - Schedulers = (SchedulersFromBase -- [Address]) ++ [Address], - Authorities = (AuthoritiesFromBase -- [Address]) ++ [Address], - % Normalize the scheduler and authority lists to binary strings. -``` - -### generate_test_opts - -Helper function to generate a test environment and its options. 
- -```erlang -generate_test_opts() -> - {ok, Module} = file:read_file(<<"test/test.lua">>), - generate_test_opts(#{ - ?TEST_NAME => #{ - <<"device">> => <<"process@1.0">>, - <<"execution-device">> => <<"lua@5.3a">>, - <<"scheduler-device">> => <<"scheduler@1.0">>, - <<"module">> => #{ - <<"content-type">> => <<"text/x-lua">>, - <<"body">> => Module - } - } - }). -``` - -### generate_test_opts - -```erlang -generate_test_opts(Defs) -> - #{ - node_processes => Defs, - priv_wallet => ar_wallet:new() - }. -``` - -### lookup_no_spawn_test - -```erlang -lookup_no_spawn_test() -> - Opts = generate_test_opts(), - ?assertEqual( - {error, not_found}, - lookup(<<"name1">>, #{}, #{}, Opts) - ). -``` - -### lookup_spawn_test - -```erlang -lookup_spawn_test() -> - Opts = generate_test_opts(), - Res1 = {_, Process1} = - hb_ao:resolve( - #{ <<"device">> => <<"node-process@1.0">> }, - ?TEST_NAME, - Opts - ), - ?assertMatch( - {ok, #{ <<"device">> := <<"process@1.0">> }}, - Res1 - ), - {ok, Process2} = hb_ao:resolve( - #{ <<"device">> => <<"node-process@1.0">> }, - ?TEST_NAME, - Opts - ), - ?assertEqual( - hb_cache:ensure_all_loaded(Process1, Opts), - hb_cache:ensure_all_loaded(Process2, Opts) - ). -``` - -### lookup_execute_test - -Test that a process can be spawned, executed upon, and its result retrieved. 
- -```erlang -lookup_execute_test() -> - Opts = generate_test_opts(), - Res1 = - hb_ao:resolve_many( - [ - #{ <<"device">> => <<"node-process@1.0">> }, - ?TEST_NAME, - #{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - hb_message:commit( - #{ - <<"path">> => <<"compute">>, - <<"test-key">> => <<"test-value">> - }, - Opts - ) - } - ], - Opts - ), - ?assertMatch( - {ok, #{ <<"slot">> := 1 }}, - Res1 - ), - ?assertMatch( - 42, - hb_ao:get( - << ?TEST_NAME/binary, "/now/results/output/body" >>, - #{ <<"device">> => <<"node-process@1.0">> }, - Opts - ) -``` - ---- - -*Generated from [dev_node_process.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_node_process.erl)* diff --git a/docs/book/src/dev_p4.erl.md b/docs/book/src/dev_p4.erl.md deleted file mode 100644 index 6964fac5a..000000000 --- a/docs/book/src/dev_p4.erl.md +++ /dev/null @@ -1,308 +0,0 @@ -# dev_p4 - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_p4.erl) - -The HyperBEAM core payment ledger. This module allows the operator to -specify another device that can act as a pricing mechanism for transactions -on the node, as well as orchestrating a payment ledger to calculate whether -the node should fulfil services for users. -The device requires the following node message settings in order to function: -- `p4_pricing-device`: The device that will estimate the cost of a request. -- `p4_ledger-device`: The device that will act as a payment ledger. -The pricing device should implement the following keys: -
-            `GET /estimate?type=pre|post&body=[...]&request=RequestMessage`
-            `GET /price?type=pre|post&body=[...]&request=RequestMessage`
-
-The `body` key is used to pass either the request or response messages to the -device. The `type` key is used to specify whether the inquiry is for a request -(pre) or a response (post) object. Requests carry lists of messages that will -be executed, while responses carry the results of the execution. The `price` -key may return `infinity` if the node will not serve a user under any -circumstances. Else, the value returned by the `price` key will be passed to -the ledger device as the `amount` key. -A ledger device should implement the following keys: -
-            `POST /credit?message=PaymentMessage&request=RequestMessage`
-            `POST /charge?amount=PriceMessage&request=RequestMessage`
-            `GET /balance?request=RequestMessage`
-
-The `type` key is optional and defaults to `pre`. If `type` is set to `post`, -the charge must be applied to the ledger, whereas the `pre` type is used to -check whether the charge would succeed before execution. - ---- - -## Exported Functions - -- `balance/3` -- `request/3` -- `response/3` - ---- - -### request - -The HyperBEAM core payment ledger. This module allows the operator to -Estimate the cost of a transaction and decide whether to proceed with - -```erlang -request(State, Raw, NodeMsg) -> - PricingDevice = hb_ao:get(<<"pricing-device">>, State, false, NodeMsg), - LedgerDevice = hb_ao:get(<<"ledger-device">>, State, false, NodeMsg), - Messages = hb_ao:get(<<"body">>, Raw, NodeMsg#{ hashpath => ignore }), - Request = hb_ao:get(<<"request">>, Raw, NodeMsg), - IsChargable = is_chargable_req(Request, NodeMsg), - ?event(payment, - {preprocess_with_devices, - PricingDevice, - LedgerDevice, - {chargable, IsChargable} - } - ), - case {IsChargable, (PricingDevice =/= false) and (LedgerDevice =/= false)} of - {false, _} -> - ?event(payment, non_chargable_route), - {ok, #{ <<"body">> => Messages }}; - {true, false} -> - ?event(payment, {p4_pre_pricing_response, {error, <<"infinity">>}}), - {ok, #{ <<"body">> => Messages }}; - {true, true} -> - PricingMsg = State#{ <<"device">> => PricingDevice }, - LedgerMsg = State#{ <<"device">> => LedgerDevice }, - PricingReq = #{ - <<"path">> => <<"estimate">>, - <<"request">> => Request, - <<"body">> => Messages - }, - ?event({p4_pricing_request, {devmsg, PricingMsg}, {req, PricingReq}}), - case hb_ao:resolve(PricingMsg, PricingReq, NodeMsg) of - {ok, <<"infinity">>} -> - % The device states that under no circumstances should we - % proceed with the request. -``` - -### response - -Postprocess the request after it has been fulfilled. 
- -```erlang -response(State, RawResponse, NodeMsg) -> - PricingDevice = hb_ao:get(<<"pricing-device">>, State, false, NodeMsg), - LedgerDevice = hb_ao:get(<<"ledger-device">>, State, false, NodeMsg), - Response = - hb_ao:get( - <<"body">>, - RawResponse, - NodeMsg#{ hashpath => ignore } - ), - Request = hb_ao:get(<<"request">>, RawResponse, NodeMsg), - ?event(payment, {post_processing_with_devices, PricingDevice, LedgerDevice}), - ?event({response_hook, {request, Request}, {response, Response}}), - case ((PricingDevice =/= false) and (LedgerDevice =/= false)) andalso - is_chargable_req(Request, NodeMsg) of - false -> - {ok, #{ <<"body">> => Response }}; - true -> - PricingMsg = State#{ <<"device">> => PricingDevice }, - LedgerMsg = State#{ <<"device">> => LedgerDevice }, - PricingReq = #{ - <<"path">> => <<"price">>, - <<"request">> => Request, - <<"body">> => Response - }, - ?event({post_pricing_request, PricingReq}), - PricingRes = - case hb_ao:resolve(PricingMsg, PricingReq, NodeMsg) of - {error, _Error} -> - % The pricing device is unable to give us a cost for - % the request, so we try to estimate it instead. -``` - -### balance - -Get the balance of a user in the ledger. - -```erlang -balance(_, Req, NodeMsg) -> - case dev_hook:find(<<"request">>, NodeMsg) of - [] -> - {error, <<"No request hook found.">>}; - [Handler] -> - LedgerDevice = - hb_ao:get(<<"ledger-device">>, Handler, false, NodeMsg), - LedgerMsg = Handler#{ <<"device">> => LedgerDevice }, - LedgerReq = #{ - <<"path">> => <<"balance">>, - <<"request">> => Req - }, - ?event({ledger_message, {ledger_msg, LedgerMsg}}), - case hb_ao:resolve(LedgerMsg, LedgerReq, NodeMsg) of - {ok, Balance} -> - {ok, Balance}; - {error, Error} -> - {error, Error} - end - end. 
-``` - -### is_chargable_req - -The node operator may elect to make certain routes non-chargable, using - -```erlang -is_chargable_req(Req, NodeMsg) -> - NonChargableRoutes = - hb_opts:get( - p4_non_chargable_routes, - ?DEFAULT_NON_CHARGABLE_ROUTES, - NodeMsg - ), - Matches = - dev_router:match( - #{ <<"routes">> => NonChargableRoutes }, - Req, - NodeMsg - ), - ?event( - { - is_chargable, - {non_chargable_routes, NonChargableRoutes}, - {req, Req}, - {matches, Matches} - } - ), - case Matches of - {error, no_matching_route} -> true; - _ -> false - end. -``` - -### test_opts - -```erlang -test_opts(Opts) -> - test_opts(Opts, <<"faff@1.0">>). -``` - -### test_opts - -```erlang -test_opts(Opts, PricingDev) -> - test_opts(Opts, PricingDev, <<"faff@1.0">>). -``` - -### test_opts - -```erlang -test_opts(Opts, PricingDev, LedgerDev) -> - ProcessorMsg = - #{ - <<"device">> => <<"p4@1.0">>, - <<"pricing-device">> => PricingDev, - <<"ledger-device">> => LedgerDev - }, - Opts#{ - on => #{ - <<"request">> => ProcessorMsg, - <<"response">> => ProcessorMsg - } - }. -``` - -### faff_test - -Simple test of p4's capabilities with the `faff@1.0` device. - -```erlang -faff_test() -> - GoodWallet = ar_wallet:new(), - BadWallet = ar_wallet:new(), - Node = hb_http_server:start_node( - test_opts( - #{ - faff_allow_list => - [hb_util:human_id(ar_wallet:to_address(GoodWallet))] - } - ) - ), - Req = #{ - <<"path">> => <<"/greeting">>, - <<"greeting">> => <<"Hello, world!">> - }, - GoodSignedReq = hb_message:commit(Req, GoodWallet), - ?event({req, GoodSignedReq}), - BadSignedReq = hb_message:commit(Req, BadWallet), - ?event({req, BadSignedReq}), - {ok, Res} = hb_http:get(Node, GoodSignedReq, #{}), - ?event(payment, {res, Res}), - ?assertEqual(<<"Hello, world!">>, Res), - ?assertMatch({error, _}, hb_http:get(Node, BadSignedReq, #{})). -``` - -### non_chargable_route_test - -Test that a non-chargable route is not charged for. 
- -```erlang -non_chargable_route_test() -> - Wallet = ar_wallet:new(), - Processor = - #{ - <<"device">> => <<"p4@1.0">>, - <<"ledger-device">> => <<"simple-pay@1.0">>, - <<"pricing-device">> => <<"simple-pay@1.0">> - }, - Node = hb_http_server:start_node( - #{ - p4_non_chargable_routes => - [ - #{ <<"template">> => <<"/~p4@1.0/balance">> }, - #{ <<"template">> => <<"/~meta@1.0/*/*">> } - ], - on => #{ - <<"request">> => Processor, - <<"response">> => Processor - }, - operator => hb:address() - } - ), - Req = #{ - <<"path">> => <<"/~p4@1.0/balance">> - }, - GoodSignedReq = hb_message:commit(Req, Wallet), - Res = hb_http:get(Node, GoodSignedReq, #{}), - ?event({res1, Res}), - ?assertMatch({ok, 0}, Res), - Req2 = #{ <<"path">> => <<"/~meta@1.0/info/operator">> }, - GoodSignedReq2 = hb_message:commit(Req2, Wallet), - Res2 = hb_http:get(Node, GoodSignedReq2, #{}), - ?event({res2, Res2}), - OperatorAddress = hb_util:human_id(hb:address()), - ?assertEqual({ok, OperatorAddress}, Res2), - Req3 = #{ <<"path">> => <<"/~scheduler@1.0">> }, - BadSignedReq3 = hb_message:commit(Req3, Wallet), - Res3 = hb_http:get(Node, BadSignedReq3, #{}), - ?event({res3, Res3}), - ?assertMatch({error, _}, Res3). -``` - -### hyper_token_ledger_test_ - -Ensure that Lua scripts can be used as pricing and ledger devices. Our - -```erlang -hyper_token_ledger_test_() -> - {timeout, 60, fun hyper_token_ledger/0}. -``` - -### hyper_token_ledger - -```erlang -hyper_token_ledger() -> - % Create the wallets necessary and read the files containing the scripts. 
-``` - ---- - -*Generated from [dev_p4.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_p4.erl)* diff --git a/docs/book/src/dev_patch.erl.md b/docs/book/src/dev_patch.erl.md deleted file mode 100644 index 7fc36cc2e..000000000 --- a/docs/book/src/dev_patch.erl.md +++ /dev/null @@ -1,288 +0,0 @@ -# dev_patch - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_patch.erl) - -A device that can be used to reorganize a message: Moving data from -one path inside it to another. This device's function runs in two modes: -1. When using `all` to move all data at the path given in `from` to the - path given in `to`. -2. When using `patches` to move all submessages in the source to the target, - _if_ they have a `method` key of `PATCH` or a `device` key of `patch@1.0`. -Source and destination paths may be prepended by `base:` or `req:` keys to -indicate that they are relative to either of the message's that the -computation is being performed on. -The search order for finding the source and destination keys is as follows, -where `X` is either `from` or `to`: -1. The `patch-X` key of the execution message. -2. The `X` key of the execution message. -3. The `patch-X` key of the request message. -4. The `X` key of the request message. -Additionally, this device implements the standard computation device keys, -allowing it to be used as an element of an execution stack pipeline, etc. - ---- - -## Exported Functions - -- `all/3` -- `compute/3` -- `init/3` -- `normalize/3` -- `patches/3` -- `snapshot/3` - ---- - -### init - -A device that can be used to reorganize a message: Moving data from -Necessary hooks for compliance with the `execution-device` standard. - -```erlang -init(Msg1, _Msg2, _Opts) -> {ok, Msg1}. -``` - -### normalize - -A device that can be used to reorganize a message: Moving data from -Necessary hooks for compliance with the `execution-device` standard. - -```erlang -normalize(Msg1, _Msg2, _Opts) -> {ok, Msg1}. 
-``` - -### snapshot - -A device that can be used to reorganize a message: Moving data from -Necessary hooks for compliance with the `execution-device` standard. - -```erlang -snapshot(Msg1, _Msg2, _Opts) -> {ok, Msg1}. -``` - -### compute - -A device that can be used to reorganize a message: Moving data from -Necessary hooks for compliance with the `execution-device` standard. -Get the value found at the `patch-from` key of the message, or the - -```erlang -compute(Msg1, Msg2, Opts) -> patches(Msg1, Msg2, Opts). -``` - -### all - -A device that can be used to reorganize a message: Moving data from -Necessary hooks for compliance with the `execution-device` standard. -Get the value found at the `patch-from` key of the message, or the - -```erlang -all(Msg1, Msg2, Opts) -> - move(all, Msg1, Msg2, Opts). -``` - -### patches - -Find relevant `PATCH` messages in the given source key of the execution - -```erlang -patches(Msg1, Msg2, Opts) -> - move(patches, Msg1, Msg2, Opts). -``` - -### move - -Unified executor for the `all` and `patches` modes. - -```erlang -move(Mode, Msg1, Msg2, Opts) -> - maybe - % Find the input paths. -``` - -### uninitialized_patch_test - -```erlang -uninitialized_patch_test() -> - InitState = #{ - <<"device">> => <<"patch@1.0">>, - <<"results">> => #{ - <<"outbox">> => #{ - <<"1">> => #{ - <<"method">> => <<"PATCH">>, - <<"prices">> => #{ - <<"apple">> => 100, - <<"banana">> => 200 - } - }, - <<"2">> => #{ - <<"method">> => <<"GET">>, - <<"prices">> => #{ - <<"apple">> => 1000 - } - } - } - }, - <<"other-message">> => <<"other-value">>, - <<"patch-to">> => <<"/">>, - <<"patch-from">> => <<"/results/outbox">> - }, - {ok, ResolvedState} = - hb_ao:resolve( - InitState, - <<"compute">>, - #{} - ), - ?event({resolved_state, ResolvedState}), - ?assertEqual( - 100, - hb_ao:get(<<"prices/apple">>, ResolvedState, #{}) - ), - ?assertMatch( - not_found, - hb_ao:get(<<"results/outbox/1">>, ResolvedState, #{}) - ). 
-``` - -### patch_to_submessage_test - -```erlang -patch_to_submessage_test() -> - InitState = #{ - <<"device">> => <<"patch@1.0">>, - <<"results">> => #{ - <<"outbox">> => #{ - <<"1">> => - hb_message:commit(#{ - <<"method">> => <<"PATCH">>, - <<"prices">> => #{ - <<"apple">> => 100, - <<"banana">> => 200 - } - }, - hb:wallet() - ) - } - }, - <<"state">> => #{ - <<"prices">> => #{ - <<"apple">> => 1000 - } - }, - <<"other-message">> => <<"other-value">>, - <<"patch-to">> => <<"/state">>, - <<"patch-from">> => <<"/results/outbox">> - }, - {ok, ResolvedState} = - hb_ao:resolve( - InitState, - <<"compute">>, - #{} - ), - ?event({resolved_state, ResolvedState}), - ?assertEqual( - 100, - hb_ao:get(<<"state/prices/apple">>, ResolvedState, #{}) - ). -``` - -### all_mode_test - -```erlang -all_mode_test() -> - InitState = #{ - <<"device">> => <<"patch@1.0">>, - <<"input">> => #{ - <<"zones">> => #{ - <<"1">> => #{ - <<"method">> => <<"PATCH">>, - <<"prices">> => #{ - <<"apple">> => 100, - <<"banana">> => 200 - } - }, - <<"2">> => #{ - <<"method">> => <<"GET">>, - <<"prices">> => #{ - <<"orange">> => 300 - } - } - } - }, - <<"state">> => #{ - <<"prices">> => #{ - <<"apple">> => 1000 - } - } - }, - {ok, ResolvedState} = - hb_ao:resolve( - InitState, - #{ - <<"path">> => <<"all">>, - <<"patch-to">> => <<"/state">>, - <<"patch-from">> => <<"/input/zones">> - }, - #{} - ), - ?event({resolved_state, ResolvedState}), - ?assertEqual( - 100, - hb_ao:get(<<"state/1/prices/apple">>, ResolvedState, #{}) - ), - ?assertEqual( - 300, - hb_ao:get(<<"state/2/prices/orange">>, ResolvedState, #{}) - ), - ?assertEqual( - not_found, - hb_ao:get(<<"input/zones">>, ResolvedState, #{}) - ). 
-``` - -### req_prefix_test - -```erlang -req_prefix_test() -> - BaseMsg = #{ - <<"device">> => <<"patch@1.0">>, - <<"state">> => #{ - <<"prices">> => #{ - <<"apple">> => 1000 - } - } - }, - ReqMsg = #{ - <<"path">> => <<"all">>, - <<"patch-from">> => <<"req:/results/outbox/1">>, - <<"patch-to">> => <<"/state">>, - <<"results">> => #{ - <<"outbox">> => #{ - <<"1">> => #{ - <<"method">> => <<"PATCH">>, - <<"prices">> => #{ - <<"apple">> => 100, - <<"banana">> => 200 - } - } - } - } - }, - {ok, ResolvedState} = hb_ao:resolve(BaseMsg, ReqMsg, #{}), - ?event({resolved_state, ResolvedState}), - ?assertEqual( - 100, - hb_ao:get(<<"state/prices/apple">>, ResolvedState, #{}) - ), - ?assertEqual( - 200, - hb_ao:get(<<"state/prices/banana">>, ResolvedState, #{}) - ), - ?assertEqual( - not_found, - hb_ao:get(<<"results/outbox/1">>, ResolvedState, #{}) -``` - ---- - -*Generated from [dev_patch.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_patch.erl)* diff --git a/docs/book/src/dev_poda.erl.md b/docs/book/src/dev_poda.erl.md deleted file mode 100644 index 27df7947a..000000000 --- a/docs/book/src/dev_poda.erl.md +++ /dev/null @@ -1,298 +0,0 @@ -# dev_poda - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_poda.erl) - -A simple exemplar decentralized proof of authority consensus algorithm -A simple exemplar decentralized proof of authority consensus algorithm -for AO processes. This device is split into two flows, spanning three -actions. -Execution flow: -1. Initialization. -2. Validation of incoming messages before execution. -Commitment flow: -1. Adding commitments to results, either on a CU or MU. - ---- - -## Exported Functions - -- `execute/3` -- `init/2` -- `is_user_signed/1` -- `push/3` - ---- - -### init - -A simple exemplar decentralized proof of authority consensus algorithm - -```erlang -init(S, Params) -> - {ok, S, extract_opts(Params)}. 
-``` - -### extract_opts - -```erlang -extract_opts(Params) -> - Authorities = - lists:filtermap( - fun({<<"authority">>, Addr}) -> {true, Addr}; - (_) -> false end, - Params - ), - {_, RawQuorum} = lists:keyfind(<<"quorum">>, 1, Params), - Quorum = binary_to_integer(RawQuorum), - ?event({poda_authorities, Authorities}), - #{ - authorities => Authorities, - quorum => Quorum - }. -``` - -### execute - -```erlang -execute(Outer = #tx { data = #{ <<"body">> := Msg } }, S = #{ <<"pass">> := 1 }, Opts) -> - case is_user_signed(Msg) of - true -> - {ok, S}; - false -> - case validate(Msg, Opts) of - true -> - ?event({poda_validated, ok}), - % Add the validations to the VFS. -``` - -### execute - -```erlang -execute(_M, S = #{ <<"pass">> := 3, <<"results">> := _Results }, _Opts) -> - {ok, S}; -``` - -### execute - -```erlang -execute(_M, S, _Opts) -> - {ok, S}. -``` - -### validate - -```erlang -validate(Msg, Opts) -> - validate_stage(1, Msg, Opts). -``` - -### validate_stage - -```erlang -validate_stage(1, Msg, Opts) when is_record(Msg, tx) -> - validate_stage(1, Msg#tx.data, Opts); -``` - -### validate_stage - -```erlang -validate_stage(1, #{ <<"commitments">> := Commitments, <<"body">> := Content }, Opts) -> - validate_stage(2, Commitments, Content, Opts); -``` - -### validate_stage - -```erlang -validate_stage(1, _M, _Opts) -> {false, <<"Required PoDA messages missing">>}. -``` - -### validate_stage - -```erlang -validate_stage(2, #tx { data = Commitments }, Content, Opts) -> - validate_stage(2, Commitments, Content, Opts); -``` - -### validate_stage - -```erlang -validate_stage(2, Commitments, Content, Opts) -> - % Ensure that all commitments are valid and signed by a - % trusted authority. 
-``` - -### validate_stage - -```erlang -validate_stage(3, Content, Commitments, Opts = #{ <<"quorum">> := Quorum }) -> - Validations = - lists:filter( - fun({_, Comm}) -> validate_commitment(Content, Comm, Opts) end, - hb_maps:to_list(Commitments, Opts) - ), - ?event({poda_validations, length(Validations)}), - case length(Validations) >= Quorum of - true -> - ?event({poda_quorum_reached, length(Validations)}), - true; - false -> {false, <<"Not enough validations">>} - end. -``` - -### validate_commitment - -```erlang -validate_commitment(Msg, Comm, Opts) -> - MsgID = hb_util:encode(ar_bundles:id(Msg, unsigned)), - AttSigner = hb_util:encode(ar_bundles:signer(Comm)), - ?event({poda_commitment, {signer, AttSigner, hb_maps:get(authorities, Opts, undefined, Opts)}, {msg_id, MsgID}}), - ValidSigner = lists:member(AttSigner, hb_maps:get(authorities, Opts, undefined, Opts)), - ValidSignature = ar_bundles:verify_item(Comm), - RelevantMsg = ar_bundles:id(Comm, unsigned) == MsgID orelse - (lists:keyfind(<<"commitment-for">>, 1, Comm#tx.tags) - == {<<"commitment-for">>, MsgID}) orelse - ar_bundles:member(ar_bundles:id(Msg, unsigned), Comm), - case ValidSigner and ValidSignature and RelevantMsg of - false -> - ?event({poda_commitment_invalid, - {commitment, ar_bundles:id(Comm, signed)}, - {signer, AttSigner}, - {valid_signer, ValidSigner}, - {valid_signature, ValidSignature}, - {relevant_msg, RelevantMsg}} - ), - false; - true -> true - end. -``` - -### return_error - -```erlang -return_error(S = #{ <<"wallet">> := Wallet }, Reason) -> - ?event({poda_return_error, Reason}), - ?debug_wait(10000), - {skip, S#{ - results => #{ - <<"/outbox">> => - ar_bundles:sign_item( - #tx{ - data = Reason, - tags = [{<<"error">>, <<"PoDA">>}] - }, - Wallet - ) - } - }}. 
-``` - -### is_user_signed - -Determines if a user committed - -```erlang -is_user_signed(#tx { data = #{ <<"body">> := Msg } }) -> - ?no_prod(use_real_commitment_detection), - lists:keyfind(<<"from-process">>, 1, Msg#tx.tags) == false; -``` - -### is_user_signed - -Determines if a user committed - -```erlang -is_user_signed(_) -> true. -%%% Commitment flow: Adding commitments to results. -``` - -### push - -Hook used by the MU pathway (currently) to add commitments to an - -```erlang -push(_Item, S = #{ <<"results">> := ResultsMsg }, Opts) -> - NewRes = commit_to_results(ResultsMsg, S, Opts), - {ok, S#{ <<"results">> => NewRes }}. -``` - -### commit_to_results - -Hook used by the MU pathway (currently) to add commitments to an - -```erlang -commit_to_results(Msg, S, Opts) -> - case is_map(Msg#tx.data) of - true -> - % Add commitments to the outbox and spawn items. -``` - -### add_commitments - -```erlang -add_commitments(NewMsg, S = #{ <<"assignment">> := Assignment, <<"store">> := _Store, <<"logger">> := _Logger, <<"wallet">> := Wallet }, Opts) -> - Process = find_process(NewMsg, S), - case is_record(Process, tx) andalso lists:member({<<"device">>, <<"PODA">>}, Process#tx.tags) of - true -> - #{ <<"authorities">> := InitAuthorities, <<"quorum">> := Quorum } = - extract_opts(Process#tx.tags), - ?event({poda_push, InitAuthorities, Quorum}), - % Aggregate validations from other nodes. -``` - -### pfiltermap - -Helper function for parallel execution of commitment - -```erlang -pfiltermap(Pred, List) -> - Parent = self(), - Pids = lists:map(fun(X) -> - spawn_monitor(fun() -> - Result = {X, Pred(X)}, - ?event({pfiltermap, sending_result, self()}), - Parent ! 
{self(), Result} - end) - end, List), - ?event({pfiltermap, waiting_for_results, Pids}), - [ - Res - || - {true, Res} <- - lists:map(fun({Pid, Ref}) -> - receive - {Pid, {_Item, Result}} -> - ?event({pfiltermap, received_result, Pid}), - Result; - % Handle crashes as filterable events - {'DOWN', Ref, process, Pid, _Reason} -> - ?event({pfiltermap, crashed, Pid}), - false; - Other -> - ?event({pfiltermap, unexpected_message, Other}), - false - end - end, Pids) - ]. -``` - -### find_process - -Find the process that this message is targeting, in order to - -```erlang -find_process(Item, #{ <<"logger">> := _Logger, <<"store">> := Store }) -> - case Item#tx.target of - X when X =/= <<>> -> - ?event({poda_find_process, hb_util:id(Item#tx.target)}), - {ok, Proc} = hb_cache:read(Store, hb_util:id(Item#tx.target)), - Proc; - _ -> - case lists:keyfind(<<"type">>, 1, Item#tx.tags) of - {<<"type">>, <<"process">>} -> Item; - _ -> process_not_specified - end -``` - ---- - -*Generated from [dev_poda.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_poda.erl)* diff --git a/docs/book/src/dev_process.erl.md b/docs/book/src/dev_process.erl.md deleted file mode 100644 index 76b2f0283..000000000 --- a/docs/book/src/dev_process.erl.md +++ /dev/null @@ -1,1221 +0,0 @@ -# dev_process - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_process.erl) - -This module contains the device implementation of AO processes -in AO-Core. The core functionality of the module is in 'routing' requests -for different functionality (scheduling, computing, and pushing messages) -to the appropriate device. This is achieved by swapping out the device -of the process message with the necessary component in order to run the -execution, then swapping it back before returning. Computation is supported -as a stack of devices, customizable by the user, while the scheduling -device is (by default) a single device. -This allows the devices to share state as needed. 
Additionally, after each -computation step the device caches the result at a path relative to the -process definition itself, such that the process message's ID can act as an -immutable reference to the process's growing list of interactions. See -`dev_process_cache` for details. -The external API of the device is as follows: -
-GET /ID/Schedule:                Returns the messages in the schedule
-POST /ID/Schedule:               Adds a message to the schedule
-GET /ID/Compute/[IDorSlotNum]:   Returns the state of the process after 
-                                 applying a message
-GET /ID/Now:                     Returns the `/Results` key of the latest 
-                                 computed message
-
-An example process definition will look like this: -
-    Device: Process/1.0
-    Scheduler-Device: Scheduler/1.0
-    Execution-Device: Stack/1.0
-    Execution-Stack: "Scheduler/1.0", "Cron/1.0", "WASM/1.0", "PoDA/1.0"
-    Cron-Frequency: 10-Minutes
-    WASM-Image: WASMImageID
-    PoDA:
-        Device: PoDA/1.0
-        Authority: A
-        Authority: B
-        Authority: C
-        Quorum: 2
-
-Runtime options: - Cache-Frequency: The number of assignments that will be computed - before the full (restorable) state should be cached. - Cache-Keys: A list of the keys that should be cached for all - assignments, in addition to `/Results`. - ---- - -## Exported Functions - -- `as_process/2` -- `as/3` -- `compute/3` -- `dev_test_process/0` -- `do_test_restore/0` -- `ensure_process_key/2` -- `info/1` -- `init/0` -- `now/3` -- `process_id/3` -- `push/3` -- `schedule_aos_call/2` -- `schedule_aos_call/3` -- `schedule/3` -- `slot/3` -- `snapshot/3` -- `test_aos_process/0` -- `test_aos_process/1` -- `test_wasm_process/1` - ---- - -### info - -This module contains the device implementation of AO processes -When the info key is called, we should return the process exports. - -```erlang -info(_Msg1) -> - #{ - worker => fun dev_process_worker:server/3, - grouper => fun dev_process_worker:group/3, - await => fun dev_process_worker:await/5, - excludes => [ - <<"test">>, - <<"init">>, - <<"ping_ping_script">>, - <<"schedule_aos_call">>, - <<"test_aos_process">>, - <<"dev_test_process">>, - <<"test_wasm_process">> - ] - }. 
-``` - -### as - -Return the process state with the device swapped out for the device - -```erlang -as(RawMsg1, Msg2, Opts) -> - {ok, Msg1} = ensure_loaded(RawMsg1, Msg2, Opts), - Key = - hb_ao:get_first( - [ - {{as, <<"message@1.0">>, Msg2}, <<"as">>}, - {{as, <<"message@1.0">>, Msg2}, <<"as-device">>} - ], - <<"execution">>, - Opts - ), - {ok, - hb_util:deep_merge( - ensure_process_key(Msg1, Opts), - #{ - <<"device">> => - hb_maps:get( - << Key/binary, "-device">>, - Msg1, - default_device(Msg1, Key, Opts), - Opts - ), - % Configure input prefix for proper message routing within the - % device - <<"input-prefix">> => - case hb_maps:get(<<"input-prefix">>, Msg1, not_found, Opts) of - not_found -> <<"process">>; - Prefix -> Prefix - end, - % Configure output prefixes for result organization - <<"output-prefixes">> => - hb_maps:get( - <>, - Msg1, - undefined, % Undefined in set will be ignored. -``` - -### default_device - -Returns the default device for a given piece of functionality. Expects - -```erlang -default_device(Msg1, Key, Opts) -> - NormKey = hb_ao:normalize_key(Key), - case {NormKey, hb_util:deep_get(<<"process/variant">>, Msg1, Opts)} of - {<<"execution">>, <<"ao.TN.1">>} -> <<"genesis-wasm@1.0">>; - _ -> default_device_index(NormKey) - end. -``` - -### default_device_index - -```erlang -default_device_index(<<"scheduler">>) -> <<"scheduler@1.0">>; -``` - -### default_device_index - -```erlang -default_device_index(<<"execution">>) -> <<"genesis-wasm@1.0">>; -``` - -### default_device_index - -Wraps functions in the Scheduler device. - -```erlang -default_device_index(<<"push">>) -> <<"push@1.0">>. -``` - -### schedule - -Wraps functions in the Scheduler device. - -```erlang -schedule(Msg1, Msg2, Opts) -> - run_as(<<"scheduler">>, Msg1, Msg2, Opts). -``` - -### slot - -Wraps functions in the Scheduler device. - -```erlang -slot(Msg1, Msg2, Opts) -> - ?event({slot_called, {msg1, Msg1}, {msg2, Msg2}}), - run_as(<<"scheduler">>, Msg1, Msg2, Opts). 
-``` - -### next - -Wraps functions in the Scheduler device. - -```erlang -next(Msg1, _Msg2, Opts) -> - run_as(<<"scheduler">>, Msg1, next, Opts). -``` - -### snapshot - -Wraps functions in the Scheduler device. - -```erlang -snapshot(RawMsg1, _Msg2, Opts) -> - Msg1 = ensure_process_key(RawMsg1, Opts), - {ok, SnapshotMsg} = run_as( - <<"execution">>, - Msg1, - #{ <<"path">> => <<"snapshot">>, <<"mode">> => <<"Map">> }, - Opts#{ - cache_control => [<<"no-cache">>, <<"no-store">>], - hashpath => ignore - } - ), - ProcID = hb_message:id(Msg1, all, Opts), - Slot = hb_ao:get(<<"at-slot">>, {as, <<"message@1.0">>, Msg1}, Opts), - {ok, - hb_private:set( - SnapshotMsg#{ <<"cache-control">> => [<<"store">>] }, - #{ <<"priv/additional-hashpaths">> => - [ - hb_path:to_binary([ProcID, <<"snapshot">>, Slot]) - ] - }, - Opts - ) - }. -``` - -### process_id - -Returns the process ID of the current process. - -```erlang -process_id(Msg1, Msg2, Opts) -> - case hb_ao:get(<<"process">>, Msg1, Opts#{ hashpath => ignore }) of - not_found -> - process_id(ensure_process_key(Msg1, Opts), Msg2, Opts); - Process -> - hb_message:id( - Process, - hb_util:atom(maps:get(<<"commitments">>, Msg2, <<"all">>)), - Opts - ) - end. -``` - -### init - -Before computation begins, a boot phase is required. This phase - -```erlang -init(Msg1, Msg2, Opts) -> - ?event({init_called, {msg1, Msg1}, {msg2, Msg2}}), - {ok, Initialized} = - run_as(<<"execution">>, Msg1, #{ <<"path">> => init }, Opts), - { - ok, - hb_ao:set( - Initialized, - #{ - <<"initialized">> => <<"true">>, - <<"at-slot">> => -1 - }, - Opts - ) - }. -``` - -### compute - -Compute the result of an assignment applied to the process state. 
- -```erlang -compute(Msg1, Msg2, Opts) -> - ProcBase = ensure_process_key(Msg1, Opts), - ProcID = process_id(ProcBase, #{}, Opts), - TargetSlot = - hb_ao:get_first( - [ - {{as, <<"message@1.0">>, Msg2}, <<"compute">>}, - {{as, <<"message@1.0">>, Msg2}, <<"slot">>} - ], - Opts - ), - case TargetSlot of - not_found -> - % The slot is not set, so we need to serve the latest known state. -``` - -### compute_to_slot - -Continually get and apply the next assignment from the scheduler until - -```erlang -compute_to_slot(ProcID, Msg1, Msg2, TargetSlot, Opts) -> - CurrentSlot = hb_ao:get(<<"at-slot">>, Msg1, Opts#{ hashpath => ignore }), - ?event(compute_short, - {starting_compute, - {proc_id, ProcID}, - {current, CurrentSlot}, - {target, TargetSlot} - } - ), - case CurrentSlot of - CurrentSlot when CurrentSlot > TargetSlot -> - % The cache should already have the result, so we should never end up - % here. Depending on the type of process, 'rewinding' may require - % re-computing from a significantly earlier checkpoint, so for now - % we throw an error. -``` - -### compute_slot - -Compute a single slot for a process, given an initialized state. - -```erlang -compute_slot(ProcID, State, RawInputMsg, ReqMsg, Opts) -> - % Ensure that the next slot is the slot that we are expecting, just - % in case there is a scheduler device error. -``` - -### store_result - -Store the resulting state in the cache, potentially with the snapshot - -```erlang -store_result(ForceSnapshot, ProcID, Slot, Msg3, Msg2, Opts) -> - % Cache the `Snapshot' key as frequently as the node is configured to. -``` - -### should_snapshot - -Should we snapshot a new full state result? First, we check if the - -```erlang -should_snapshot(Slot, Msg3, Opts) -> - should_snapshot_slots(Slot, Opts) - orelse should_snapshot_time(Msg3, Opts). -``` - -### should_snapshot_slots - -Calculate if we should snapshot based on the number of slots. 
- -```erlang -should_snapshot_slots(Slot, Opts) -> - case hb_opts:get(process_snapshot_slots, ?DEFAULT_SNAPSHOT_SLOTS, Opts) of - Undef when (Undef == undefined) or (Undef == <<"false">>) -> - false; - RawSnapshotSlots -> - SnapshotSlots = hb_util:int(RawSnapshotSlots), - Slot rem SnapshotSlots == 0 - end. -``` - -### should_snapshot_time - -Calculate if we should snapshot based on the elapsed time since the last - -```erlang -should_snapshot_time(Msg3, Opts) -> - case hb_opts:get(process_snapshot_time, ?DEFAULT_SNAPSHOT_TIME, Opts) of - Undef when (Undef == undefined) or (Undef == <<"false">>) -> - false; - RawSecs -> - Secs = hb_util:int(RawSecs), - case hb_private:get(<<"last-snapshot">>, Msg3, undefined, Opts) of - undefined -> - ?event( - debug_interval, - {no_last_snapshot, - {interval, Secs}, - {msg, Msg3} - } - ), - true; - OldTimestamp -> - ?event( - debug_interval, - {calculating, - {secs, Secs}, - {timestamp, OldTimestamp}, - {now, os:system_time(second)} - } - ), - os:system_time(second) > OldTimestamp + hb_util:int(Secs) - end - end. -``` - -### now - -Returns the known state of the process at either the current slot, or - -```erlang -now(RawMsg1, Msg2, Opts) -> - Msg1 = ensure_process_key(RawMsg1, Opts), - ProcessID = process_id(Msg1, #{}, Opts), - case hb_opts:get(process_now_from_cache, false, Opts) of - false -> - {ok, CurrentSlot} = - hb_ao:resolve( - Msg1, - #{ <<"path">> => <<"slot/current">> }, - Opts - ), - ?event({now_called, {process, ProcessID}, {slot, CurrentSlot}}), - hb_ao:resolve( - Msg1, - #{ <<"path">> => <<"compute">>, <<"slot">> => CurrentSlot }, - Opts - ); - CacheParam -> - % We are serving the latest known state from the cache, rather - % than computing it. 
-``` - -### push - -Recursively push messages to the scheduler until we find a message -Ensure that the process message we have in memory is live and - -```erlang -push(Msg1, Msg2, Opts) -> - ProcBase = ensure_process_key(Msg1, Opts), - run_as(<<"push">>, ProcBase, Msg2, Opts). -``` - -### ensure_loaded - -Recursively push messages to the scheduler until we find a message -Ensure that the process message we have in memory is live and - -```erlang -ensure_loaded(Msg1, Msg2, Opts) -> - % Get the nonce we are currently on and the inbound nonce. -``` - -### without_snapshot - -Remove the `snapshot` key from a message and return it. -Run a message against Msg1, with the device being swapped out for - -```erlang -without_snapshot(Msg, Opts) -> - hb_maps:remove(<<"snapshot">>, Msg, Opts). -``` - -### run_as - -Remove the `snapshot` key from a message and return it. -Run a message against Msg1, with the device being swapped out for - -```erlang -run_as(Key, Msg1, Path, Opts) when not is_map(Path) -> - run_as(Key, Msg1, #{ <<"path">> => Path }, Opts); -``` - -### run_as - -Remove the `snapshot` key from a message and return it. -Run a message against Msg1, with the device being swapped out for - -```erlang -run_as(Key, Msg1, Msg2, Opts) -> - % Store the original device so we can restore it after execution - BaseDevice = hb_maps:get(<<"device">>, Msg1, not_found, Opts), - ?event({running_as, {key, {explicit, Key}}, {req, Msg2}}), - % Prepare the message with the specialized device configuration. -``` - -### as_process - -Change the message to for that has the device set as this module. - -```erlang -as_process(Msg1, Opts) -> - {ok, Proc} = dev_message:set(Msg1, #{ <<"device">> => <<"process@1.0">> }, Opts), - Proc. -``` - -### ensure_process_key - -Helper function to store a copy of the `process` key in the message. 
- -```erlang -ensure_process_key(Msg1, Opts) -> - case hb_maps:get(<<"process">>, Msg1, not_found, Opts) of - not_found -> - % If the message has lost its signers, we need to re-read it from - % the cache. This can happen if the message was 'cast' to a different - % device, leading the signers to be unset. -``` - -### init - -```erlang -init() -> - application:ensure_all_started(hb), - ok. -``` - -### test_base_process - -Generate a process message with a random number, and no - -```erlang -test_base_process() -> - test_base_process(#{}). -``` - -### test_base_process - -```erlang -test_base_process(Opts) -> - Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), - Address = hb_util:human_id(ar_wallet:to_address(Wallet)), - hb_message:commit(#{ - <<"device">> => <<"process@1.0">>, - <<"scheduler-device">> => <<"scheduler@1.0">>, - <<"scheduler-location">> => hb_opts:get(scheduler, Address, Opts), - <<"type">> => <<"Process">>, - <<"test-random-seed">> => rand:uniform(1337) - }, Wallet). -``` - -### test_wasm_process - -```erlang -test_wasm_process(WASMImage) -> - test_wasm_process(WASMImage, #{}). -``` - -### test_wasm_process - -```erlang -test_wasm_process(WASMImage, Opts) -> - Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), - #{ <<"image">> := WASMImageID } = dev_wasm:cache_wasm_image(WASMImage, Opts), - hb_message:commit( - hb_maps:merge( - hb_message:uncommitted(test_base_process(Opts), Opts), - #{ - <<"execution-device">> => <<"stack@1.0">>, - <<"device-stack">> => [<<"wasm-64@1.0">>], - <<"image">> => WASMImageID - }, - Opts - ), - Opts#{ priv_wallet => Wallet} - ). -``` - -### test_aos_process - -Generate a process message with a random number, and the - -```erlang -test_aos_process() -> - test_aos_process(#{}). -``` - -### test_aos_process - -```erlang -test_aos_process(Opts) -> - test_aos_process(Opts, [ - <<"wasi@1.0">>, - <<"json-iface@1.0">>, - <<"wasm-64@1.0">>, - <<"multipass@1.0">> - ]). 
-``` - -### test_aos_process - -```erlang -test_aos_process(Opts, Stack) -> - Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), - Address = hb_util:human_id(ar_wallet:to_address(Wallet)), - WASMProc = test_wasm_process(<<"test/aos-2-pure-xs.wasm">>, Opts), - hb_message:commit( - hb_maps:merge( - hb_message:uncommitted(WASMProc, Opts), - #{ - <<"device-stack">> => Stack, - <<"execution-device">> => <<"stack@1.0">>, - <<"scheduler-device">> => <<"scheduler@1.0">>, - <<"output-prefix">> => <<"wasm">>, - <<"patch-from">> => <<"/results/outbox">>, - <<"passes">> => 2, - <<"stack-keys">> => - [ - <<"init">>, - <<"compute">>, - <<"snapshot">>, - <<"normalize">> - ], - <<"scheduler">> => - hb_opts:get(scheduler, Address, Opts), - <<"authority">> => - hb_opts:get(authority, Address, Opts) - }, Opts), - Opts#{ priv_wallet => Wallet} - ). -``` - -### dev_test_process - -Generate a device that has a stack of two `dev_test`s for - -```erlang -dev_test_process() -> - Wallet = hb:wallet(), - hb_message:commit( - hb_maps:merge(test_base_process(), #{ - <<"execution-device">> => <<"stack@1.0">>, - <<"device-stack">> => [<<"test-device@1.0">>, <<"test-device@1.0">>] - }, #{}), - Wallet - ). -``` - -### schedule_test_message - -```erlang -schedule_test_message(Msg1, Text, Opts) -> - schedule_test_message(Msg1, Text, #{}, Opts). -``` - -### schedule_test_message - -```erlang -schedule_test_message(Msg1, Text, MsgBase, Opts) -> - Wallet = hb:wallet(), - UncommittedBase = hb_message:uncommitted(MsgBase, Opts), - Msg2 = - hb_message:commit(#{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - hb_message:commit( - UncommittedBase#{ - <<"type">> => <<"Message">>, - <<"test-label">> => Text - }, - Opts#{ priv_wallet => Wallet} - ) - }, - Opts#{ priv_wallet => Wallet} - ), - {ok, _} = hb_ao:resolve(Msg1, Msg2, Opts). -``` - -### schedule_aos_call - -```erlang -schedule_aos_call(Msg1, Code) -> - schedule_aos_call(Msg1, Code, #{}). 
-``` - -### schedule_aos_call - -```erlang -schedule_aos_call(Msg1, Code, Opts) -> - Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), - ProcID = hb_message:id(Msg1, all), - Msg2 = - hb_message:commit( - #{ - <<"action">> => <<"Eval">>, - <<"data">> => Code, - <<"target">> => ProcID - }, - Opts#{priv_wallet => Wallet} - ), - schedule_test_message(Msg1, <<"TEST MSG">>, Msg2, Opts). -``` - -### schedule_wasm_call - -```erlang -schedule_wasm_call(Msg1, FuncName, Params) -> - schedule_wasm_call(Msg1, FuncName, Params, #{}). -``` - -### schedule_wasm_call - -```erlang -schedule_wasm_call(Msg1, FuncName, Params, Opts) -> - Wallet = hb:wallet(), - Msg2 = hb_message:commit(#{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - hb_message:commit( - #{ - <<"type">> => <<"Message">>, - <<"function">> => FuncName, - <<"parameters">> => Params - }, - Opts#{ priv_wallet => Wallet} - ) - }, Opts#{ priv_wallet => Wallet}), - ?assertMatch({ok, _}, hb_ao:resolve(Msg1, Msg2, Opts)). -``` - -### schedule_on_process_test_ - -```erlang -schedule_on_process_test_() -> - {timeout, 30, fun()-> - init(), - Msg1 = test_aos_process(), - schedule_test_message(Msg1, <<"TEST TEXT 1">>, #{}), - schedule_test_message(Msg1, <<"TEST TEXT 2">>, #{}), - ?event(messages_scheduled), - {ok, SchedulerRes} = - hb_ao:resolve(Msg1, #{ - <<"method">> => <<"GET">>, - <<"path">> => <<"schedule">> - }, #{}), - ?assertMatch( - <<"TEST TEXT 1">>, - hb_ao:get(<<"assignments/0/body/test-label">>, SchedulerRes) - ), - ?assertMatch( - <<"TEST TEXT 2">>, - hb_ao:get(<<"assignments/1/body/test-label">>, SchedulerRes) - ) - end}. 
-``` - -### get_scheduler_slot_test - -```erlang -get_scheduler_slot_test() -> - init(), - Msg1 = test_base_process(), - schedule_test_message(Msg1, <<"TEST TEXT 1">>, #{}), - schedule_test_message(Msg1, <<"TEST TEXT 2">>, #{}), - Msg2 = #{ - <<"path">> => <<"slot">>, - <<"method">> => <<"GET">> - }, - ?assertMatch( - {ok, #{ <<"current">> := CurrentSlot }} when CurrentSlot > 0, - hb_ao:resolve(Msg1, Msg2, #{}) - ). -``` - -### recursive_path_resolution_test - -```erlang -recursive_path_resolution_test() -> - init(), - Msg1 = test_base_process(), - schedule_test_message(Msg1, <<"TEST TEXT 1">>, #{}), - CurrentSlot = - hb_ao:resolve( - Msg1, - #{ <<"path">> => <<"slot/current">> }, - #{ <<"hashpath">> => ignore } - ), - ?event({resolved_current_slot, CurrentSlot}), - ?assertMatch( - CurrentSlot when CurrentSlot > 0, - CurrentSlot - ), - ok. -``` - -### test_device_compute_test - -```erlang -test_device_compute_test() -> - init(), - Msg1 = dev_test_process(), - schedule_test_message(Msg1, <<"TEST TEXT 1">>, #{}), - schedule_test_message(Msg1, <<"TEST TEXT 2">>, #{}), - ?assertMatch( - {ok, <<"TEST TEXT 2">>}, - hb_ao:resolve( - Msg1, - <<"schedule/assignments/1/body/test-label">>, - #{ <<"hashpath">> => ignore } - ) - ), - Msg2 = #{ <<"path">> => <<"compute">>, <<"slot">> => 1 }, - {ok, Msg3} = hb_ao:resolve(Msg1, Msg2, #{}), - ?event({computed_message, {msg3, Msg3}}), - ?assertEqual(1, hb_ao:get(<<"results/assignment-slot">>, Msg3, #{})), - ?assertEqual([1,1,0,0], hb_ao:get(<<"already-seen">>, Msg3, #{})). 
-``` - -### wasm_compute_test - -```erlang -wasm_compute_test() -> - init(), - Msg1 = test_wasm_process(<<"test/test-64.wasm">>), - schedule_wasm_call(Msg1, <<"fac">>, [5.0]), - schedule_wasm_call(Msg1, <<"fac">>, [6.0]), - {ok, Msg3} = - hb_ao:resolve( - Msg1, - #{ <<"path">> => <<"compute">>, <<"slot">> => 0 }, - #{ <<"hashpath">> => ignore } - ), - ?event({computed_message, {msg3, Msg3}}), - ?assertEqual([120.0], hb_ao:get(<<"results/output">>, Msg3, #{})), - {ok, Msg4} = - hb_ao:resolve( - Msg1, - #{ <<"path">> => <<"compute">>, <<"slot">> => 1 }, - #{ <<"hashpath">> => ignore } - ), - ?event({computed_message, {msg4, Msg4}}), - ?assertEqual([720.0], hb_ao:get(<<"results/output">>, Msg4, #{})). -``` - -### wasm_compute_from_id_test - -```erlang -wasm_compute_from_id_test() -> - init(), - Opts = #{ cache_control => <<"always">> }, - Msg1 = test_wasm_process(<<"test/test-64.wasm">>), - schedule_wasm_call(Msg1, <<"fac">>, [5.0], Opts), - Msg1ID = hb_message:id(Msg1, all), - Msg2 = #{ <<"path">> => <<"compute">>, <<"slot">> => 0 }, - {ok, Msg3} = hb_ao:resolve(Msg1ID, Msg2, Opts), - ?event(process_compute, {computed_message, {msg3, Msg3}}), - ?assertEqual([120.0], hb_ao:get(<<"results/output">>, Msg3, Opts)). 
-``` - -### http_wasm_process_by_id_test - -```erlang -http_wasm_process_by_id_test() -> - rand:seed(default), - SchedWallet = ar_wallet:new(), - Node = hb_http_server:start_node(Opts = #{ - port => 10000 + rand:uniform(10000), - priv_wallet => SchedWallet, - cache_control => <<"always">>, - process_async_cache => false, - store => #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-mainnet">> - } - }), - Wallet = ar_wallet:new(), - Proc = test_wasm_process(<<"test/test-64.wasm">>, Opts), - hb_cache:write(Proc, Opts), - ProcID = hb_util:human_id(hb_message:id(Proc, all)), - InitRes = - hb_http:post( - Node, - << "/schedule" >>, - Proc, - #{} - ), - ?event({schedule_proc_res, InitRes}), - ExecMsg = - hb_message:commit(#{ - <<"target">> => ProcID, - <<"type">> => <<"Message">>, - <<"function">> => <<"fac">>, - <<"parameters">> => [5.0] - }, - Wallet - ), - {ok, Msg3} = hb_http:post(Node, << ProcID/binary, "/schedule">>, ExecMsg, #{}), - ?event({schedule_msg_res, {msg3, Msg3}}), - {ok, Msg4} = - hb_http:get( - Node, - #{ - <<"path">> => << ProcID/binary, "/compute">>, - <<"slot">> => 1 - }, - #{} - ), - ?event({compute_msg_res, {msg4, Msg4}}), - ?assertEqual([120.0], hb_ao:get(<<"results/output">>, Msg4, #{})). -``` - -### aos_compute_test_ - -```erlang -aos_compute_test_() -> - {timeout, 30, fun() -> - init(), - Msg1 = test_aos_process(), - schedule_aos_call(Msg1, <<"return 1+1">>), - schedule_aos_call(Msg1, <<"return 2+2">>), - Msg2 = #{ <<"path">> => <<"compute">>, <<"slot">> => 0 }, - {ok, Msg3} = hb_ao:resolve(Msg1, Msg2, #{}), - {ok, Res} = hb_ao:resolve(Msg3, <<"results">>, #{}), - ?event({computed_message, {msg3, Res}}), - {ok, Data} = hb_ao:resolve(Res, <<"data">>, #{}), - ?event({computed_data, Data}), - ?assertEqual(<<"2">>, Data), - Msg4 = #{ <<"path">> => <<"compute">>, <<"slot">> => 1 }, - {ok, Msg5} = hb_ao:resolve(Msg1, Msg4, #{}), - ?assertEqual(<<"4">>, hb_ao:get(<<"results/data">>, Msg5, #{})), - {ok, Msg5} - end}. 
-``` - -### aos_browsable_state_test_ - -```erlang -aos_browsable_state_test_() -> - {timeout, 30, fun() -> - init(), - Msg1 = test_aos_process(), - schedule_aos_call(Msg1, - <<"table.insert(ao.outbox.Messages, { target = ao.id, ", - "action = \"State\", ", - "data = { deep = 4, bool = true } })">> - ), - Msg2 = #{ <<"path">> => <<"compute">>, <<"slot">> => 0 }, - {ok, Msg3} = - hb_ao:resolve_many( - [Msg1, Msg2, <<"results">>, <<"outbox">>, 1, <<"data">>, <<"deep">>], - #{ cache_control => <<"always">> } - ), - ID = hb_message:id(Msg1), - ?event({computed_message, {id, {explicit, ID}}}), - ?assertEqual(4, Msg3) - end}. -``` - -### aos_state_access_via_http_test_ - -```erlang -aos_state_access_via_http_test_() -> - {timeout, 60, fun() -> - rand:seed(default), - Wallet = ar_wallet:new(), - Node = hb_http_server:start_node(Opts = #{ - port => 10000 + rand:uniform(10000), - priv_wallet => Wallet, - cache_control => <<"always">>, - store => #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-mainnet">> - }, - force_signed_requests => true - }), - Proc = test_aos_process(Opts), - ProcID = hb_util:human_id(hb_message:id(Proc, all)), - {ok, _InitRes} = hb_http:post(Node, <<"/schedule">>, Proc, Opts), - Msg2 = hb_message:commit(#{ - <<"data-protocol">> => <<"ao">>, - <<"variant">> => <<"ao.N.1">>, - <<"type">> => <<"Message">>, - <<"action">> => <<"Eval">>, - <<"data">> => - <<"table.insert(ao.outbox.Messages, { target = ao.id,", - " action = \"State\", data = { ", - "[\"content-type\"] = \"text/html\", ", - "[\"body\"] = \"

Hello, world!

\"", - "}})">>, - <<"target">> => ProcID - }, Wallet), - {ok, Msg3} = hb_http:post(Node, << ProcID/binary, "/schedule">>, Msg2, Opts), - ?event({schedule_msg_res, {msg3, Msg3}}), - {ok, Msg4} = - hb_http:get( - Node, - #{ - <<"path">> => << ProcID/binary, "/compute/results/outbox/1/data" >>, - <<"slot">> => 1 - }, - Opts - ), - ?event({compute_msg_res, {msg4, Msg4}}), - ?event( - {try_yourself, - {explicit, - << - Node/binary, - "/", - ProcID/binary, - "/compute&slot=1/results/outbox/1/data" - >> - } - } - ), - ?assertMatch(#{ <<"body">> := <<"

Hello, world!

">> }, Msg4), - ok - end}. -``` - -### aos_state_patch_test_ - -```erlang -aos_state_patch_test_() -> - {timeout, 30, fun() -> - Wallet = hb:wallet(), - init(), - Msg1Raw = test_aos_process(#{}, [ - <<"wasi@1.0">>, - <<"json-iface@1.0">>, - <<"wasm-64@1.0">>, - <<"patch@1.0">>, - <<"multipass@1.0">> - ]), - {ok, Msg1} = hb_message:with_only_committed(Msg1Raw, #{}), - ProcID = hb_message:id(Msg1, all), - Msg2 = (hb_message:commit(#{ - <<"data-protocol">> => <<"ao">>, - <<"variant">> => <<"ao.N.1">>, - <<"target">> => ProcID, - <<"type">> => <<"Message">>, - <<"action">> => <<"Eval">>, - <<"data">> => - << - "table.insert(ao.outbox.Messages, " - "{ method = \"PATCH\", x = \"banana\" })" - >> - }, Wallet))#{ <<"path">> => <<"schedule">>, <<"method">> => <<"POST">> }, - {ok, _} = hb_ao:resolve(Msg1, Msg2, #{}), - Msg3 = #{ <<"path">> => <<"compute">>, <<"slot">> => 0 }, - {ok, Msg4} = hb_ao:resolve(Msg1, Msg3, #{}), - ?event({computed_message, {msg3, Msg4}}), - {ok, Data} = hb_ao:resolve(Msg4, <<"x">>, #{}), - ?event({computed_data, Data}), - ?assertEqual(<<"banana">>, Data) - end}. -``` - -### restore_test_ - -Manually test state restoration without using the cache. - -```erlang -restore_test_() -> {timeout, 30, fun do_test_restore/0}. -``` - -### do_test_restore - -Manually test state restoration without using the cache. - -```erlang -do_test_restore() -> - % Init the process and schedule 3 messages: - % 1. Set variables in Lua. -``` - -### now_results_test_ - -```erlang -now_results_test_() -> - {timeout, 30, fun() -> - init(), - Msg1 = test_aos_process(), - schedule_aos_call(Msg1, <<"return 1+1">>), - schedule_aos_call(Msg1, <<"return 2+2">>), - ?assertEqual({ok, <<"4">>}, hb_ao:resolve(Msg1, <<"now/results/data">>, #{})) - end}. 
-``` - -### prior_results_accessible_test_ - -```erlang -prior_results_accessible_test_() -> - {timeout, 30, fun() -> - init(), - Opts = #{ - process_async_cache => false - }, - Msg1 = test_aos_process(), - schedule_aos_call(Msg1, <<"return 1+1">>), - schedule_aos_call(Msg1, <<"return 2+2">>), - ?assertEqual( - {ok, <<"4">>}, - hb_ao:resolve(Msg1, <<"now/results/data">>, Opts) - ), - {ok, Results} = - hb_ao:resolve( - Msg1, - #{ <<"path">> => <<"compute">>, <<"slot">> => 1 }, - Opts - ), - ?assertMatch( - #{ <<"results">> := #{ <<"data">> := <<"4">> } }, - hb_cache:ensure_all_loaded(Results, Opts) - ) - end}. -``` - -### persistent_process_test - -```erlang -persistent_process_test() -> - {timeout, 30, fun() -> - init(), - Msg1 = test_aos_process(), - schedule_aos_call(Msg1, <<"X=1">>), - schedule_aos_call(Msg1, <<"return 2">>), - schedule_aos_call(Msg1, <<"return X">>), - T0 = hb:now(), - FirstSlotMsg2 = #{ - <<"path">> => <<"compute">>, - <<"slot">> => 0 - }, - ?assertMatch( - {ok, _}, - hb_ao:resolve(Msg1, FirstSlotMsg2, #{ spawn_worker => true }) - ), - T1 = hb:now(), - ThirdSlotMsg2 = #{ - <<"path">> => <<"compute">>, - <<"slot">> => 2 - }, - Res = hb_ao:resolve(Msg1, ThirdSlotMsg2, #{}), - ?event({computed_message, {msg3, Res}}), - ?assertMatch( - {ok, _}, - Res - ), - T2 = hb:now(), - ?event(benchmark, {runtimes, {first_run, T1 - T0}, {second_run, T2 - T1}}), - % The second resolve should be much faster than the first resolve, as the - % process is already running. 
-``` - -### simple_wasm_persistent_worker_benchmark_test - -```erlang -simple_wasm_persistent_worker_benchmark_test() -> - init(), - BenchTime = 1, - Msg1 = test_wasm_process(<<"test/test-64.wasm">>), - schedule_wasm_call(Msg1, <<"fac">>, [5.0]), - schedule_wasm_call(Msg1, <<"fac">>, [6.0]), - {ok, Initialized} = - hb_ao:resolve( - Msg1, - #{ <<"path">> => <<"compute">>, <<"slot">> => 1 }, - #{ spawn_worker => true, process_workers => true } - ), - Iterations = hb_test_utils:benchmark( - fun(Iteration) -> - schedule_wasm_call( - Initialized, - <<"fac">>, - [5.0] - ), - ?assertMatch( - {ok, _}, - hb_ao:resolve( - Initialized, - #{ <<"path">> => <<"compute">>, <<"slot">> => Iteration + 1 }, - #{} - ) - ) - end, - BenchTime - ), - ?event(benchmark, {scheduled, Iterations}), - hb_format:eunit_print( - "Scheduled and evaluated ~p simple wasm process messages in ~p s (~s msg/s)", - [Iterations, BenchTime, hb_util:human_int(Iterations / BenchTime)] - ), - ?assert(Iterations >= 2), - ok. -``` - -### aos_persistent_worker_benchmark_test_ - -```erlang -aos_persistent_worker_benchmark_test_() -> - {timeout, 30, fun() -> - BenchTime = 5, - init(), - Msg1 = test_aos_process(), - schedule_aos_call(Msg1, <<"X=1337">>), - FirstSlotMsg2 = #{ - <<"path">> => <<"compute">>, - <<"slot">> => 0 - }, - ?assertMatch( - {ok, _}, - hb_ao:resolve(Msg1, FirstSlotMsg2, #{ spawn_worker => true }) - ), - Iterations = hb_test_utils:benchmark( - fun(Iteration) -> - schedule_aos_call( - Msg1, - <<"return X + ", (integer_to_binary(Iteration))/binary>> - ), - ?assertMatch( - {ok, _}, - hb_ao:resolve( - Msg1, - #{ <<"path">> => <<"compute">>, <<"slot">> => Iteration }, - #{} - ) - ) - end, - BenchTime - ), - ?event(benchmark, {scheduled, Iterations}), - hb_format:eunit_print( - "Scheduled and evaluated ~p AOS process messages in ~p s (~s msg/s)", - [Iterations, BenchTime, hb_util:human_int(Iterations / BenchTime)] - ), - ?assert(Iterations >= 2), - ok -``` - ---- - -*Generated from 
[dev_process.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_process.erl)* diff --git a/docs/book/src/dev_process_cache.erl.md b/docs/book/src/dev_process_cache.erl.md deleted file mode 100644 index 5b7170958..000000000 --- a/docs/book/src/dev_process_cache.erl.md +++ /dev/null @@ -1,204 +0,0 @@ -# dev_process_cache - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_process_cache.erl) - -A wrapper around the hb_cache module that provides a more -convenient interface for reading the result of a process at a given slot or -message ID. - ---- - -## Exported Functions - -- `latest/2` -- `latest/3` -- `latest/4` -- `read/2` -- `read/3` -- `write/4` - ---- - -### read - -A wrapper around the hb_cache module that provides a more -Read the result of a process at a given slot. - -```erlang -read(ProcID, Opts) -> - hb_util:ok(latest(ProcID, Opts)). -``` - -### read - -```erlang -read(ProcID, SlotRef, Opts) -> - ?event({reading_computed_result, ProcID, SlotRef}), - Path = path(ProcID, SlotRef, Opts), - hb_cache:read(Path, Opts). -``` - -### write - -Write a process computation result to the cache. - -```erlang -write(ProcID, Slot, Msg, Opts) -> - % Write the item to the cache in the root of the store. -``` - -### path - -Calculate the path of a result, given a process ID and a slot. - -```erlang -path(ProcID, Ref, Opts) -> - path(ProcID, Ref, [], Opts). -``` - -### path - -```erlang -path(ProcID, Ref, PathSuffix, Opts) -> - Store = hb_opts:get(store, no_viable_store, Opts), - hb_store:path( - Store, - [ - <<"computed">>, - hb_util:human_id(ProcID) - ] ++ - case Ref of - Int when is_integer(Int) -> ["slot", integer_to_binary(Int)]; - root -> []; - slot_root -> ["slot"]; - _ -> [Ref] - end ++ PathSuffix - ). -``` - -### latest - -Retrieve the latest slot for a given process. Optionally state a limit - -```erlang -latest(ProcID, Opts) -> latest(ProcID, [], Opts). -``` - -### latest - -Retrieve the latest slot for a given process. 
Optionally state a limit - -```erlang -latest(ProcID, RequiredPath, Opts) -> - latest(ProcID, RequiredPath, undefined, Opts). -``` - -### latest - -```erlang -latest(ProcID, RawRequiredPath, Limit, Opts) -> - ?event( - {latest_called, - {proc_id, ProcID}, - {required_path, RawRequiredPath}, - {limit, Limit} - } - ), - % Convert the required path to a list of _binary_ keys. -``` - -### first_with_path - -Find the latest assignment with the requested path suffix. - -```erlang -first_with_path(ProcID, RequiredPath, Slots, Opts) -> - first_with_path( - ProcID, - RequiredPath, - Slots, - Opts, - hb_opts:get(store, no_viable_store, Opts) - ). -``` - -### first_with_path - -```erlang -first_with_path(_ProcID, _Required, [], _Opts, _Store) -> - not_found; -``` - -### first_with_path - -```erlang -first_with_path(ProcID, RequiredPath, [Slot | Rest], Opts, Store) -> - RawPath = path(ProcID, Slot, RequiredPath, Opts), - ResolvedPath = hb_store:resolve(Store, RawPath), - ?event({trying_slot, {slot, Slot}, {path, RawPath}, {resolved_path, ResolvedPath}}), - case hb_store:type(Store, ResolvedPath) of - not_found -> - first_with_path(ProcID, RequiredPath, Rest, Opts, Store); - _ -> - Slot - end. -``` - -### process_cache_suite_test_ - -```erlang -process_cache_suite_test_() -> - hb_store:generate_test_suite( - [ - {"write and read process outputs", fun test_write_and_read_output/1}, - {"find latest output (with path)", fun find_latest_outputs/1} - ], - [ - {Name, Opts} - || - {Name, Opts} <- hb_store:test_stores() - ] - ). 
-``` - -### test_write_and_read_output - -Test for writing multiple computed outputs, then getting them by - -```erlang -test_write_and_read_output(Opts) -> - Proc = hb_cache:test_signed( - #{ <<"test-item">> => hb_cache:test_unsigned(<<"test-body-data">>) }), - ProcID = hb_util:human_id(hb_ao:get(id, Proc)), - Item1 = hb_cache:test_signed(<<"Simple signed output #1">>), - Item2 = hb_cache:test_unsigned(<<"Simple unsigned output #2">>), - {ok, Path0} = write(ProcID, 0, Item1, Opts), - {ok, Path1} = write(ProcID, 1, Item2, Opts), - {ok, DirectReadItem1} = hb_cache:read(Path0, Opts), - ?assert(hb_message:match(Item1, DirectReadItem1)), - {ok, DirectReadItem2} = hb_cache:read(Path1, Opts), - ?assert(hb_message:match(Item2, DirectReadItem2)), - {ok, ReadItem1BySlotNum} = read(ProcID, 0, Opts), - ?assert(hb_message:match(Item1, ReadItem1BySlotNum)), - {ok, ReadItem2BySlotNum} = read(ProcID, 1, Opts), - ?assert(hb_message:match(Item2, ReadItem2BySlotNum)), - {ok, ReadItem1ByID} = - read(ProcID, hb_util:human_id(hb_ao:get(id, Item1)), Opts), - ?assert(hb_message:match(Item1, ReadItem1ByID)), - {ok, ReadItem2ByID} = - read(ProcID, hb_util:human_id(hb_message:id(Item2, all)), Opts), - ?assert(hb_message:match(Item2, ReadItem2ByID)). -``` - -### find_latest_outputs - -Test for retrieving the latest computed output for a process. - -```erlang -find_latest_outputs(Opts) -> - % Create test environment. -``` - ---- - -*Generated from [dev_process_cache.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_process_cache.erl)* diff --git a/docs/book/src/dev_process_worker.erl.md b/docs/book/src/dev_process_worker.erl.md deleted file mode 100644 index b02c5448f..000000000 --- a/docs/book/src/dev_process_worker.erl.md +++ /dev/null @@ -1,213 +0,0 @@ -# dev_process_worker - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_process_worker.erl) - -A long-lived process worker that keeps state in memory between -calls. 
Implements the interface of `hb_ao` to receive and respond -to computation requests regarding a process as a singleton. - ---- - -## Exported Functions - -- `await/5` -- `group/3` -- `notify_compute/4` -- `server/3` -- `stop/1` - ---- - -### group - -A long-lived process worker that keeps state in memory between -Returns a group name for a request. The worker is responsible for all - -```erlang -group(Msg1, undefined, Opts) -> - hb_persistent:default_grouper(Msg1, undefined, Opts); -``` - -### group - -A long-lived process worker that keeps state in memory between -Returns a group name for a request. The worker is responsible for all - -```erlang -group(Msg1, Msg2, Opts) -> - case hb_opts:get(process_workers, false, Opts) of - false -> - hb_persistent:default_grouper(Msg1, Msg2, Opts); - true -> - case Msg2 of - undefined -> - hb_persistent:default_grouper(Msg1, undefined, Opts); - _ -> - case hb_path:matches(<<"compute">>, hb_path:hd(Msg2, Opts)) of - true -> - process_to_group_name(Msg1, Opts); - _ -> - hb_persistent:default_grouper(Msg1, Msg2, Opts) - end - end - end. -``` - -### process_to_group_name - -```erlang -process_to_group_name(Msg1, Opts) -> - Initialized = dev_process:ensure_process_key(Msg1, Opts), - ProcMsg = hb_ao:get(<<"process">>, Initialized, Opts#{ hashpath => ignore }), - ID = hb_message:id(ProcMsg, all), - ?event({process_to_group_name, {id, ID}, {msg1, Msg1}}), - hb_util:human_id(ID). -``` - -### server - -Spawn a new worker process. This is called after the end of the first - -```erlang -server(GroupName, Msg1, Opts) -> - ServerOpts = Opts#{ - await_inprogress => false, - spawn_worker => false, - process_workers => false - }, - % The maximum amount of time the worker will wait for a request before - % checking the cache for a snapshot. Default: 5 minutes. -``` - -### await - -Await a resolution from a worker executing the `process@1.0` device. 
- -```erlang -await(Worker, GroupName, Msg1, Msg2, Opts) -> - case hb_path:matches(<<"compute">>, hb_path:hd(Msg2, Opts)) of - false -> - hb_persistent:default_await(Worker, GroupName, Msg1, Msg2, Opts); - true -> - TargetSlot = hb_ao:get(<<"slot">>, Msg2, any, Opts), - ?event({awaiting_compute, - {worker, Worker}, - {group, GroupName}, - {target_slot, TargetSlot} - }), - receive - {resolved, _, GroupName, {slot, RecvdSlot}, Res} - when RecvdSlot == TargetSlot orelse TargetSlot == any -> - ?event(compute_debug, {notified_of_resolution, - {target, TargetSlot}, - {group, GroupName} - }), - Res; - {resolved, _, GroupName, {slot, RecvdSlot}, _Res} -> - ?event(compute_debug, {waiting_again, - {target, TargetSlot}, - {recvd, RecvdSlot}, - {worker, Worker}, - {group, GroupName} - }), - await(Worker, GroupName, Msg1, Msg2, Opts); - {'DOWN', _R, process, Worker, _Reason} -> - ?event(compute_debug, - {leader_died, - {group, GroupName}, - {leader, Worker}, - {target, TargetSlot} - } - ), - {error, leader_died} - end - end. -``` - -### notify_compute - -Notify any waiters for a specific slot of the computed results. - -```erlang -notify_compute(GroupName, SlotToNotify, Msg3, Opts) -> - notify_compute(GroupName, SlotToNotify, Msg3, Opts, 0). 
-``` - -### notify_compute - -```erlang -notify_compute(GroupName, SlotToNotify, Msg3, Opts, Count) -> - ?event({notifying_of_computed_slot, {group, GroupName}, {slot, SlotToNotify}}), - receive - {resolve, Listener, GroupName, #{ <<"slot">> := SlotToNotify }, _ListenerOpts} -> - send_notification(Listener, GroupName, SlotToNotify, Msg3), - notify_compute(GroupName, SlotToNotify, Msg3, Opts, Count + 1); - {resolve, Listener, GroupName, Msg, _ListenerOpts} - when is_map(Msg) andalso not is_map_key(<<"slot">>, Msg) -> - send_notification(Listener, GroupName, SlotToNotify, Msg3), - notify_compute(GroupName, SlotToNotify, Msg3, Opts, Count + 1) - after 0 -> - ?event(worker_short, - {finished_notifying, - {group, GroupName}, - {slot, SlotToNotify}, - {listeners, Count} - } - ) - end. -``` - -### send_notification - -```erlang -send_notification(Listener, GroupName, SlotToNotify, Msg3) -> - ?event({sending_notification, {group, GroupName}, {slot, SlotToNotify}}), - Listener ! {resolved, self(), GroupName, {slot, SlotToNotify}, Msg3}. -``` - -### stop - -Stop a worker process. - -```erlang -stop(Worker) -> - exit(Worker, normal). -``` - -### test_init - -```erlang -test_init() -> - application:ensure_all_started(hb), - ok. -``` - -### info_test - -```erlang -info_test() -> - test_init(), - M1 = dev_process:test_wasm_process(<<"test/aos-2-pure-xs.wasm">>), - Res = hb_ao:info(M1, #{}), - ?assertEqual(fun dev_process_worker:group/3, hb_maps:get(grouper, Res, undefined, #{})). 
-``` - -### grouper_test - -```erlang -grouper_test() -> - test_init(), - M1 = dev_process:test_aos_process(), - M2 = #{ <<"path">> => <<"compute">>, <<"v">> => 1 }, - M3 = #{ <<"path">> => <<"compute">>, <<"v">> => 2 }, - M4 = #{ <<"path">> => <<"not-compute">>, <<"v">> => 3 }, - G1 = hb_persistent:group(M1, M2, #{ process_workers => true }), - G2 = hb_persistent:group(M1, M3, #{ process_workers => true }), - G3 = hb_persistent:group(M1, M4, #{ process_workers => true }), - ?event({group_samples, {g1, G1}, {g2, G2}, {g3, G3}}), - ?assertEqual(G1, G2), -``` - ---- - -*Generated from [dev_process_worker.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_process_worker.erl)* diff --git a/docs/book/src/dev_profile.erl.md b/docs/book/src/dev_profile.erl.md deleted file mode 100644 index bffb37a8f..000000000 --- a/docs/book/src/dev_profile.erl.md +++ /dev/null @@ -1,414 +0,0 @@ -# dev_profile - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_profile.erl) - -A module for running different profiling tools upon HyperBEAM executions. -This device allows a variety of profiling tools to be used and for their -outputs to be returned as messages, or displayed locally on the console. -When called from an AO-Core request, the path at the given key is resolved. -If the `eval` function is instead directly invoked via Erlang, the first -argument may be a function to profile instead. - ---- - -## Exported Functions - -- `eval/1` -- `eval/2` -- `eval/3` -- `eval/4` -- `info/1` - ---- - -### info - -A module for running different profiling tools upon HyperBEAM executions. -Default to the `eval` function. - -```erlang -info(_) -> - #{ - excludes => [<<"keys">>, <<"set">>], - default => fun eval/4 - }. -``` - -### eval - -Invoke a profiling tool on a function or an AO-Core resolution. If a - -```erlang -eval(Fun) -> eval(Fun, #{}). -``` - -### eval - -Invoke a profiling tool on a function or an AO-Core resolution. 
If a - -```erlang -eval(Fun, Opts) -> eval(Fun, #{}, Opts). -``` - -### eval - -Invoke a profiling tool on a function or an AO-Core resolution. If a - -```erlang -eval(Fun, Req, Opts) when is_function(Fun) -> - do_eval( - Fun, - case return_mode(Req, Opts, undefined) of - undefined -> Req#{ <<"return-mode">> => <<"open">> }; - _ -> Req - end, - Opts - ); -``` - -### eval - -Invoke a profiling tool on a function or an AO-Core resolution. If a - -```erlang -eval(Base, Request, Opts) -> - eval(<<"eval">>, Base, Request, Opts). -``` - -### eval - -Invoke a profiling tool on a function or an AO-Core resolution. If a - -```erlang -eval(PathKey, Base, Req, Opts) when not is_function(Base) -> - case hb_ao:get(PathKey, Req, undefined, Opts) of - undefined -> - { - error, - << - "Path key `", - (hb_util:bin(PathKey))/binary, - "` not found in request." - >> - }; - Path -> - do_eval( - fun() -> hb_ao:resolve(Req#{ <<"path">> => Path }, Opts) end, - Req, - Opts - ) - end. -``` - -### do_eval - -```erlang -do_eval(Fun, Req, Opts) -> - % Validate the request and options, then invoke the engine-specific profile - % function. We match the user-requested engine against the supported engines - % on the node. Each engine takes three arguments: - % 1. The function to profile. -``` - -### find_profiling_config - -Find the profiling options. The supported options for `profiling` in the - -```erlang -find_profiling_config(Opts) -> - case hb_opts:get(profiling, not_found, Opts) of - not_found -> - case hb_opts:get(mode, prod, Opts) of - prod -> false; - _ -> hb_features:test() - end; - EnableProfiling -> EnableProfiling - end. -``` - -### validate_enabled - -Validate that profiling is enabled. - -```erlang -validate_enabled(Opts) -> - case find_profiling_config(Opts) of - false -> {validation_error, disabled}; - _ -> true - end. -``` - -### validate_return_mode - -Validate that the request return mode is acceptable. 
We only allow the - -```erlang -validate_return_mode(Req, Opts) -> - case return_mode(Req, Opts) of - <<"open">> -> hb_opts:get(mode, prod, Opts) == debug; - _ -> true - end. -``` - -### validate_signer - -Validate that the request is from a valid signer, if set by the node - -```erlang -validate_signer(Req, Opts) -> - case find_profiling_config(Opts) of - ValidSigners when is_list(ValidSigners) -> - lists:any( - fun(Signer) -> lists:member(Signer, ValidSigners) end, - hb_message:signers(Req, Opts) - ); - EnableProfiling -> EnableProfiling - end orelse {validation_error, invalid_signer}. -``` - -### engine - -Return the profiling function for the given engine. - -```erlang -engine(<<"eflame">>) -> {ok, fun eflame_profile/3}; -``` - -### engine - -Return the profiling function for the given engine. - -```erlang -engine(<<"eprof">>) -> {ok, fun eprof_profile/3}; -``` - -### engine - -Return the profiling function for the given engine. - -```erlang -engine(<<"event">>) -> {ok, fun event_profile/3}; -``` - -### engine - -Return the profiling function for the given engine. - -```erlang -engine(default) -> {ok, default()}; -``` - -### engine - -Return the profiling function for the given engine. -Return the default profiling engine to use. `eflame` if preferred if - -```erlang -engine(Unknown) -> {unknown_engine, Unknown}. -``` - -### default - -Return the profiling function for the given engine. -Return the default profiling engine to use. `eflame` if preferred if - -```erlang -default() -> - case hb_features:eflame() of - true -> fun eflame_profile/3; - false -> fun eprof_profile/3 - end. -``` - -### eflame_profile - -Profile a function using the `eflame` tool. This tool is only available - -```erlang -eflame_profile(Fun, Req, Opts) -> - File = temp_file(), - Res = eflame:apply(normal, File, Fun, []), - MergeStacks = hb_maps:get(<<"mode">>, Req, <<"merge">>, Opts), - EflameDir = code:lib_dir(eflame), - % Get the name of the function to profile. 
If the path in the request is - % set, attempt to find it. If that is not found, we use the bare path. -``` - -### eflame_profile - -```erlang -eflame_profile(_Fun, _Req, _Opts) -> - {error, <<"eflame is not enabled.">>}. --endif. -``` - -### eprof_profile - -Profile a function using the `eprof` tool. - -```erlang -eprof_profile(Fun, Req, Opts) -> - File = temp_file(), - % Attempt to profile the function, stopping the profiler afterwards. -``` - -### event_profile - -Profile using HyperBEAM's events. - -```erlang -event_profile(Fun, Req, Opts) -> - Start = hb_event:counters(), - Fun(), - End = hb_event:counters(), - Diff = hb_message:diff(Start, End, Opts), - case return_mode(Req, Opts) of - <<"message">> -> - {ok, Diff}; - <<"console">> -> - hb_format:print(Diff), - {ok, Diff} - end. -``` - -### return_mode - -Get the return mode of a profiler run. The run mode is set to `console` - -```erlang -return_mode(Req, Opts) -> - return_mode(Req, Opts, <<"message">>). -``` - -### return_mode - -Get the return mode of a profiler run. The run mode is set to `console` -Returns a temporary filename for use in a profiling run. - -```erlang -return_mode(Req, Opts, Default) -> - hb_ao:get(<<"return-mode">>, Req, Default, Opts). -``` - -### temp_file - -Get the return mode of a profiler run. The run mode is set to `console` -Returns a temporary filename for use in a profiling run. - -```erlang -temp_file() -> temp_file(<<"out">>). -``` - -### temp_file - -Get the return mode of a profiler run. The run mode is set to `console` -Returns a temporary filename for use in a profiling run. - -```erlang -temp_file(Ext) -> - << - "profile-", - (integer_to_binary(os:system_time(microsecond)))/binary, - ".", - Ext/binary - >>. -``` - -### eprof_fun_test - -```erlang -eprof_fun_test() -> test_engine(function, <<"eprof">>). -``` - -### eprof_resolution_test - -```erlang -eprof_resolution_test() -> test_engine(resolution, <<"eprof">>). --ifdef(ENABLE_EFLAME). 
-``` - -### eflame_fun_test - -```erlang -eflame_fun_test() -> test_engine(function, <<"eflame">>). -``` - -### eflame_resolution_test - -```erlang -eflame_resolution_test() -> test_engine(resolution, <<"eflame">>). --endif. -``` - -### test_engine - -Run a test and validate the output for a given engine. - -```erlang -test_engine(Type, Engine) -> - validate_profiler_output(Engine, test_profiler_exec(Type, Engine)). -``` - -### test_profiler_exec - -Invoke an engine in either a function (as called from Erlang) or - -```erlang -test_profiler_exec(function, Engine) -> - eval( - fun() -> dev_meta:build(#{}, #{}, #{}) end, - #{ <<"engine">> => Engine, <<"return-mode">> => <<"message">> }, - #{} - ); -``` - -### test_profiler_exec - -Invoke an engine in either a function (as called from Erlang) or - -```erlang -test_profiler_exec(resolution, Engine) -> - hb_ao:resolve( - #{ - <<"path">> => <<"/~profile@1.0/run?run=/~meta@1.0/build">>, - <<"engine">> => Engine, <<"return-mode">> => <<"message">> }, - #{} - ). -``` - -### validate_profiler_output - -Verify the expected type of output from a profiler. - -```erlang -validate_profiler_output(<<"eprof">>, Res) -> - ?assertMatch( - {ok, - #{ - <<"content-type">> := <<"text/plain">>, - <<"body">> := Body - } - } when byte_size(Body) > 100, - Res - ); -``` - -### validate_profiler_output - -Verify the expected type of output from a profiler. 
- -```erlang -validate_profiler_output(<<"eflame">>, Res) -> - ?assertMatch( - {ok, - #{ - <<"content-type">> := <<"image/svg+xml">>, - <<"body">> := Body - } - } when byte_size(Body) > 100, - Res -``` - ---- - -*Generated from [dev_profile.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_profile.erl)* diff --git a/docs/book/src/dev_push.erl.md b/docs/book/src/dev_push.erl.md deleted file mode 100644 index d2a4bbd4d..000000000 --- a/docs/book/src/dev_push.erl.md +++ /dev/null @@ -1,911 +0,0 @@ -# dev_push - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_push.erl) - -`push@1.0` takes a message or slot number, evaluates it, and recursively -pushes the resulting messages to other processes. The `push`ing mechanism -continues until the there are no remaining messages to push. - ---- - -## Exported Functions - -- `push/3` - ---- - -### push - -`push@1.0` takes a message or slot number, evaluates it, and recursively -Push either a message or an assigned slot number. If a `Process` is - -```erlang -push(Base, Req, Opts) -> - Process = dev_process:as_process(Base, Opts), - ?event(push, {push_base, {base, Process}, {req, Req}}, Opts), - case hb_ao:get(<<"slot">>, {as, <<"message@1.0">>, Req}, no_slot, Opts) of - no_slot -> - case schedule_initial_message(Process, Req, Opts) of - {ok, Assignment} -> - case find_type(hb_ao:get(<<"body">>, Assignment, Opts), Opts) of - <<"Process">> -> - ?event(push, - {initializing_process, - {base, Process}, - {assignment, Assignment}}, - Opts - ), - {ok, Assignment}; - _ -> - ?event(push, - {pushing_message, - {base, Process}, - {assignment, Assignment} - }, - Opts - ), - push_with_mode(Process, Assignment, Opts) - end; - {error, Res} -> {error, Res} - end; - _ -> push_with_mode(Process, Req, Opts) - end. 
-``` - -### push_with_mode - -```erlang -push_with_mode(Process, Req, Opts) -> - Mode = is_async(Process, Req, Opts), - case Mode of - <<"sync">> -> - do_push(Process, Req, Opts); - <<"async">> -> - spawn(fun() -> do_push(Process, Req, Opts) end) - end. -``` - -### is_async - -Determine if the push is asynchronous. - -```erlang -is_async(Process, Req, Opts) -> - hb_ao:get_first( - [ - {Req, <<"push-mode">>}, - {Process, <<"push-mode">>}, - {Process, <<"process/push-mode">>} - ], - <<"sync">>, - Opts - ). -``` - -### do_push - -Push a message or slot number, including its downstream results. - -```erlang -do_push(PrimaryProcess, Assignment, Opts) -> - Slot = hb_ao:get(<<"slot">>, Assignment, Opts), - ID = dev_process:process_id(PrimaryProcess, #{}, Opts), - UncommittedID = - dev_process:process_id( - PrimaryProcess, - #{ <<"commitments">> => <<"none">> }, - Opts - ), - BaseID = calculate_base_id(PrimaryProcess, Opts), - ?event(debug, - {push_computing_outbox, - {process_id, ID}, - {base_id, BaseID}, - {slot, Slot} - } - ), - ?event(push, {push_computing_outbox, {process_id, ID}, {slot, Slot}}), - {Status, Result} = hb_ao:resolve( - {as, <<"process@1.0">>, PrimaryProcess}, - #{ <<"path">> => <<"compute/results">>, <<"slot">> => Slot }, - Opts#{ hashpath => ignore } - ), - % Determine if we should include the full compute result in our response. -``` - -### maybe_evaluate_message - -If the outbox message has a path we interpret it as a request to perform - -```erlang -maybe_evaluate_message(Message, Opts) -> - case hb_ao:get(<<"resolve">>, Message, Opts) of - not_found -> - {ok, Message}; - ResolvePath -> - ReqMsg = - maps:without( - [<<"target">>], - Message - ), - ResolveOpts = Opts#{ force_message => true }, - case hb_ao:resolve(ReqMsg#{ <<"path">> => ResolvePath }, ResolveOpts) of - {ok, EvalRes} -> - { - ok, - EvalRes#{ - <<"target">> => - hb_ao:get( - <<"target">>, - Message, - Opts - ) - } - }; - Err -> Err - end - end. 
-``` - -### push_result_message - -Push a downstream message result. The `Origin` map contains information - -```erlang -push_result_message(TargetProcess, MsgToPush, Origin, Opts) -> - NormMsgToPush = hb_util:lower_case_key_map(MsgToPush, Opts), - case hb_ao:get(<<"target">>, NormMsgToPush, undefined, Opts) of - undefined -> - ?event(push, - {skip_no_target, {msg, MsgToPush}, {origin, Origin}}, - Opts - ), - #{}; - TargetID -> - ?event(push, - {pushing_child, - {target, TargetID}, - {msg, MsgToPush}, - {origin, Origin} - }, - Opts - ), - case schedule_result(TargetProcess, MsgToPush, Origin, Opts) of - {ok, Assignment} -> - % Analyze the result of the message push. -``` - -### normalize_message - -Augment the message with from-* keys, if it doesn't already have them. - -```erlang -normalize_message(MsgToPush, Opts) -> - hb_ao:set( - MsgToPush, - #{ - <<"target">> => target_process(MsgToPush, Opts) - }, - Opts#{ hashpath => ignore } - ). -``` - -### target_process - -Find the target process ID for a message to push. - -```erlang -target_process(MsgToPush, Opts) -> - case hb_ao:get(<<"target">>, MsgToPush, Opts) of - not_found -> undefined; - RawTarget -> extract(target, RawTarget) - end. -``` - -### extract - -Return either the `target` or the `hint`. - -```erlang -extract(hint, Raw) -> - {_, Hint} = split_target(Raw), - Hint; -``` - -### extract - -Return either the `target` or the `hint`. - -```erlang -extract(target, Raw) -> - {Target, _} = split_target(Raw), - Target. -``` - -### split_target - -Split the target into the process ID and the optional query string. - -```erlang -split_target(RawTarget) -> - case binary:split(RawTarget, [<<"?">>, <<"&">>]) of - [Target, QStr] -> {Target, QStr}; - _ -> {RawTarget, <<>>} - end. -``` - -### calculate_base_id - -Calculate the base ID for a process. 
The base ID is not just the - -```erlang -calculate_base_id(GivenProcess, Opts) -> - Process = - case hb_ao:get(<<"process">>, GivenProcess, Opts#{ hashpath => ignore }) of - not_found -> GivenProcess; - Proc -> Proc - end, - BaseProcess = maps:without([<<"authority">>, <<"scheduler">>], Process), - {ok, BaseID} = hb_ao:resolve( - BaseProcess, - #{ <<"path">> => <<"id">>, <<"commitments">> => <<"none">> }, - Opts - ), - ?event({push_generated_base, {id, BaseID}, {base, BaseProcess}}), - BaseID. -``` - -### schedule_result - -Add the necessary keys to the message to be scheduled, then schedule it. - -```erlang -schedule_result(TargetProcess, MsgToPush, Origin, Opts) -> - schedule_result(TargetProcess, MsgToPush, <<"httpsig@1.0">>, Origin, Opts). -``` - -### schedule_result - -Add the necessary keys to the message to be scheduled, then schedule it. - -```erlang -schedule_result(TargetProcess, MsgToPush, Codec, Origin, Opts) -> - Target = hb_ao:get(<<"target">>, MsgToPush, Opts), - ?event(push, - {push_scheduling_result, - {target, {string, Target}}, - {target_process, TargetProcess}, - {msg, MsgToPush}, - {codec, Codec}, - {origin, Origin} - }, - Opts - ), - AugmentedMsg = augment_message(Origin, MsgToPush, Opts), - ?event(push, {prepared_msg, {msg, AugmentedMsg}}, Opts), - % Load the `accept-id`'d wallet into the `Opts` map, if requested. 
-``` - -### augment_message - -Set the necessary keys in order for the recipient to know where the - -```erlang -augment_message(Origin, ToSched, Opts) -> - ?event(push, {adding_keys, {origin, Origin}, {to, ToSched}}, Opts), - hb_message:uncommitted( - hb_ao:set( - ToSched, - #{ - <<"data-protocol">> => <<"ao">>, - <<"variant">> => <<"ao.N.1">>, - <<"type">> => <<"Message">>, - <<"from-process">> => maps:get(<<"process">>, Origin), - <<"from-uncommitted">> => maps:get(<<"from-uncommitted">>, Origin), - <<"from-base">> => maps:get(<<"from-base">>, Origin), - <<"from-scheduler">> => maps:get(<<"from-scheduler">>, Origin), - <<"from-authority">> => maps:get(<<"from-authority">>, Origin) - }, - Opts#{ hashpath => ignore } - ) - ). -``` - -### apply_security - -Apply the recipient's security policy to the message. Observes the - -```erlang -apply_security(Msg, TargetProcess, Codec, Opts) -> - apply_security(policy, Msg, TargetProcess, Codec, Opts). -``` - -### apply_security - -```erlang -apply_security(policy, Msg, TargetProcess, Codec, Opts) -> - case hb_ao:get(<<"policy">>, TargetProcess, not_found, Opts) of - not_found -> apply_security(authority, Msg, TargetProcess, Codec, Opts); - Policy -> - case hb_ao:resolve(Policy, Opts) of - {ok, PolicyOpts} -> - case hb_ao:get(<<"accept-committers">>, PolicyOpts, Opts) of - not_found -> - apply_security( - authority, - Msg, - TargetProcess, - Codec, - Opts - ); - Committers -> - commit_result(Msg, Committers, Codec, Opts) - end; - {error, Error} -> - ?event(push, {policy_error, {error, Error}}, Opts), - apply_security(authority, Msg, TargetProcess, Codec, Opts) - end - end; -``` - -### apply_security - -```erlang -apply_security(authority, Msg, TargetProcess, Codec, Opts) -> - case hb_ao:get(<<"authority">>, TargetProcess, Opts) of - not_found -> apply_security(default, Msg, TargetProcess, Codec, Opts); - Authorities when is_list(Authorities) -> - % The `authority` key has already been parsed into a list of - % committers. 
Sign with all local valid keys. -``` - -### apply_security - -```erlang -apply_security(default, Msg, TargetProcess, Codec, Opts) -> - ?event(push, {default_policy, {target, TargetProcess}}, Opts), - commit_result( - Msg, - [hb_util:human_id(hb_opts:get(priv_wallet, no_viable_wallet, Opts))], - Codec, - Opts - ). -``` - -### commit_result - -Attempt to sign a result message with the given committers. - -```erlang -commit_result(Msg, [], Codec, Opts) -> - case hb_opts:get(push_always_sign, true, Opts) of - true -> hb_message:commit(hb_message:uncommitted(Msg), Opts, Codec); - false -> Msg - end; -``` - -### commit_result - -Attempt to sign a result message with the given committers. - -```erlang -commit_result(Msg, Committers, Codec, Opts) -> - Signed = lists:foldl( - fun(Committer, Acc) -> - case hb_opts:as(Committer, Opts) of - {ok, CommitterOpts} -> - ?event(debug_commit, {signing_with_identity, Committer}), - hb_message:commit(Acc, CommitterOpts, Codec); - {error, not_found} -> - ?event(debug_commit, desired_signer_not_available_on_node), - ?event(push, - {policy_warning, - { - unknown_committer, - Committer - } - }, - Opts - ), - Acc - end - end, - hb_message:uncommitted(Msg), - Committers - ), - ?event(debug_commit, - {signed_message_as, {explicit, hb_message:signers(Signed, Opts)}} - ), - case hb_message:signers(Signed, Opts) of - [] -> - ?event(debug_commit, signing_with_default_identity), - commit_result(Msg, [], Codec, Opts); - _FoundSigners -> - Signed - end. -``` - -### schedule_initial_message - -Push a message or a process, prior to pushing the resulting slot number. 
- -```erlang -schedule_initial_message(Base, Req, Opts) -> - ModReq = Req#{ <<"path">> => <<"schedule">>, <<"method">> => <<"POST">> }, - ?event(push, {initial_push, {base, Base}, {req, ModReq}}, Opts), - case hb_ao:resolve(Base, ModReq, Opts) of - {ok, Res} -> - case hb_ao:get(<<"status">>, Res, 200, Opts) of - 200 -> {ok, Res}; - 307 -> - Location = hb_ao:get(<<"location">>, Res, Opts), - remote_schedule_result(Location, Req, Opts) - end; - {error, Res = #{ <<"status">> := 422 }} -> - ?event(push, {initial_push_wrong_format, {error, Res}}, Opts), - {error, Res}; - {error, Res} -> - ?event(push, {initial_push_error, {error, Res}}, Opts), - {error, Res} - end. -``` - -### remote_schedule_result - -```erlang -remote_schedule_result(Location, SignedReq, Opts) -> - ?event(push, {remote_schedule_result, {location, Location}, {req, SignedReq}}, Opts), - {Node, RedirectPath} = parse_redirect(Location, Opts), - Path = - case find_type(SignedReq, Opts) of - <<"Process">> -> <<"/schedule">>; - <<"Message">> -> RedirectPath - end, - % Store a copy of the message for ourselves. -``` - -### find_type - -```erlang -find_type(Req, Opts) -> - hb_ao:get_first( - [ - {Req, <<"type">>}, - {Req, <<"body/type">>} - ], - Opts - ). -``` - -### parse_redirect - -```erlang -parse_redirect(Location, Opts) -> - Parsed = uri_string:parse(Location), - Node = - uri_string:recompose( - (hb_maps:remove(query, Parsed, Opts))#{ - path => <<"/schedule">> - } - ), - {Node, hb_maps:get(path, Parsed, undefined, Opts)}. 
-``` - -### full_push_test_ - -```erlang -full_push_test_() -> - {timeout, 30, fun() -> - dev_process:init(), - Opts = #{ - process_async_cache => false, - priv_wallet => hb:wallet(), - cache_control => <<"always">>, - store => [ - #{ <<"store-module">> => hb_store_fs, <<"name">> => <<"cache-TEST">> }, - #{ <<"store-module">> => hb_store_gateway, - <<"store">> => #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST">> - } - } - ] - }, - Msg1 = dev_process:test_aos_process(Opts), - hb_cache:write(Msg1, Opts), - {ok, SchedInit} = - hb_ao:resolve(Msg1, #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"schedule">>, - <<"body">> => Msg1 - }, - Opts - ), - ?event({test_setup, {msg1, Msg1}, {sched_init, SchedInit}}), - Script = ping_pong_script(2), - ?event({script, Script}), - {ok, Msg2} = dev_process:schedule_aos_call(Msg1, Script, Opts), - ?event({msg_sched_result, Msg2}), - {ok, StartingMsgSlot} = - hb_ao:resolve(Msg2, #{ <<"path">> => <<"slot">> }, Opts), - ?event({starting_msg_slot, StartingMsgSlot}), - Msg3 = - #{ - <<"path">> => <<"push">>, - <<"slot">> => StartingMsgSlot - }, - {ok, _} = hb_ao:resolve(Msg1, Msg3, Opts), - ?assertEqual( - {ok, <<"Done.">>}, - hb_ao:resolve(Msg1, <<"now/results/data">>, Opts) - ) - end}. -``` - -### push_as_identity_test_ - -```erlang -push_as_identity_test_() -> - {timeout, 90, fun() -> - dev_process:init(), - % Create a new identity for the scheduler. 
-``` - -### multi_process_push_test_ - -```erlang -multi_process_push_test_() -> - {timeout, 30, fun() -> - dev_process:init(), - Opts = #{ - priv_wallet => hb:wallet(), - cache_control => <<"always">> - }, - Proc1 = dev_process:test_aos_process(Opts), - hb_cache:write(Proc1, Opts), - {ok, _SchedInit1} = - hb_ao:resolve(Proc1, #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"schedule">>, - <<"body">> => Proc1 - }, - Opts - ), - {ok, _} = dev_process:schedule_aos_call(Proc1, reply_script()), - Proc2 = dev_process:test_aos_process(Opts), - hb_cache:write(Proc2, Opts), - {ok, _SchedInit2} = - hb_ao:resolve(Proc2, #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"schedule">>, - <<"body">> => Proc2 - }, - Opts - ), - ProcID1 = hb_message:id(Proc1, all, Opts), - ProcID2 = hb_message:id(Proc2, all, Opts), - ?event(push, {testing_with, {proc1_id, ProcID1}, {proc2_id, ProcID2}}), - {ok, ToPush} = dev_process:schedule_aos_call( - Proc2, - << - "Handlers.add(\"Pong\",\n" - " function (test) return true end,\n" - " function(m)\n" - " print(\"GOT PONG\")\n" - " end\n" - ")\n" - "Send({ Target = \"", (ProcID1)/binary, "\", Action = \"Ping\" })" - >> - ), - SlotToPush = hb_ao:get(<<"slot">>, ToPush, Opts), - ?event(push, {slot_to_push_proc2, SlotToPush}), - Msg3 = - #{ - <<"path">> => <<"push">>, - <<"slot">> => SlotToPush, - <<"result-depth">> => 1 - }, - {ok, PushResult} = hb_ao:resolve(Proc2, Msg3, Opts), - ?event(push, {push_result_proc2, PushResult}), - AfterPush = hb_ao:resolve(Proc2, <<"now/results/data">>, Opts), - ?event(push, {after_push, AfterPush}), - ?assertEqual({ok, <<"GOT PONG">>}, AfterPush) - end}. 
-``` - -### push_with_redirect_hint_test_disabled - -```erlang -push_with_redirect_hint_test_disabled() -> - {timeout, 30, fun() -> - dev_process:init(), - Stores = - [ - #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST">> - } - ], - ExtOpts = #{ priv_wallet => ar_wallet:new(), store => Stores }, - LocalOpts = #{ priv_wallet => hb:wallet(), store => Stores }, - ExtScheduler = hb_http_server:start_node(ExtOpts), - ?event(push, {external_scheduler, {location, ExtScheduler}}), - % Create the Pong server and client - Client = dev_process:test_aos_process(), - PongServer = dev_process:test_aos_process(ExtOpts), - % Push the new process that runs on the external scheduler - {ok, ServerSchedResp} = - hb_http:post( - ExtScheduler, - <<"/push">>, - PongServer, - ExtOpts - ), - ?event(push, {pong_server_sched_resp, ServerSchedResp}), - % Get the IDs of the server process - PongServerID = - hb_ao:get( - <<"process/id">>, - dev_process:ensure_process_key(PongServer, LocalOpts), - LocalOpts - ), - {ok, ServerScriptSchedResp} = - hb_http:post( - ExtScheduler, - <>, - #{ - <<"body">> => - hb_message:commit( - #{ - <<"target">> => PongServerID, - <<"action">> => <<"Eval">>, - <<"type">> => <<"Message">>, - <<"data">> => reply_script() - }, - ExtOpts - ) - }, - ExtOpts - ), - ?event(push, {pong_server_script_sched_resp, ServerScriptSchedResp}), - {ok, ToPush} = - dev_process:schedule_aos_call( - Client, - << - "Handlers.add(\"Pong\",\n" - " function (test) return true end,\n" - " function(m)\n" - " print(\"GOT PONG\")\n" - " end\n" - ")\n" - "Send({ Target = \"", - (PongServerID)/binary, "?hint=", - (ExtScheduler)/binary, - "\", Action = \"Ping\" })\n" - >>, - LocalOpts - ), - SlotToPush = hb_ao:get(<<"slot">>, ToPush, LocalOpts), - ?event(push, {slot_to_push_client, SlotToPush}), - Msg3 = #{ <<"path">> => <<"push">>, <<"slot">> => SlotToPush }, - {ok, PushResult} = hb_ao:resolve(Client, Msg3, LocalOpts), - ?event(push, {push_result_client, PushResult}), - 
AfterPush = hb_ao:resolve(Client, <<"now/results/data">>, LocalOpts), - ?event(push, {after_push, AfterPush}), - % Note: This test currently only gets a reply that the message was not - % trusted by the process. To fix this, we would have to add another - % trusted authority to the `test_aos_process' call. For now, this is - % enough to validate that redirects are pushed through correctly. -``` - -### push_prompts_encoding_change_test_ - -```erlang -push_prompts_encoding_change_test_() -> - {timeout, 30, fun push_prompts_encoding_change/0}. -``` - -### push_prompts_encoding_change - -```erlang -push_prompts_encoding_change() -> - dev_process:init(), - Opts = #{ - priv_wallet => hb:wallet(), - cache_control => <<"always">>, - store => - [ - #{ <<"store-module">> => hb_store_fs, <<"name">> => <<"cache-TEST">> }, - % Include a gateway store so that we can get the legacynet - % process when needed. -``` - -### oracle_push_test_ - -```erlang -oracle_push_test_() -> {timeout, 30, fun oracle_push/0}. -``` - -### oracle_push - -```erlang -oracle_push() -> - dev_process:init(), - Client = dev_process:test_aos_process(), - {ok, _} = hb_cache:write(Client, #{}), - {ok, _} = dev_process:schedule_aos_call(Client, oracle_script()), - Msg3 = - #{ - <<"path">> => <<"push">>, - <<"slot">> => 0 - }, - {ok, PushResult} = hb_ao:resolve(Client, Msg3, #{ priv_wallet => hb:wallet() }), - ?event({result, PushResult}), - ComputeRes = - hb_ao:resolve( - Client, - <<"now/results/data">>, - #{ priv_wallet => hb:wallet() } - ), - ?event({compute_res, ComputeRes}), - ?assertMatch({ok, _}, ComputeRes). -``` - -### nested_push_prompts_encoding_change_test_ - -Test that a message that generates another message which resides on an - -```erlang -nested_push_prompts_encoding_change_test_() -> - {timeout, 30, fun nested_push_prompts_encoding_change/0}. 
-``` - -### nested_push_prompts_encoding_change - -```erlang -nested_push_prompts_encoding_change() -> - dev_process:init(), - Opts = #{ - priv_wallet => hb:wallet(), - cache_control => <<"always">>, - store => hb_opts:get(store) - }, - ?event(push_debug, {opts, Opts}), - Msg1 = dev_process:test_aos_process(Opts), - hb_cache:write(Msg1, Opts), - {ok, SchedInit} = - hb_ao:resolve(Msg1, #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"schedule">>, - <<"body">> => Msg1 - }, - Opts - ), - ?event({test_setup, {msg1, Msg1}, {sched_init, SchedInit}}), - Script = message_to_legacynet_scheduler_script(), - ?event({script, Script}), - {ok, Msg2} = dev_process:schedule_aos_call(Msg1, Script), - ?event(push, {msg_sched_result, Msg2}), - {ok, StartingMsgSlot} = - hb_ao:resolve(Msg2, #{ <<"path">> => <<"slot">> }, Opts), - ?event({starting_msg_slot, StartingMsgSlot}), - Msg3 = - #{ - <<"path">> => <<"push">>, - <<"slot">> => StartingMsgSlot - }, - {ok, Res} = hb_ao:resolve(Msg1, Msg3, Opts), - ?event(push, {res, Res}), - Msg = hb_message:commit(#{ - <<"path">> => <<"push">>, - <<"method">> => <<"POST">>, - <<"body">> => - hb_message:commit( - #{ - <<"target">> => hb_message:id(Msg1, all, Opts), - <<"action">> => <<"Ping">> - }, - Opts - ) - }, Opts), - ?event(push, {msg1, Msg}), - Res2 = - hb_ao:resolve_many( - [ - hb_message:id(Msg1, all, Opts), - {as, <<"process@1.0">>, <<>>}, - Msg - ], - Opts - ), - ?assertMatch({ok, #{ <<"1">> := #{ <<"resulted-in">> := _ }}}, Res2). --endif. -``` - -### ping_pong_script - -```erlang -ping_pong_script(Limit) -> - << - "Handlers.add(\"Ping\",\n" - " function (test) return true end,\n" - " function(m)\n" - " C = tonumber(m.Count)\n" - " if C <= ", (integer_to_binary(Limit))/binary, " then\n" - " Send({ Target = ao.id, Action = \"Ping\", Count = C + 1 })\n" - " print(\"Ping\", C + 1)\n" - " else\n" - " print(\"Done.\")\n" - " end\n" - " end\n" - ")\n" - "Send({ Target = ao.id, Action = \"Ping\", Count = 1 })\n" - >>. 
-``` - -### reply_script - -```erlang -reply_script() -> - << - """ - Handlers.add("Reply", - { Action = "Ping" }, - function(m) - print("Replying to...") - print(m.From) - Send({ Target = m.From, Action = "Reply", Message = "Pong!" }) - print("Done.") - end - ) - """ - >>. -``` - -### message_to_legacynet_scheduler_script - -```erlang -message_to_legacynet_scheduler_script() -> - << - """ - Handlers.add("Ping", - { Action = "Ping" }, - function(m) - print("Pinging...") - print(m.From) - Send({ - Target = "QQiMcAge5ZtxcUV7ruxpi16KYRE8UBP0GAAqCIJPXz0", - Action = "Ping" - }) - print("Done.") - end - ) - """ - >>. -``` - -### oracle_script - -```erlang -oracle_script() -> - << - """ - Handlers.add("Oracle", - function(m) - return true - end, - function(m) - print(m.Body) - end - ) - Send({ - target = ao.id, - resolve = "/~relay@1.0/call", - ["relay-path"] = "https://arweave.net" - }) - """ -``` - ---- - -*Generated from [dev_push.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_push.erl)* diff --git a/docs/book/src/dev_query.erl.md b/docs/book/src/dev_query.erl.md deleted file mode 100644 index bf2f727b9..000000000 --- a/docs/book/src/dev_query.erl.md +++ /dev/null @@ -1,359 +0,0 @@ -# dev_query - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query.erl) - -A discovery engine for searching for and returning messages found in -a node's cache, through supported stores. -This device supports various modes of matching, including: -- `all` (default): Match all keys in the request message. -- `base`: Match all keys in the base message. -- `only`: Match only the key(s) specified in the `only` key. -The `only` key can be a binary, a map, or a list of keys. If it is a binary, -it is split on commas to get a list of keys to search for. If it is a message, -it is used directly as the match spec. If it is a list, it is assumed to be -a list of keys that we should select from the request or base message and -use as the match spec. 
-The `return` key can be used to specify the type of data to return. -- `count`: Return the number of matches. -- `paths`: Return the paths of the matches in a list. -- `messages`: Return the messages associated with each match in a list. -- `first-path`: Return the first path of the matches. -- `first-message`: Return the first message of the matches. -- `boolean`: Return a boolean indicating whether any matches were found. - ---- - -## Exported Functions - -- `all/3` -- `base/3` -- `graphql/3` -- `has_results/3` -- `info/1` -- `only/3` -- `test_setup/0` - ---- - -### info - -A discovery engine for searching for and returning messages found in - -```erlang -info(_Opts) -> - #{ - excludes => [<<"keys">>, <<"set">>], - default => fun default/4 - }. -``` - -### graphql - -Execute the query via GraphQL. - -```erlang -graphql(Req, Base, Opts) -> - dev_query_graphql:handle(Req, Base, Opts). -``` - -### has_results - -Return whether a GraphQL esponse in a message has transaction results. - -```erlang -has_results(Base, Req, Opts) -> - JSON = - hb_ao:get_first( - [ - {{as, <<"message@1.0">>, Base}, <<"body">>}, - {{as, <<"message@1.0">>, Req}, <<"body">>} - ], - <<"{}">>, - Opts - ), - Decoded = hb_json:decode(JSON), - ?event(debug_multi, {has_results, {decoded_json, Decoded}}), - case Decoded of - #{ <<"data">> := #{ <<"transactions">> := #{ <<"edges">> := Nodes } } } - when length(Nodes) > 0 -> - true; - _ -> false - end. -``` - -### default - -Search for the keys specified in the request message. - -```erlang -default(_, Base, Req, Opts) -> - all(Base, Req, Opts). -``` - -### all - -Search the node's store for all of the keys and values in the request, - -```erlang -all(Base, Req, Opts) -> - match(Req, Base, Req, Opts). -``` - -### base - -Search the node's store for all of the keys and values in the base - -```erlang -base(Base, Req, Opts) -> - match(Base, Base, Req, Opts). -``` - -### only - -Search only for the (list of) key(s) specified in `only` in the request. 
- -```erlang -only(Base, Req, Opts) -> - case hb_maps:get(<<"only">>, Req, not_found, Opts) of - KeyBin when is_binary(KeyBin) -> - % The descriptor is a binary, so we split it on commas to get a - % list of keys to search for. If there is only one key, we - % return a list with that key. -``` - -### match - -Match the request against the base message, using the keys to select - -```erlang -match(Keys, Base, Req, Opts) when is_list(Keys) -> - UserSpec = - maps:from_list( - lists:filtermap( - fun(Key) -> - % Search for the value in the request. If not found, - % look in the base message. -``` - -### match - -```erlang -match(UserSpec, _Base, Req, Opts) -> - ?event({matching, {spec, UserSpec}}), - FilteredSpec = - hb_maps:without( - hb_maps:get(<<"exclude">>, Req, ?DEFAULT_EXCLUDES, Opts), - UserSpec - ), - ReturnType = hb_maps:get(<<"return">>, Req, <<"paths">>, Opts), - ?event({matching, {spec, FilteredSpec}, {return, ReturnType}}), - case hb_cache:match(FilteredSpec, Opts) of - {ok, Matches} when ReturnType == <<"count">> -> - ?event({matched, {paths, Matches}}), - {ok, length(Matches)}; - {ok, Matches} when ReturnType == <<"paths">> -> - ?event({matched, {paths, Matches}}), - {ok, Matches}; - {ok, Matches} when ReturnType == <<"messages">> -> - ?event({matched, {paths, Matches}}), - Messages = - lists:map( - fun(Path) -> - hb_util:ok(hb_cache:read(Path, Opts)) - end, - Matches - ), - ?event({matched, {messages, Messages}}), - {ok, Messages}; - {ok, Matches} when ReturnType == <<"first-path">> -> - ?event({matched, {paths, Matches}}), - {ok, hd(Matches)}; - {ok, Matches} when ReturnType == <<"first">> - orelse ReturnType == <<"first-message">> -> - ?event({matched, {paths, Matches}}), - {ok, hb_util:ok(hb_cache:read(hd(Matches), Opts))}; - {ok, Matches} when ReturnType == <<"boolean">> -> - ?event({matched, {paths, Matches}}), - {ok, length(Matches) > 0}; - not_found when ReturnType == <<"boolean">> -> - {ok, false}; - not_found -> - {error, not_found} - end. 
-``` - -### test_setup - -Return test options with a test store. - -```erlang -test_setup() -> - Store = hb_test_utils:test_store(hb_store_lmdb), - Opts = #{ store => Store, priv_wallet => hb:wallet() }, - % Write a simple message. -``` - -### basic_test - -Search for and find a basic test key. - -```erlang -basic_test() -> - {ok, Opts, _} = test_setup(), - {ok, [ID]} = hb_ao:resolve(<<"~query@1.0/all?basic=binary-value">>, Opts), - {ok, Read} = hb_cache:read(ID, Opts), - ?assertEqual(<<"binary-value">>, hb_maps:get(<<"basic">>, Read)), - ?assertEqual(<<"binary-value-2">>, hb_maps:get(<<"basic-2">>, Read)), - {ok, [Msg]} = - hb_ao:resolve( - <<"~query@1.0/all?basic-2=binary-value-2&return=messages">>, - Opts - ), - ?assertEqual(<<"binary-value-2">>, hb_maps:get(<<"basic-2">>, Msg)), - ok. -``` - -### only_test - -Ensure that we can search for and match only a single key. - -```erlang -only_test() -> - {ok, Opts, _} = test_setup(), - {ok, [Msg]} = - hb_ao:resolve( - <<"~query@1.0/only=basic&basic=binary-value&wrong=1&return=messages">>, - Opts - ), - ?assertEqual(<<"binary-value">>, hb_maps:get(<<"basic">>, Msg)), - ok. -``` - -### multiple_test - -Ensure that we can specify multiple keys to match. - -```erlang -multiple_test() -> - {ok, Opts, _} = test_setup(), - {ok, [Msg]} = - hb_ao:resolve( - << - "~query@1.0/only=basic,basic-2", - "&basic=binary-value&basic-2=binary-value-2", - "&return=messages" - >>, - Opts - ), - ?assertEqual(<<"binary-value">>, hb_maps:get(<<"basic">>, Msg)), - ?assertEqual(<<"binary-value-2">>, hb_maps:get(<<"basic-2">>, Msg)), - ok. -``` - -### nested_test - -Search for and find a nested test key. 
- -```erlang -nested_test() -> - {ok, Opts, _} = test_setup(), - {ok, [MsgWithNested]} = - hb_ao:resolve( - <<"~query@1.0/all?test-key=test-value&return=messages">>, - Opts - ), - ?assert(hb_maps:is_key(<<"nested">>, MsgWithNested, Opts)), - Nested = hb_maps:get(<<"nested">>, MsgWithNested, undefined, Opts), - ?assertEqual(<<"test-value-3">>, hb_maps:get(<<"test-key-3">>, Nested, Opts)), - ?assertEqual(<<"test-value-4">>, hb_maps:get(<<"test-key-4">>, Nested, Opts)), - ok. -``` - -### list_test - -Search for and find a list message with typed elements. - -```erlang -list_test() -> - {ok, Opts, _} = test_setup(), - {ok, [Msg]} = - hb_ao:resolve( - <<"~query@1.0/all?2+integer=2&3+atom=ok&return=messages">>, - Opts - ), - ?assertEqual([<<"a">>, 2, ok], Msg), - ok. -``` - -### return_key_test - -Ensure user's can opt not to specify a key to resolve, instead specifying - -```erlang -return_key_test() -> - {ok, Opts, _} = test_setup(), - {ok, [ID]} = - hb_ao:resolve( - <<"~query@1.0/basic=binary-value">>, - Opts - ), - {ok, Msg} = hb_cache:read(ID, Opts), - ?assertEqual(<<"binary-value">>, hb_maps:get(<<"basic">>, Msg, Opts)), - ok. -``` - -### return_types_test - -Validate the functioning of various return types. - -```erlang -return_types_test() -> - {ok, Opts, _} = test_setup(), - {ok, [Msg]} = - hb_ao:resolve( - <<"~query@1.0/basic=binary-value&return=messages">>, - Opts - ), - ?assertEqual(<<"binary-value">>, hb_maps:get(<<"basic">>, Msg, Opts)), - ?assertEqual( - {ok, 1}, - hb_ao:resolve( - <<"~query@1.0/basic=binary-value&return=count">>, - Opts - ) - ), - ?assertEqual( - {ok, true}, - hb_ao:resolve( - <<"~query@1.0/basic=binary-value&return=boolean">>, - Opts - ) - ), - ?assertEqual( - {ok, <<"binary-value">>}, - hb_ao:resolve( - <<"~query@1.0/basic=binary-value&return=first-message/basic">>, - Opts - ) - ), - ok. 
-``` - -### http_test - -```erlang -http_test() -> - {ok, Opts, _} = test_setup(), - Node = hb_http_server:start_node(Opts), - {ok, Msg} = - hb_http:get( - Node, - <<"~query@1.0/only=basic&basic=binary-value?return=first">>, - Opts - ), - ?assertEqual(<<"binary-value">>, hb_maps:get(<<"basic">>, Msg, Opts)), -``` - ---- - -*Generated from [dev_query.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query.erl)* diff --git a/docs/book/src/dev_query_arweave.erl.md b/docs/book/src/dev_query_arweave.erl.md deleted file mode 100644 index f7195ca8e..000000000 --- a/docs/book/src/dev_query_arweave.erl.md +++ /dev/null @@ -1,546 +0,0 @@ -# dev_query_arweave - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query_arweave.erl) - -An implementation of the Arweave GraphQL API, inside the `~query@1.0` -device. - ---- - -## Exported Functions - -- `query/4` - ---- - -### query - -An implementation of the Arweave GraphQL API, inside the `~query@1.0` -The arguments that are supported by the Arweave GraphQL API. -Handle an Arweave GraphQL query for either transactions or blocks. - -```erlang -query(List, <<"edges">>, _Args, _Opts) -> - {ok, [{ok, Msg} || Msg <- List]}; -``` - -### query - -An implementation of the Arweave GraphQL API, inside the `~query@1.0` -The arguments that are supported by the Arweave GraphQL API. -Handle an Arweave GraphQL query for either transactions or blocks. - -```erlang -query(Msg, <<"node">>, _Args, _Opts) -> - {ok, Msg}; -``` - -### query - -An implementation of the Arweave GraphQL API, inside the `~query@1.0` -The arguments that are supported by the Arweave GraphQL API. -Handle an Arweave GraphQL query for either transactions or blocks. 
- -```erlang -query(Obj, <<"transaction">>, Args, Opts) -> - case query(Obj, <<"transactions">>, Args, Opts) of - {ok, []} -> {ok, null}; - {ok, [Msg|_]} -> {ok, Msg} - end; -``` - -### query - -An implementation of the Arweave GraphQL API, inside the `~query@1.0` -The arguments that are supported by the Arweave GraphQL API. -Handle an Arweave GraphQL query for either transactions or blocks. - -```erlang -query(Obj, <<"transactions">>, Args, Opts) -> - ?event({transactions_query, - {object, Obj}, - {field, <<"transactions">>}, - {args, Args} - }), - Matches = match_args(Args, Opts), - ?event({transactions_matches, Matches}), - Messages = - lists:filtermap( - fun(Match) -> - case hb_cache:read(Match, Opts) of - {ok, Msg} -> {true, Msg}; - not_found -> false - end - end, - Matches - ), - {ok, Messages}; -``` - -### query - -An implementation of the Arweave GraphQL API, inside the `~query@1.0` -The arguments that are supported by the Arweave GraphQL API. -Handle an Arweave GraphQL query for either transactions or blocks. - -```erlang -query(Obj, <<"block">>, Args, Opts) -> - case query(Obj, <<"blocks">>, Args, Opts) of - {ok, []} -> {ok, null}; - {ok, [Msg|_]} -> {ok, Msg} - end; -``` - -### query - -An implementation of the Arweave GraphQL API, inside the `~query@1.0` -The arguments that are supported by the Arweave GraphQL API. -Handle an Arweave GraphQL query for either transactions or blocks. - -```erlang -query(Obj, <<"blocks">>, Args, Opts) -> - ?event({blocks, - {object, Obj}, - {field, <<"blocks">>}, - {args, Args} - }), - Matches = match_args(Args, Opts), - ?event({blocks_matches, Matches}), - Blocks = - lists:filtermap( - fun(Match) -> - case hb_cache:read(Match, Opts) of - {ok, Msg} -> {true, Msg}; - not_found -> false - end - end, - Matches - ), - % Return the blocks as a list of messages. 
-``` - -### query - -```erlang -query(Block, <<"previous">>, _Args, Opts) -> - {ok, hb_maps:get(<<"previous_block">>, Block, null, Opts)}; -``` - -### query - -```erlang -query(Block, <<"height">>, _Args, Opts) -> - {ok, hb_maps:get(<<"height">>, Block, null, Opts)}; -``` - -### query - -```erlang -query(Block, <<"timestamp">>, _Args, Opts) -> - {ok, hb_maps:get(<<"timestamp">>, Block, null, Opts)}; -``` - -### query - -```erlang -query(Msg, <<"signature">>, _Args, Opts) -> - % Return the signature of the transaction. -``` - -### query - -```erlang -query(Msg, <<"owner">>, _Args, Opts) -> - ?event({query_owner, Msg}), - case hb_message:commitments(#{ <<"committer">> => '_' }, Msg, Opts) of - not_found -> {ok, null}; - Commitments -> - case hb_maps:keys(Commitments) of - [] -> {ok, null}; - [CommID | _] -> - {ok, Commitment} = hb_maps:find(CommID, Commitments, Opts), - {ok, Address} = hb_maps:find(<<"committer">>, Commitment, Opts), - {ok, KeyID} = hb_maps:find(<<"keyid">>, Commitment, Opts), - Key = dev_codec_httpsig_keyid:remove_scheme_prefix(KeyID), - {ok, #{ - <<"address">> => Address, - <<"key">> => Key - }} - end - end; -``` - -### query - -```erlang -query(#{ <<"key">> := Key }, <<"key">>, _Args, _Opts) -> - {ok, Key}; -``` - -### query - -```erlang -query(#{ <<"address">> := Address }, <<"address">>, _Args, _Opts) -> - {ok, Address}; -``` - -### query - -```erlang -query(Msg, <<"fee">>, _Args, Opts) -> - {ok, hb_maps:get(<<"fee">>, Msg, 0, Opts)}; -``` - -### query - -```erlang -query(Msg, <<"quantity">>, _Args, Opts) -> - {ok, hb_maps:get(<<"quantity">>, Msg, 0, Opts)}; -``` - -### query - -```erlang -query(Number, <<"winston">>, _Args, _Opts) when is_number(Number) -> - {ok, Number}; -``` - -### query - -```erlang -query(Msg, <<"recipient">>, _Args, Opts) -> - case find_field_key(<<"field-target">>, Msg, Opts) of - {ok, null} -> {ok, <<"">>}; - OkRes -> OkRes - end; -``` - -### query - -```erlang -query(Msg, <<"anchor">>, _Args, Opts) -> - case 
find_field_key(<<"field-anchor">>, Msg, Opts) of - {ok, null} -> {ok, <<"">>}; - {ok, Anchor} -> {ok, hb_util:human_id(Anchor)} - end; -``` - -### query - -```erlang -query(Msg, <<"data">>, _Args, Opts) -> - Data = - hb_ao:get_first( - [ - {{as, <<"message@1.0">>, Msg}, <<"data">>}, - {{as, <<"message@1.0">>, Msg}, <<"body">>} - ], - <<>>, - Opts - ), - Type = hb_maps:get(<<"content-type">>, Msg, null, Opts), - {ok, #{ <<"data">> => Data, <<"type">> => Type }}; -``` - -### query - -```erlang -query(#{ <<"data">> := Data }, <<"size">>, _Args, _Opts) -> - {ok, byte_size(Data)}; -``` - -### query - -```erlang -query(#{ <<"type">> := Type }, <<"type">>, _Args, _Opts) -> - {ok, Type}; -``` - -### query - -Find and return a value from the fields of a message (from its - -```erlang -query(Obj, Field, Args, _Opts) -> - ?event({unimplemented_transactions_query, - {object, Obj}, - {field, Field}, - {args, Args} - }), - {ok, <<"Not implemented.">>}. -``` - -### find_field_key - -Find and return a value from the fields of a message (from its - -```erlang -find_field_key(Field, Msg, Opts) -> - case hb_message:commitments(#{ Field => '_' }, Msg, Opts) of - not_found -> {ok, null}; - Commitments -> - case hb_maps:keys(Commitments) of - [] -> {ok, null}; - [CommID | _] -> - {ok, Commitment} = hb_maps:find(CommID, Commitments, Opts), - case hb_maps:find(Field, Commitment, Opts) of - {ok, Value} -> {ok, Value}; - error -> {ok, null} - end - end - end. -``` - -### match_args - -Progressively generate matches from each argument for a transaction - -```erlang -match_args(Args, Opts) when is_map(Args) -> - match_args( - maps:to_list( - maps:with( - ?SUPPORTED_QUERY_ARGS, - Args - ) - ), - [], - Opts - ). 
-``` - -### match_args - -```erlang -match_args([], Results, Opts) -> - ?event({match_args_results, Results}), - Matches = - lists:foldl( - fun(Result, Acc) -> - hb_util:list_with(resolve_ids(Result, Opts), Acc) - end, - resolve_ids(hd(Results), Opts), - tl(Results) - ), - hb_util:unique( - lists:flatten( - [ - all_ids(ID, Opts) - || - ID <- Matches - ] - ) - ); -``` - -### match_args - -```erlang -match_args([{Field, X} | Rest], Acc, Opts) -> - MatchRes = match(Field, X, Opts), - ?event({match, {field, Field}, {arg, X}, {match_res, MatchRes}}), - case MatchRes of - {ok, Result} -> - match_args(Rest, [Result | Acc], Opts); - _Error -> - match_args(Rest, Acc, Opts) - end. -``` - -### match - -Generate a match upon `tags` in the arguments, if given. - -```erlang -match(_, null, _) -> ignore; -``` - -### match - -Generate a match upon `tags` in the arguments, if given. - -```erlang -match(<<"height">>, Heights, Opts) -> - Min = hb_maps:get(<<"min">>, Heights, 0, Opts), - Max = - case hb_maps:find(<<"max">>, Heights, Opts) of - {ok, GivenMax} -> GivenMax; - error -> - {ok, Latest} = dev_arweave_block_cache:latest(Opts), - Latest - end, - #{ store := ScopedStores } = scope(Opts), - {ok, - lists:filtermap( - fun(Height) -> - Path = dev_arweave_block_cache:path(Height, Opts), - case hb_store:type(ScopedStores, Path) of - not_found -> false; - _ -> {true, hb_store:resolve(ScopedStores, Path)} - end - end, - lists:seq(Min, Max) - ) - }; -``` - -### match - -Generate a match upon `tags` in the arguments, if given. - -```erlang -match(<<"id">>, ID, _Opts) -> - {ok, [ID]}; -``` - -### match - -Generate a match upon `tags` in the arguments, if given. - -```erlang -match(<<"ids">>, IDs, _Opts) -> - {ok, IDs}; -``` - -### match - -Generate a match upon `tags` in the arguments, if given. - -```erlang -match(<<"tags">>, Tags, Opts) -> - hb_cache:match(dev_query_graphql:keys_to_template(Tags), Opts); -``` - -### match - -Generate a match upon `tags` in the arguments, if given. 
- -```erlang -match(<<"owners">>, Owners, Opts) -> - {ok, matching_commitments(<<"committer">>, Owners, Opts)}; -``` - -### match - -Generate a match upon `tags` in the arguments, if given. - -```erlang -match(<<"owner">>, Owner, Opts) -> - Res = matching_commitments(<<"committer">>, Owner, Opts), - ?event({match_owner, Owner, Res}), - {ok, Res}; -``` - -### match - -Generate a match upon `tags` in the arguments, if given. - -```erlang -match(<<"recipients">>, Recipients, Opts) -> - {ok, matching_commitments(<<"field-target">>, Recipients, Opts)}; -``` - -### match - -Generate a match upon `tags` in the arguments, if given. - -```erlang -match(UnsupportedFilter, _, _) -> - throw({unsupported_query_filter, UnsupportedFilter}). -``` - -### matching_commitments - -Return the base IDs for messages that have a matching commitment. - -```erlang -matching_commitments(Field, Values, Opts) when is_list(Values) -> - hb_util:unique(lists:flatten( - lists:map( - fun(Value) -> matching_commitments(Field, Value, Opts) end, - Values - ) - )); -``` - -### matching_commitments - -Return the base IDs for messages that have a matching commitment. - -```erlang -matching_commitments(Field, Value, Opts) when is_binary(Value) -> - case hb_cache:match(#{ Field => Value }, Opts) of - {ok, IDs} -> - ?event( - {found_matching_commitments, - {field, Field}, - {value, Value}, - {ids, IDs} - } - ), - lists:map(fun(ID) -> commitment_id_to_base_id(ID, Opts) end, IDs); - not_found -> not_found - end. -``` - -### commitment_id_to_base_id - -Convert a commitment message's ID to a base ID. - -```erlang -commitment_id_to_base_id(ID, Opts) -> - Store = hb_opts:get(store, no_store, Opts), - ?event({commitment_id_to_base_id, ID}), - case hb_store:read(Store, << ID/binary, "/signature">>) of - {ok, EncSig} -> - Sig = hb_util:decode(EncSig), - ?event({commitment_id_to_base_id_sig, Sig}), - hb_util:encode(hb_crypto:sha256(Sig)); - not_found -> not_found - end. 
-``` - -### all_ids - -Find all IDs for a message, by any of its other IDs. - -```erlang -all_ids(ID, Opts) -> - Store = hb_opts:get(store, no_store, Opts), - case hb_store:list(Store, << ID/binary, "/commitments">>) of - {ok, []} -> [ID]; - {ok, CommitmentIDs} -> CommitmentIDs; - _ -> [] - end. -``` - -### scope - -Scope the stores used for block matching. The searched stores can be - -```erlang -scope(Opts) -> - Scope = hb_opts:get(query_arweave_scope, [local], Opts), - hb_store:scope(Opts, Scope). -``` - -### resolve_ids - -Resolve a list of IDs to their store paths, using the stores provided. - -```erlang -resolve_ids(IDs, Opts) -> - Scoped = scope(Opts), - lists:map( - fun(ID) -> - case hb_cache:read(ID, Opts) of - {ok, Msg} -> hb_message:id(Msg, uncommitted, Scoped); - not_found -> ID - end - end, - IDs -``` - ---- - -*Generated from [dev_query_arweave.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query_arweave.erl)* diff --git a/docs/book/src/dev_query_graphql.erl.md b/docs/book/src/dev_query_graphql.erl.md deleted file mode 100644 index 523c48d63..000000000 --- a/docs/book/src/dev_query_graphql.erl.md +++ /dev/null @@ -1,497 +0,0 @@ -# dev_query_graphql - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query_graphql.erl) - -A GraphQL interface for querying a node's cache. Accessible through the -`~query@1.0/graphql` device key. - ---- - -## Exported Functions - -- `execute/4` -- `handle/3` -- `keys_to_template/1` -- `test_query/3` -- `test_query/4` - ---- - -### schema - -A GraphQL interface for querying a node's cache. Accessible through the -Returns the complete GraphQL schema. -Ensure that the GraphQL schema and context are initialized. Can be - -```erlang -schema() -> - hb_util:ok(file:read_file("scripts/schema.gql")). -``` - -### ensure_started - -A GraphQL interface for querying a node's cache. Accessible through the -Returns the complete GraphQL schema. 
-Ensure that the GraphQL schema and context are initialized. Can be - -```erlang -ensure_started() -> ensure_started(#{}). -``` - -### ensure_started - -A GraphQL interface for querying a node's cache. Accessible through the -Returns the complete GraphQL schema. -Ensure that the GraphQL schema and context are initialized. Can be - -```erlang -ensure_started(Opts) -> - case hb_name:lookup(graphql_controller) of - PID when is_pid(PID) -> ok; - undefined -> - Parent = self(), - PID = - spawn_link( - fun() -> - init(Opts), - Parent ! {started, self()}, - receive stop -> ok end - end - ), - receive {started, PID} -> ok - after ?START_TIMEOUT -> exit(graphql_start_timeout) - end - end. -``` - -### init - -Initialize the GraphQL schema and context. Should only be called once. - -```erlang -init(_Opts) -> - ?event(graphql_init_called), - application:ensure_all_started(graphql), - ?event(graphql_application_started), - GraphQLOpts = - #{ - scalars => #{ default => ?MODULE }, - interfaces => #{ default => ?MODULE }, - unions => #{ default => ?MODULE }, - objects => #{ default => ?MODULE }, - enums => #{ default => ?MODULE } - }, - ok = graphql:load_schema(GraphQLOpts, schema()), - ?event(graphql_schema_loaded), - Root = - {root, - #{ - query => 'Query', - interfaces => [] - } - }, - ok = graphql:insert_schema_definition(Root), - ?event(graphql_schema_definition_inserted), - ok = graphql:validate_schema(), - ?event(graphql_schema_validated), - hb_name:register(graphql_controller, self()), - ?event(graphql_controller_registered), - ok. -``` - -### handle - -```erlang -handle(_Base, RawReq, Opts) -> - ?event({request, RawReq}), - Req = - case hb_maps:find(<<"query">>, RawReq, Opts) of - {ok, _} -> RawReq; - error -> - % Parse the query, assuming that the request body is a JSON - % object with the necessary fields. 
-``` - -### execute - -The main entrypoint for resolving GraphQL elements, called by the - -```erlang -execute(#{opts := Opts}, Obj, Field, Args) -> - ?event({graphql_query, {object, Obj}, {field, Field}, {args, Args}}), - case lists:member(Field, ?MESSAGE_QUERY_KEYS) of - true -> message_query(Obj, Field, Args, Opts); - false -> dev_query_arweave:query(Obj, Field, Args, Opts) - end. -``` - -### message_query - -Handle a HyperBEAM `message` query. - -```erlang -message_query(Obj, <<"message">>, #{<<"keys">> := Keys}, Opts) -> - Template = keys_to_template(Keys), - ?event( - {graphql_execute_called, - {object, Obj}, - {field, <<"message">>}, - {raw_keys, Keys}, - {template, Template} - } - ), - case hb_cache:match(Template, Opts) of - {ok, [ID | _IDs]} -> - ?event({graphql_cache_match_found, ID}), - {ok, Msg} = hb_cache:read(ID, Opts), - ?event({graphql_cache_read, Msg}), - {ok, Msg}; - not_found -> - ?event(graphql_cache_match_not_found), - {ok, #{<<"id">> => <<"not-found">>, <<"keys">> => #{}}} - end; -``` - -### message_query - -Handle a HyperBEAM `message` query. - -```erlang -message_query(Msg, Field, _Args, Opts) when Field =:= <<"keys">>; Field =:= <<"tags">> -> - OnlyKeys = - hb_maps:to_list( - hb_private:reset( - hb_maps:without( - [<<"data">>, <<"body">>], - hb_message:uncommitted(Msg, Opts), - Opts - ) - ), - Opts - ), - ?event({message_query_keys_or_tags, {object, Msg}, {only_keys, OnlyKeys}}), - Res = { - ok, - [ - {ok, - #{ - <<"name">> => Name, - <<"value">> => hb_cache:ensure_loaded(Value, Opts) - } - } - || - {Name, Value} <- OnlyKeys - ] - }, - ?event({message_query_keys_or_tags_result, Res}), - Res; -``` - -### message_query - -Handle a HyperBEAM `message` query. 
- -```erlang -message_query(Msg, Field, _Args, Opts) - when Field =:= <<"name">> orelse Field =:= <<"value">> -> - ?event({message_query_name_or_value, {object, Msg}, {field, Field}}), - {ok, hb_maps:get(Field, Msg, null, Opts)}; -``` - -### message_query - -Handle a HyperBEAM `message` query. - -```erlang -message_query(Msg = #{ <<"independent_hash">> := _ }, <<"id">>, _Args, Opts) -> - {ok, hb_maps:get(<<"independent_hash">>, Msg, null, Opts)}; -``` - -### message_query - -Handle a HyperBEAM `message` query. - -```erlang -message_query(Msg, <<"id">>, _Args, Opts) -> - ?event({message_query_id, {object, Msg}}), - {ok, hb_message:id(Msg, all, Opts)}; -``` - -### message_query - -Handle a HyperBEAM `message` query. - -```erlang -message_query(_Msg, <<"cursor">>, _Args, _Opts) -> - {ok, <<"">>}; -``` - -### message_query - -Handle a HyperBEAM `message` query. - -```erlang -message_query(_Obj, _Field, _, _) -> - {ok, <<"Not found.">>}. -``` - -### keys_to_template - -Handle a HyperBEAM `message` query. - -```erlang -keys_to_template(Keys) -> - maps:from_list(lists:foldl( - fun(#{<<"name">> := Name, <<"value">> := Value}, Acc) -> - [{Name, Value} | Acc]; - (#{<<"name">> := Name, <<"values">> := [Value]}, Acc) -> - [{Name, Value} | Acc]; - (#{<<"name">> := Name, <<"values">> := Values}, _Acc) -> - throw( - {multivalue_tag_search_not_supported, #{ - <<"name">> => Name, - <<"values">> => Values - }} - ) - end, - [], - Keys - )). -``` - -### test_query - -```erlang -test_query(Node, Query, Opts) -> - test_query(Node, Query, undefined, Opts). -``` - -### test_query - -```erlang -test_query(Node, Query, Variables, Opts) -> - test_query(Node, Query, Variables, undefined, Opts). 
-``` - -### test_query - -```erlang -test_query(Node, Query, Variables, OperationName, Opts) -> - UnencodedPayload = - maps:filter( - fun(_, undefined) -> false; - (_, _) -> true - end, - #{ - <<"query">> => Query, - <<"variables">> => Variables, - <<"operationName">> => OperationName - } - ), - ?event({test_query_unencoded_payload, UnencodedPayload}), - {ok, Res} = - hb_http:post( - Node, - #{ - <<"path">> => <<"~query@1.0/graphql">>, - <<"content-type">> => <<"application/json">>, - <<"codec-device">> => <<"json@1.0">>, - <<"body">> => hb_json:encode(UnencodedPayload) - }, - Opts - ), - hb_json:decode(hb_maps:get(<<"body">>, Res, <<>>, Opts)). -%%% Tests -``` - -### lookup_test - -```erlang -lookup_test() -> - {ok, Opts, _} = dev_query:test_setup(), - Node = hb_http_server:start_node(Opts), - Query = - <<""" - query GetMessage { - message( - keys: - [ - { - name: "basic", - value: "binary-value" - } - ] - ) { - id - keys { - name - value - } - } - } - """>>, - Res = test_query(Node, Query, Opts), - ?event({test_response, Res}), - ?assertMatch( - #{ <<"data">> := - #{ - <<"message">> := - #{ - <<"id">> := _, - <<"keys">> := - [ - #{ - <<"name">> := <<"basic">>, - <<"value">> := <<"binary-value">> - }, - #{ - <<"name">> := <<"basic-2">>, - <<"value">> := <<"binary-value-2">> - } - ] - } - } - }, - Res - ). 
-``` - -### lookup_with_vars_test - -```erlang -lookup_with_vars_test() -> - {ok, Opts, _} = dev_query:test_setup(), - Node = hb_http_server:start_node(Opts), - {ok, Res} = - hb_http:post( - Node, - #{ - <<"path">> => <<"~query@1.0/graphql">>, - <<"content-type">> => <<"application/json">>, - <<"codec-device">> => <<"json@1.0">>, - <<"body">> => - hb_json:encode(#{ - <<"query">> => - <<""" - query GetMessage($keys: [KeyInput]) { - message( - keys: $keys - ) { - id - keys { - name - value - } - } - } - """>>, - <<"operationName">> => <<"GetMessage">>, - <<"variables">> => #{ - <<"keys">> => - [ - #{ - <<"name">> => <<"basic">>, - <<"value">> => <<"binary-value">> - } - ] - } - }) - }, - Opts - ), - Object = hb_json:decode(hb_maps:get(<<"body">>, Res, <<>>, Opts)), - ?event({test_response, Object}), - ?assertMatch( - #{ <<"data">> := - #{ - <<"message">> := - #{ - <<"id">> := _, - <<"keys">> := - [ - #{ - <<"name">> := <<"basic">>, - <<"value">> := <<"binary-value">> - }, - #{ - <<"name">> := <<"basic-2">>, - <<"value">> := <<"binary-value-2">> - } - ] - } - } - }, - Object - ). 
-``` - -### lookup_without_opname_test - -```erlang -lookup_without_opname_test() -> - {ok, Opts, _} = dev_query:test_setup(), - Node = hb_http_server:start_node(Opts), - {ok, Res} = - hb_http:post( - Node, - #{ - <<"path">> => <<"~query@1.0/graphql">>, - <<"content-type">> => <<"application/json">>, - <<"codec-device">> => <<"json@1.0">>, - <<"body">> => - hb_json:encode(#{ - <<"query">> => - <<""" - query($keys: [KeyInput]) { - message( - keys: $keys - ) { - id - keys { - name - value - } - } - } - """>>, - <<"variables">> => #{ - <<"keys">> => - [ - #{ - <<"name">> => <<"basic">>, - <<"value">> => <<"binary-value">> - } - ] - } - }) - }, - Opts - ), - Object = hb_json:decode(hb_maps:get(<<"body">>, Res, <<>>, Opts)), - ?event({test_response, Object}), - ?assertMatch( - #{ <<"data">> := - #{ - <<"message">> := - #{ - <<"id">> := _, - <<"keys">> := - [ - #{ - <<"name">> := <<"basic">>, - <<"value">> := <<"binary-value">> - }, - #{ - <<"name">> := <<"basic-2">>, - <<"value">> := <<"binary-value-2">> - } - ] - } - } - }, - Object -``` - ---- - -*Generated from [dev_query_graphql.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query_graphql.erl)* diff --git a/docs/book/src/dev_query_test_vectors.erl.md b/docs/book/src/dev_query_test_vectors.erl.md deleted file mode 100644 index 3fa562c81..000000000 --- a/docs/book/src/dev_query_test_vectors.erl.md +++ /dev/null @@ -1,842 +0,0 @@ -# dev_query_test_vectors - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query_test_vectors.erl) - -A suite of test queries and responses for the `~query@1.0` device's -GraphQL implementation. 
- ---- - -### write_test_message - -A suite of test queries and responses for the `~query@1.0` device's - -```erlang -write_test_message(Opts) -> - hb_cache:write( - Msg = hb_message:commit( - #{ - <<"data-protocol">> => <<"ao">>, - <<"variant">> => <<"ao.N.1">>, - <<"type">> => <<"Message">>, - <<"action">> => <<"Eval">>, - <<"data">> => <<"test data">> - }, - Opts, - #{ - <<"commitment-device">> => <<"ans104@1.0">> - } - ), - Opts - ), - {ok, Msg}. -``` - -### get_test_blocks - -Populate the cache with three test blocks. - -```erlang -get_test_blocks(Node) -> - InitialHeight = 1745749, - FinalHeight = 1745750, - lists:foreach( - fun(Height) -> - {ok, _} = - hb_http:request( - <<"GET">>, - Node, - <<"/~arweave@2.9-pre/block=", (hb_util:bin(Height))/binary>>, - #{} - ) - end, - lists:seq(InitialHeight, FinalHeight) - ). -``` - -### write_test_message_with_recipient - -```erlang -write_test_message_with_recipient(Recipient, Opts) -> - hb_cache:write( - Msg = hb_message:commit( - #{ - <<"data-protocol">> => <<"ao">>, - <<"variant">> => <<"ao.N.1">>, - <<"type">> => <<"Message">>, - <<"action">> => <<"Eval">>, - <<"content-type">> => <<"text/plain">>, - <<"data">> => <<"test data">>, - <<"target">> => Recipient - }, - Opts, - #{ - <<"commitment-device">> => <<"ans104@1.0">> - } - ), - Opts - ), - {ok, Msg}. 
-``` - -### simple_blocks_query_test - -```erlang -simple_blocks_query_test() -> - Opts = - #{ - priv_wallet => hb:wallet(), - store => [hb_test_utils:test_store(hb_store_lmdb)] - }, - Node = hb_http_server:start_node(Opts), - get_test_blocks(Node), - Query = - <<""" - query { - blocks( - ids: ["V7yZNKPQLIQfUu8r8-lcEaz4o7idl6LTHn5AHlGIFF8TKfxIe7s_yFxjqan6OW45"] - ) { - edges { - node { - id - previous - height - timestamp - } - } - } - } - """>>, - ?assertMatch( - #{ - <<"data">> := #{ - <<"blocks">> := #{ - <<"edges">> := [ - #{ - <<"node">> := #{ - <<"id">> := _, - <<"previous">> := _, - <<"height">> := 1745749, - <<"timestamp">> := 1756866695 - } - } - ] - } - } - }, - dev_query_graphql:test_query(Node, Query, #{}, Opts) - ). -``` - -### block_by_height_query_test - -```erlang -block_by_height_query_test() -> - Opts = - #{ - priv_wallet => hb:wallet(), - store => [hb_test_utils:test_store(hb_store_lmdb)] - }, - Node = hb_http_server:start_node(Opts), - get_test_blocks(Node), - Query = - <<""" - query { - blocks( height: {min: 1745749, max: 1745750} ) { - edges { - node { - id - previous - height - timestamp - } - } - } - } - """>>, - ?assertMatch( - #{ - <<"data">> := #{ - <<"blocks">> := #{ - <<"edges">> := [ - #{ - <<"node">> := #{ - <<"id">> := _, - <<"previous">> := _, - <<"height">> := 1745749, - <<"timestamp">> := 1756866695 - } - }, - #{ - <<"node">> := #{ - <<"id">> := _, - <<"previous">> := _, - <<"height">> := 1745750, - <<"timestamp">> := _ - } - } - ] - } - } - }, - dev_query_graphql:test_query(Node, Query, #{}, Opts) - ). 
-``` - -### simple_ans104_query_test - -```erlang -simple_ans104_query_test() -> - Opts = - #{ - priv_wallet => hb:wallet(), - store => [hb_test_utils:test_store(hb_store_lmdb)] - }, - Node = hb_http_server:start_node(Opts), - {ok, WrittenMsg} = write_test_message(Opts), - ?assertMatch( - {ok, [_]}, - hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) - ), - Query = - <<""" - query($owners: [String!]) { - transactions( - tags: - [ - {name: "type" values: ["Message"]}, - {name: "variant" values: ["ao.N.1"]} - ], - owners: $owners - ) { - edges { - node { - id, - tags { - name, - value - } - } - } - } - } - """>>, - Res = - dev_query_graphql:test_query( - Node, - Query, - #{ - <<"owners">> => [hb:address()] - }, - Opts - ), - ExpectedID = hb_message:id(WrittenMsg, all, Opts), - ?event({expected_id, ExpectedID}), - ?event({simple_ans104_query_test, Res}), - ?assertMatch( - #{ - <<"data">> := #{ - <<"transactions">> := #{ - <<"edges">> := - [#{ - <<"node">> := - #{ - <<"id">> := ExpectedID, - <<"tags">> := - [#{ <<"name">> := _, <<"value">> := _ }|_] - } - }] - } - } - } when ?IS_ID(ExpectedID), - Res - ). 
-``` - -### transactions_query_tags_test - -Test transactions query with tags filter - -```erlang -transactions_query_tags_test() -> - Opts = - #{ - priv_wallet => hb:wallet(), - store => [hb_test_utils:test_store(hb_store_lmdb)] - }, - Node = hb_http_server:start_node(Opts), - {ok, WrittenMsg} = write_test_message(Opts), - ?assertMatch( - {ok, [_]}, - hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) - ), - Query = - <<""" - query { - transactions( - tags: [ - {name: "type", values: ["Message"]}, - {name: "variant", values: ["ao.N.1"]} - ] - ) { - edges { - node { - id - tags { - name - value - } - } - } - } - } - """>>, - Res = - dev_query_graphql:test_query( - Node, - Query, - #{}, - Opts - ), - ExpectedID = hb_message:id(WrittenMsg, all, Opts), - ?event({expected_id, ExpectedID}), - ?event({transactions_query_tags_test, Res}), - ?assertMatch( - #{ - <<"data">> := #{ - <<"transactions">> := #{ - <<"edges">> := - [#{ - <<"node">> := - #{ - <<"id">> := ExpectedID, - <<"tags">> := - [#{ <<"name">> := _, <<"value">> := _ }|_] - } - }] - } - } - } when ?IS_ID(ExpectedID), - Res - ). 
-``` - -### transactions_query_owners_test - -Test transactions query with owners filter - -```erlang -transactions_query_owners_test() -> - Opts = - #{ - priv_wallet => hb:wallet(), - store => [hb_test_utils:test_store(hb_store_lmdb)] - }, - Node = hb_http_server:start_node(Opts), - {ok, WrittenMsg} = write_test_message(Opts), - ?assertMatch( - {ok, [_]}, - hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) - ), - Query = - <<""" - query($owners: [String!]) { - transactions( - owners: $owners - ) { - edges { - node { - id - tags { - name - value - } - } - } - } - } - """>>, - Res = - dev_query_graphql:test_query( - Node, - Query, - #{ - <<"owners">> => [hb:address()] - }, - Opts - ), - ExpectedID = hb_message:id(WrittenMsg, all, Opts), - ?event({expected_id, ExpectedID}), - ?event({transactions_query_owners_test, Res}), - ?assertMatch( - #{ - <<"data">> := #{ - <<"transactions">> := #{ - <<"edges">> := - [#{ - <<"node">> := - #{ - <<"id">> := ExpectedID, - <<"tags">> := - [#{ <<"name">> := _, <<"value">> := _ }|_] - } - }] - } - } - } when ?IS_ID(ExpectedID), - Res - ). 
-``` - -### transactions_query_recipients_test - -Test transactions query with recipients filter - -```erlang -transactions_query_recipients_test() -> - Opts = - #{ - priv_wallet => hb:wallet(), - store => [hb_test_utils:test_store(hb_store_lmdb)] - }, - Node = hb_http_server:start_node(Opts), - Alice = ar_wallet:new(), - ?event({alice, Alice, {explicit, hb_util:human_id(Alice)}}), - AliceAddress = hb_util:human_id(Alice), - {ok, WrittenMsg} = write_test_message_with_recipient(AliceAddress, Opts), - ?assertMatch( - {ok, [_]}, - hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) - ), - Query = - <<""" - query($recipients: [String!]) { - transactions( - recipients: $recipients - ) { - edges { - node { - id - tags { - name - value - } - } - } - } - } - """>>, - Res = - dev_query_graphql:test_query( - Node, - Query, - #{ - <<"recipients">> => [AliceAddress] - }, - Opts - ), - ExpectedID = hb_message:id(WrittenMsg, all, Opts), - ?event({expected_id, ExpectedID}), - ?event({transactions_query_recipients_test, Res}), - ?assertMatch( - #{ - <<"data">> := #{ - <<"transactions">> := #{ - <<"edges">> := - [#{ - <<"node">> := - #{ - <<"id">> := ExpectedID, - <<"tags">> := - [#{ <<"name">> := _, <<"value">> := _ }|_] - } - }] - } - } - } when ?IS_ID(ExpectedID), - Res - ). 
-``` - -### transactions_query_ids_test - -Test transactions query with ids filter - -```erlang -transactions_query_ids_test() -> - Opts = - #{ - priv_wallet => hb:wallet(), - store => [hb_test_utils:test_store(hb_store_lmdb)] - }, - Node = hb_http_server:start_node(Opts), - {ok, WrittenMsg} = write_test_message(Opts), - ExpectedID = hb_message:id(WrittenMsg, all, Opts), - ?assertMatch( - {ok, [_]}, - hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) - ), - Query = - <<""" - query($ids: [ID!]) { - transactions( - ids: $ids - ) { - edges { - node { - id - tags { - name - value - } - } - } - } - } - """>>, - Res = - dev_query_graphql:test_query( - Node, - Query, - #{ - <<"ids">> => [ExpectedID] - }, - Opts - ), - ?event({expected_id, ExpectedID}), - ?event({transactions_query_ids_test, Res}), - ?assertMatch( - #{ - <<"data">> := #{ - <<"transactions">> := #{ - <<"edges">> := - [#{ - <<"node">> := - #{ - <<"id">> := ExpectedID, - <<"tags">> := - [#{ <<"name">> := _, <<"value">> := _ }|_] - } - }] - } - } - } when ?IS_ID(ExpectedID), - Res - ). 
-``` - -### transactions_query_combined_test - -Test transactions query with combined filters - -```erlang -transactions_query_combined_test() -> - Opts = - #{ - priv_wallet => hb:wallet(), - store => [hb_test_utils:test_store(hb_store_lmdb)] - }, - Node = hb_http_server:start_node(Opts), - {ok, WrittenMsg} = write_test_message(Opts), - ExpectedID = hb_message:id(WrittenMsg, all, Opts), - ?assertMatch( - {ok, [_]}, - hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) - ), - Query = - <<""" - query($owners: [String!], $ids: [ID!]) { - transactions( - owners: $owners, - ids: $ids, - tags: [ - {name: "type", values: ["Message"]} - ] - ) { - edges { - node { - id - tags { - name - value - } - } - } - } - } - """>>, - Res = - dev_query_graphql:test_query( - Node, - Query, - #{ - <<"owners">> => [hb:address()], - <<"ids">> => [ExpectedID] - }, - Opts - ), - ?event({expected_id, ExpectedID}), - ?event({transactions_query_combined_test, Res}), - ?assertMatch( - #{ - <<"data">> := #{ - <<"transactions">> := #{ - <<"edges">> := - [#{ - <<"node">> := - #{ - <<"id">> := ExpectedID, - <<"tags">> := - [#{ <<"name">> := _, <<"value">> := _ }|_] - } - }] - } - } - } when ?IS_ID(ExpectedID), - Res - ). -``` - -### transaction_query_by_id_test - -Test single transaction query by ID - -```erlang -transaction_query_by_id_test() -> - Opts = - #{ - priv_wallet => hb:wallet(), - store => [hb_test_utils:test_store(hb_store_lmdb)] - }, - Node = hb_http_server:start_node(Opts), - {ok, WrittenMsg} = write_test_message(Opts), - ExpectedID = hb_message:id(WrittenMsg, all, Opts), - ?assertMatch( - {ok, [_]}, - hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) - ), - Query = - <<""" - query($id: ID!) 
{ - transaction(id: $id) { - id - tags { - name - value - } - } - } - """>>, - Res = - dev_query_graphql:test_query( - Node, - Query, - #{ - <<"id">> => ExpectedID - }, - Opts - ), - ?event({expected_id, ExpectedID}), - ?event({transaction_query_by_id_test, Res}), - ?assertMatch( - #{ - <<"data">> := #{ - <<"transaction">> := #{ - <<"id">> := ExpectedID, - <<"tags">> := - [#{ <<"name">> := _, <<"value">> := _ }|_] - } - } - } when ?IS_ID(ExpectedID), - Res - ). -``` - -### transaction_query_full_test - -Test single transaction query with more fields - -```erlang -transaction_query_full_test() -> - Opts = - #{ - priv_wallet => SenderKey = hb:wallet(), - store => [hb_test_utils:test_store(hb_store_lmdb)] - }, - Node = hb_http_server:start_node(Opts), - Alice = ar_wallet:new(), - ?event({alice, Alice, {explicit, hb_util:human_id(Alice)}}), - AliceAddress = hb_util:human_id(Alice), - SenderAddress = hb_util:human_id(SenderKey), - SenderPubKey = hb_util:encode(ar_wallet:to_pubkey(SenderKey)), - {ok, WrittenMsg} = write_test_message_with_recipient(AliceAddress, Opts), - ExpectedID = hb_message:id(WrittenMsg, all, Opts), - ?assertMatch( - {ok, [_]}, - hb_cache:match(#{<<"type">> => <<"Message">>}, Opts) - ), - Query = - <<""" - query($id: ID!) 
{ - transaction(id: $id) { - id - anchor - signature - recipient - owner { - address - key - } - tags { - name - value - } - data { - size - type - } - } - } - """>>, - Res = - dev_query_graphql:test_query( - Node, - Query, - #{ - <<"id">> => ExpectedID - }, - Opts - ), - ?event({expected_id, ExpectedID}), - ?event({transaction_query_full_test, Res}), - ?assertMatch( - #{ - <<"data">> := #{ - <<"transaction">> := #{ - <<"id">> := ExpectedID, - <<"recipient">> := AliceAddress, - <<"anchor">> := <<"">>, - <<"owner">> := #{ - <<"address">> := SenderAddress, - <<"key">> := SenderPubKey - }, - <<"data">> := #{ - <<"size">> := <<"9">>, - <<"type">> := <<"text/plain">> - }, - <<"tags">> := - [#{ <<"name">> := _, <<"value">> := _ }|_] - % Note: other fields may be "Not implemented." for now - } - } - } when ?IS_ID(ExpectedID), - Res - ). -``` - -### transaction_query_not_found_test - -Test single transaction query with non-existent ID - -```erlang -transaction_query_not_found_test() -> - Opts = - #{ - priv_wallet => hb:wallet(), - store => [hb_test_utils:test_store(hb_store_lmdb)] - }, - Res = - dev_query_graphql:test_query( - hb_http_server:start_node(Opts), - <<""" - query($id: ID!) { - transaction(id: $id) { - id - tags { - name - value - } - } - } - """>>, - #{ - <<"id">> => hb_util:encode(crypto:strong_rand_bytes(32)) - }, - Opts - ), - % Should return null for non-existent transaction - ?assertMatch( - #{ - <<"data">> := #{ - <<"transaction">> := null - } - }, - Res - ). -``` - -### transaction_query_with_anchor_test - -Test parsing, storing, and querying a transaction with an anchor. 
- -```erlang -transaction_query_with_anchor_test() -> - Opts = - #{ - priv_wallet => hb:wallet(), - store => [hb_test_utils:test_store(hb_store_lmdb)] - }, - Node = hb_http_server:start_node(Opts), - {ok, ID} = - hb_cache:write( - hb_message:convert( - ar_bundles:sign_item( - #tx { - anchor = AnchorID = crypto:strong_rand_bytes(32), - data = <<"test-data">> - }, - hb:wallet() - ), - <<"structured@1.0">>, - <<"ans104@1.0">>, - Opts - ), - Opts - ), - EncodedAnchor = hb_util:encode(AnchorID), - Query = - <<""" - query($id: ID!) { - transaction(id: $id) { - data { - size - type - } - anchor - } - } - """>>, - Res = - dev_query_graphql:test_query( - Node, - Query, - #{ - <<"id">> => ID - }, - Opts - ), - ?event({transaction_query_with_anchor_test, Res}), - ?assertMatch( - #{ - <<"data">> := #{ - <<"transaction">> := #{ - <<"anchor">> := EncodedAnchor - } - } - }, - Res -``` - ---- - -*Generated from [dev_query_test_vectors.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_query_test_vectors.erl)* diff --git a/docs/book/src/dev_relay.erl.md b/docs/book/src/dev_relay.erl.md deleted file mode 100644 index 262e1ee65..000000000 --- a/docs/book/src/dev_relay.erl.md +++ /dev/null @@ -1,301 +0,0 @@ -# dev_relay - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_relay.erl) - -This module implements the relay device, which is responsible for -relaying messages between nodes and other HTTP(S) endpoints. -It can be called in either `call` or `cast` mode. In `call` mode, it -returns a `{ok, Result}` tuple, where `Result` is the response from the -remote peer to the message sent. In `cast` mode, the invocation returns -immediately, and the message is relayed asynchronously. No response is given -and the device returns `{ok, <<"OK">>}`. -Example usage: -
-    curl /~relay@.1.0/call?method=GET?0.path=https://www.arweave.net/
-
- ---- - -## Exported Functions - -- `call/3` -- `cast/3` -- `request/3` - ---- - -### call - -This module implements the relay device, which is responsible for -Execute a `call` request using a node's routes. - -```erlang -call(M1, RawM2, Opts) -> - ?event({relay_call, {m1, M1}, {raw_m2, RawM2}}), - {ok, BaseTarget} = hb_message:find_target(M1, RawM2, Opts), - ?event({relay_call, {message_to_relay, BaseTarget}}), - RelayPath = - hb_ao:get_first( - [ - {M1, <<"path">>}, - {{as, <<"message@1.0">>, BaseTarget}, <<"path">>}, - {RawM2, <<"relay-path">>}, - {M1, <<"relay-path">>} - ], - Opts - ), - RelayDevice = - hb_ao:get_first( - [ - {M1, <<"relay-device">>}, - {{as, <<"message@1.0">>, BaseTarget}, <<"relay-device">>}, - {RawM2, <<"relay-device">>} - ], - Opts - ), - RelayPeer = - hb_ao:get_first( - [ - {M1, <<"peer">>}, - {{as, <<"message@1.0">>, BaseTarget}, <<"peer">>}, - {RawM2, <<"peer">>} - ], - Opts - ), - RelayMethod = - hb_ao:get_first( - [ - {M1, <<"method">>}, - {{as, <<"message@1.0">>, BaseTarget}, <<"method">>}, - {RawM2, <<"relay-method">>}, - {M1, <<"relay-method">>}, - {RawM2, <<"method">>} - ], - Opts - ), - RelayBody = - hb_ao:get_first( - [ - {M1, <<"body">>}, - {{as, <<"message@1.0">>, BaseTarget}, <<"body">>}, - {RawM2, <<"relay-body">>}, - {M1, <<"relay-body">>}, - {RawM2, <<"body">>} - ], - Opts - ), - Commit = - hb_ao:get_first( - [ - {{as, <<"message@1.0">>, BaseTarget}, <<"commit-request">>}, - {RawM2, <<"relay-commit-request">>}, - {M1, <<"relay-commit-request">>}, - {RawM2, <<"commit-request">>}, - {M1, <<"commit-request">>} - ], - false, - Opts - ), - TargetMod1 = - if RelayBody == not_found -> BaseTarget; - true -> BaseTarget#{<<"body">> => RelayBody} - end, - TargetMod2 = - TargetMod1#{ - <<"method">> => RelayMethod, - <<"path">> => RelayPath - }, - TargetMod3 = - case RelayDevice of - not_found -> hb_maps:without([<<"device">>], TargetMod2); - _ -> TargetMod2#{<<"device">> => RelayDevice} - end, - TargetMod4 = - case Commit of - true 
-> - case hb_opts:get(relay_allow_commit_request, false, Opts) of - true -> - ?event(debug_relay, {recommitting, TargetMod3}, Opts), - Committed = hb_message:commit(TargetMod3, Opts), - ?event(debug_relay, {relay_call, {committed, Committed}}, Opts), - true = hb_message:verify(Committed, all), - Committed; - false -> - throw(relay_commit_request_not_allowed) - end; - false -> TargetMod3 - end, - ?event(debug_relay, {relay_call, {without_http_params, TargetMod3}}), - ?event(debug_relay, {relay_call, {with_http_params, TargetMod4}}), - true = hb_message:verify(TargetMod4), - ?event(debug_relay, {relay_call, {verified, true}}), - Client = - case hb_maps:get(<<"http-client">>, BaseTarget, not_found, Opts) of - not_found -> hb_opts:get(relay_http_client, Opts); - RequestedClient -> RequestedClient - end, - % Let `hb_http:request/2' handle finding the peer and dispatching the - % request, unless the peer is explicitly given. -``` - -### cast - -Execute a request in the same way as `call/3`, but asynchronously. Always -Preprocess a request to check if it should be relayed to a different node. - -```erlang -cast(M1, M2, Opts) -> - spawn(fun() -> call(M1, M2, Opts) end), - {ok, <<"OK">>}. -``` - -### request - -Execute a request in the same way as `call/3`, but asynchronously. Always -Preprocess a request to check if it should be relayed to a different node. - -```erlang -request(_Msg1, Msg2, Opts) -> - {ok, - #{ - <<"body">> => - [ - #{ <<"device">> => <<"relay@1.0">> }, - #{ - <<"path">> => <<"call">>, - <<"target">> => <<"body">>, - <<"body">> => - hb_ao:get(<<"request">>, Msg2, Opts#{ hashpath => ignore }) - } - ] - } - }. 
-``` - -### call_get_test - -```erlang -call_get_test() -> - application:ensure_all_started([hb]), - {ok, #{<<"body">> := Body}} = - hb_ao:resolve( - #{ - <<"device">> => <<"relay@1.0">>, - <<"method">> => <<"GET">>, - <<"path">> => <<"https://www.google.com/">> - }, - <<"call">>, - #{ protocol => http2 } - ), - ?assertEqual(true, byte_size(Body) > 10_000). -``` - -### relay_nearest_test - -```erlang -relay_nearest_test() -> - Peer1 = hb_http_server:start_node(#{ priv_wallet => W1 = ar_wallet:new() }), - Peer2 = hb_http_server:start_node(#{ priv_wallet => W2 = ar_wallet:new() }), - Address1 = hb_util:human_id(ar_wallet:to_address(W1)), - Address2 = hb_util:human_id(ar_wallet:to_address(W2)), - Peers = [Address1, Address2], - Node = - hb_http_server:start_node(Opts = #{ - store => hb_opts:get(store), - priv_wallet => ar_wallet:new(), - routes => [ - #{ - <<"template">> => <<"/.*">>, - <<"strategy">> => <<"Nearest">>, - <<"nodes">> => [ - #{ - <<"prefix">> => Peer1, - <<"wallet">> => Address1 - }, - #{ - <<"prefix">> => Peer2, - <<"wallet">> => Address2 - } - ] - } - ] - }), - {ok, RelayRes} = - hb_http:get( - Node, - <<"/~relay@1.0/call?relay-path=/~meta@1.0/info">>, - Opts#{ http_only_result => false } - ), - ?event( - {relay_res, - {response, RelayRes}, - {signer, hb_message:signers(RelayRes, Opts)}, - {peers, Peers} - } - ), - HasValidSigner = - lists:any( - fun(Peer) -> - lists:member(Peer, hb_message:signers(RelayRes, Opts)) - end, - Peers - ), - ?assert(HasValidSigner). -``` - -### commit_request_test - -Test that a `relay@1.0/call` correctly commits requests as specified. 
- -```erlang -commit_request_test() -> - Port = 10000 + rand:uniform(10000), - Wallet = ar_wallet:new(), - Executor = - hb_http_server:start_node( - #{ - port => Port, - force_signed_requests => true - } - ), - Node = - hb_http_server:start_node(#{ - priv_wallet => Wallet, - relay_allow_commit_request => true, - routes => - [ - #{ - <<"template">> => <<"/test-key">>, - <<"strategy">> => <<"Nearest">>, - <<"nodes">> => [ - #{ - <<"wallet">> => hb_util:human_id(Wallet), - <<"prefix">> => Executor - } - ] - } - ], - on => #{ - <<"request">> => - #{ - <<"device">> => <<"router@1.0">>, - <<"path">> => <<"preprocess">>, - <<"commit-request">> => true - } - } - }), - {ok, Res} = - hb_http:get( - Node, - #{ - <<"path">> => <<"test-key">>, - <<"test-key">> => <<"value">> - }, - #{} - ), - ?event({res, Res}), -``` - ---- - -*Generated from [dev_relay.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_relay.erl)* diff --git a/docs/book/src/dev_router.erl.md b/docs/book/src/dev_router.erl.md deleted file mode 100644 index 5249f8434..000000000 --- a/docs/book/src/dev_router.erl.md +++ /dev/null @@ -1,1450 +0,0 @@ -# dev_router - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_router.erl) - -A device that routes outbound messages from the node to their -appropriate network recipients via HTTP. All messages are initially -routed to a single process per node, which then load-balances them -between downstream workers that perform the actual requests. -The routes for the router are defined in the `routes` key of the `Opts`, -as a precidence-ordered list of maps. The first map that matches the -message will be used to determine the route. -Multiple nodes can be specified as viable for a single route, with the -`Choose` key determining how many nodes to choose from the list (defaulting -to 1). The `Strategy` key determines the load distribution strategy, -which can be one of `Random`, `By-Base`, or `Nearest`. 
The route may also -define additional parallel execution parameters, which are used by the -`hb_http` module to manage control of requests. -The structure of the routes should be as follows: -
-    Node?: The node to route the message to.
-    Nodes?: A list of nodes to route the message to.
-    Strategy?: The load distribution strategy to use.
-    Choose?: The number of nodes to choose from the list.
-    Template?: A message template to match the message against, either as a
-               map or a path regex.
-
- ---- - -## Exported Functions - -- `info/1` -- `info/3` -- `match/3` -- `preprocess/3` -- `register/3` -- `route/2` -- `route/3` -- `routes/3` - ---- - -### info - -A device that routes outbound messages from the node to their -Exported function for getting device info, controls which functions are - -```erlang -info(_) -> - #{ exports => [info, routes, route, match, register, preprocess] }. -``` - -### info - -HTTP info response providing information about this device - -```erlang -info(_Msg1, _Msg2, _Opts) -> - InfoBody = #{ - <<"description">> => <<"Router device for handling outbound message routing">>, - <<"version">> => <<"1.0">>, - <<"api">> => #{ - <<"info">> => #{ - <<"description">> => <<"Get device info">> - }, - <<"routes">> => #{ - <<"description">> => <<"Get or add routes">>, - <<"method">> => <<"GET or POST">> - }, - <<"route">> => #{ - <<"description">> => <<"Find a route for a message">>, - <<"required_params">> => #{ - <<"route-path">> => <<"Path to route">> - } - }, - <<"match">> => #{ - <<"description">> => <<"Match a message against available routes">> - }, - <<"register">> => #{ - <<"description">> => <<"Register a route with a remote router node">>, - <<"node-message">> => #{ - <<"routes">> => - [ - #{ - <<"registration-peer">> => <<"Location of the router peer">>, - <<"prefix">> => <<"Prefix for the route">>, - <<"price">> => <<"Price for the route">>, - <<"template">> => <<"Template to match the route">> - } - ] - } - }, - <<"preprocess">> => #{ - <<"description">> => <<"Preprocess a request to check if it should be relayed">> - } - } - }, - {ok, InfoBody}. -``` - -### register - -Register function that allows telling the current node to register -Device function that returns all known routes. 
- -```erlang -register(_M1, M2, Opts) -> - %% Extract all required parameters from options - %% These values will be used to construct the registration message - RouterOpts = hb_opts:get(router_opts, #{}, Opts), - RouterRegMsgs = - case hb_maps:get(<<"offered">>, RouterOpts, #{}, Opts) of - RegList when is_list(RegList) -> RegList; - RegMsg when is_map(RegMsg) -> [RegMsg] - end, - lists:foreach( - fun(RegMsg) -> - RouterNode = - hb_ao:get( - <<"registration-peer">>, - RegMsg, - not_found, - Opts - ), - {ok, SigOpts} = - case hb_ao:get(<<"as">>, M2, not_found, Opts) of - not_found -> {ok, Opts}; - AsID -> hb_opts:as(AsID, Opts) - end, - % Post registration request to the router node - % The message includes our route details and attestation - % for verification - {ok, Res} = - hb_http:post( - RouterNode, - <<"/~router@1.0/routes">>, - hb_message:commit( - #{ - <<"subject">> => <<"self">>, - <<"action">> => <<"register">>, - <<"route">> => RegMsg - }, - SigOpts - ), - Opts - ), - ?event({registered, {msg, M2}, {res, Res}}), - {ok, <<"Route registered.">>} - end, - RouterRegMsgs - ), - {ok, <<"Routes registered.">>}. -``` - -### routes - -Register function that allows telling the current node to register -Device function that returns all known routes. - -```erlang -routes(M1, M2, Opts) -> - ?event({routes_msg, M1, M2}), - Routes = load_routes(Opts), - ?event({routes, Routes}), - case hb_ao:get(<<"method">>, M2, Opts) of - <<"POST">> -> - RouterOpts = hb_opts:get(router_opts, #{}, Opts), - ?event(debug_route_reg, {router_opts, RouterOpts}), - case hb_maps:get(<<"registrar">>, RouterOpts, not_found, Opts) of - not_found -> - % There is no registrar; register if and only if the message - % is signed by an authorized operator. -``` - -### route - -Find the appropriate route for the given message. If we are able to - -```erlang -route(Msg, Opts) -> route(undefined, Msg, Opts). -``` - -### route - -Find the appropriate route for the given message. 
If we are able to - -```erlang -route(_, Msg, Opts) -> - Routes = load_routes(Opts), - R = match_routes(Msg, Routes, Opts), - ?event({find_route, {msg, Msg}, {routes, Routes}, {res, R}}), - case (R =/= no_matches) andalso hb_ao:get(<<"node">>, R, Opts) of - false -> {error, no_matches}; - Node when is_binary(Node) -> {ok, Node}; - Node when is_map(Node) -> apply_route(Msg, Node, Opts); - not_found -> - ModR = apply_routes(Msg, R, Opts), - case hb_ao:get(<<"strategy">>, R, Opts) of - not_found -> {ok, ModR}; - <<"All">> -> {ok, ModR}; - Strategy -> - ChooseN = hb_ao:get(<<"choose">>, R, 1, Opts), - % Get the first element of the path -- the `base' message - % of the request. -``` - -### load_routes - -Load the current routes for the node. Allows either explicit routes from - -```erlang -load_routes(Opts) -> - RouterOpts = hb_opts:get(router_opts, #{}, Opts), - case hb_maps:get(<<"provider">>, RouterOpts, not_found, Opts) of - not_found -> hb_opts:get(routes, [], Opts); - RoutesProvider -> - ProviderMsgs = hb_singleton:from(RoutesProvider, Opts), - ?event({<<"provider">>, ProviderMsgs}), - case hb_ao:resolve_many(ProviderMsgs, Opts) of - {ok, Routes} -> hb_cache:ensure_all_loaded(Routes, Opts); - {error, Error} -> throw({routes, routes_provider_failed, Error}) - end - end. -``` - -### extract_base - -Extract the base message ID from a request message. Produces a single - -```erlang -extract_base(#{ <<"path">> := Path }, Opts) -> - extract_base(Path, Opts); -``` - -### extract_base - -Extract the base message ID from a request message. Produces a single - -```erlang -extract_base(RawPath, Opts) when is_binary(RawPath) -> - BasePath = hb_path:hd(#{ <<"path">> => RawPath }, Opts), - case ?IS_ID(BasePath) of - true -> BasePath; - false -> - case binary:split(BasePath, [<<"\~">>, <<"?">>, <<"&">>], [global]) of - [BaseMsgID|_] when ?IS_ID(BaseMsgID) -> BaseMsgID; - _ -> hb_crypto:sha256(BasePath) - end - end. 
-``` - -### apply_routes - -Generate a `uri` key for each node in a route. -Apply a node map's rules for transforming the path of the message. - -```erlang -apply_routes(Msg, R, Opts) -> - Nodes = hb_ao:get(<<"nodes">>, R, Opts), - NodesWithRouteApplied = - lists:map( - fun(N) -> - ?event({apply_route, {msg, Msg}, {node, N}}), - case apply_route(Msg, N, Opts) of - {ok, URI} when is_binary(URI) -> N#{ <<"uri">> => URI }; - {ok, RMsg} -> hb_maps:merge(N, RMsg); - {error, _} -> N - end - end, - hb_util:message_to_ordered_list(Nodes, Opts) - ), - ?event({nodes_after_apply, NodesWithRouteApplied}), - R#{ <<"nodes">> => NodesWithRouteApplied }. -``` - -### apply_route - -Generate a `uri` key for each node in a route. -Apply a node map's rules for transforming the path of the message. - -```erlang -apply_route(Msg, Route, Opts) -> - % LoadedRoute = hb_cache:ensure_all_loaded(Route, Opts), - RouteOpts = hb_maps:get(<<"opts">>, Route, #{}), - {ok, #{ - <<"opts">> => RouteOpts, - <<"uri">> => - hb_util:ok( - do_apply_route( - Msg, - hb_maps:without([<<"opts">>], Route, Opts), - Opts - ) - ) - }}. 
-``` - -### do_apply_route - -```erlang -do_apply_route(#{ <<"route-path">> := Path }, R, Opts) -> - do_apply_route(#{ <<"path">> => Path }, R, Opts); -``` - -### do_apply_route - -```erlang -do_apply_route(#{ <<"path">> := RawPath }, #{ <<"prefix">> := RawPrefix }, Opts) -> - Path = hb_cache:ensure_loaded(RawPath, Opts), - Prefix = hb_cache:ensure_loaded(RawPrefix, Opts), - {ok, <>}; -``` - -### do_apply_route - -```erlang -do_apply_route(#{ <<"path">> := RawPath }, #{ <<"suffix">> := RawSuffix }, Opts) -> - Path = hb_cache:ensure_loaded(RawPath, Opts), - Suffix = hb_cache:ensure_loaded(RawSuffix, Opts), - {ok, <>}; -``` - -### do_apply_route - -```erlang -do_apply_route( - #{ <<"path">> := RawPath }, - #{ <<"match">> := RawMatch, <<"with">> := RawWith }, - Opts) -> - Path = hb_cache:ensure_loaded(RawPath, Opts), - Match = hb_cache:ensure_loaded(RawMatch, Opts), - With = hb_cache:ensure_loaded(RawWith, Opts), - % Apply the regex to the path and replace the first occurrence. -``` - -### match - -Find the first matching template in a list of known routes. Allows the - -```erlang -match(Base, Req, Opts) -> - ?event(debug_preprocess, - {matching_routes, - {base, Base}, - {req, Req} - } - ), - TargetPath = hb_util:find_target_path(Req, Opts), - Match = - match_routes( - Req#{ <<"path">> => TargetPath }, - hb_ao:get(<<"routes">>, {as, <<"message@1.0">>, Base}, [], Opts), - Opts - ), - case Match of - no_matches -> {error, no_matching_route}; - _ -> {ok, Match} - end. -``` - -### match_routes - -```erlang -match_routes(ToMatch, Routes, Opts) -> - match_routes( - hb_cache:ensure_all_loaded(ToMatch, Opts), - hb_cache:ensure_all_loaded(Routes, Opts), - hb_ao:keys(hb_ao:normalize_keys(Routes, Opts)), - Opts - ). -``` - -### match_routes - -```erlang -match_routes(#{ <<"path">> := Explicit = <<"http://", _/binary>> }, _, _, _) -> - % If the route is an explicit HTTP URL, we can match it directly. 
-``` - -### match_routes - -```erlang -match_routes(#{ <<"path">> := Explicit = <<"https://", _/binary>> }, _, _, _) -> - #{ <<"node">> => Explicit, <<"reference">> => <<"explicit">> }; -``` - -### match_routes - -```erlang -match_routes(_, _, [], _) -> no_matches; -``` - -### match_routes - -```erlang -match_routes(ToMatch, Routes, [XKey|Keys], Opts) -> - XM = hb_ao:get(XKey, Routes, Opts), - Template = - hb_ao:get( - <<"template">>, - XM, - #{}, - Opts#{ hashpath => ignore } - ), - case hb_util:template_matches(ToMatch, Template, Opts) of - true -> XM#{ <<"reference">> => hb_path:to_binary([<<"routes">>, XKey]) }; - false -> match_routes(ToMatch, Routes, Keys, Opts) - end. -``` - -### choose - -Implements the load distribution strategies if given a cluster. - -```erlang -choose(0, _, _, _, _) -> []; -``` - -### choose - -Implements the load distribution strategies if given a cluster. - -```erlang -choose(N, <<"Random">>, _, Nodes, _Opts) -> - Node = lists:nth(rand:uniform(length(Nodes)), Nodes), - [Node | choose(N - 1, <<"Random">>, nop, lists:delete(Node, Nodes), _Opts)]; -``` - -### choose - -Implements the load distribution strategies if given a cluster. - -```erlang -choose(N, <<"By-Weight">>, _, Nodes, Opts) -> - ?event({nodes, Nodes}), - NodesWithWeight = - [ - { Node, hb_util:float(hb_ao:get(<<"weight">>, Node, Opts)) } - || - Node <- Nodes - ], - Node = hb_util:weighted_random(NodesWithWeight), - [ - Node - | - choose(N - 1, <<"By-Weight">>, nop, lists:delete(Node, Nodes), Opts) - ]; -``` - -### choose - -Implements the load distribution strategies if given a cluster. - -```erlang -choose(N, <<"By-Base">>, Hashpath, Nodes, Opts) when is_binary(Hashpath) -> - choose(N, <<"By-Base">>, binary_to_bignum(Hashpath), Nodes, Opts); -``` - -### choose - -Implements the load distribution strategies if given a cluster. 
- -```erlang -choose(N, <<"By-Base">>, HashInt, Nodes, Opts) -> - Node = lists:nth((HashInt rem length(Nodes)) + 1, Nodes), - [ - Node - | - choose( - N - 1, - <<"By-Base">>, - HashInt, - lists:delete(Node, Nodes), - Opts - ) - ]; -``` - -### choose - -Implements the load distribution strategies if given a cluster. - -```erlang -choose(N, <<"Nearest">>, HashPath, Nodes, Opts) -> - BareHashPath = hb_util:native_id(HashPath), - NodesWithDistances = - lists:map( - fun(Node) -> - Wallet = hb_ao:get(<<"wallet">>, Node, Opts), - DistanceScore = - field_distance( - hb_util:native_id(Wallet), - BareHashPath - ), - {Node, DistanceScore} - end, - Nodes - ), - lists:reverse( - element(1, - lists:foldl( - fun(_, {Current, Remaining}) -> - Res = {Lowest, _} = lowest_distance(Remaining), - {[Lowest|Current], lists:delete(Res, Remaining)} - end, - {[], NodesWithDistances}, - lists:seq(1, N) - ) - ) - ). -``` - -### field_distance - -Calculate the minimum distance between two numbers - -```erlang -field_distance(A, B) when is_binary(A) -> - field_distance(binary_to_bignum(A), B); -``` - -### field_distance - -Calculate the minimum distance between two numbers - -```erlang -field_distance(A, B) when is_binary(B) -> - field_distance(A, binary_to_bignum(B)); -``` - -### field_distance - -Calculate the minimum distance between two numbers - -```erlang -field_distance(A, B) -> - AbsDiff = abs(A - B), - min(AbsDiff, (1 bsl 256) - AbsDiff). -``` - -### lowest_distance - -Find the node with the lowest distance to the given hashpath. - -```erlang -lowest_distance(Nodes) -> lowest_distance(Nodes, {undefined, infinity}). -``` - -### lowest_distance - -Find the node with the lowest distance to the given hashpath. - -```erlang -lowest_distance([], X) -> X; -``` - -### lowest_distance - -Find the node with the lowest distance to the given hashpath. 
- -```erlang -lowest_distance([{Node, Distance}|Nodes], {CurrentNode, CurrentDistance}) -> - case Distance of - infinity -> lowest_distance(Nodes, {Node, Distance}); - _ when Distance < CurrentDistance -> - lowest_distance(Nodes, {Node, Distance}); - _ -> lowest_distance(Nodes, {CurrentNode, CurrentDistance}) - end. -``` - -### binary_to_bignum - -Cast a human-readable or native-encoded ID to a big integer. - -```erlang -binary_to_bignum(Bin) when ?IS_ID(Bin) -> - << Num:256/unsigned-integer >> = hb_util:native_id(Bin), - Num. -``` - -### preprocess - -Preprocess a request to check if it should be relayed to a different node. - -```erlang -preprocess(Msg1, Msg2, Opts) -> - Req = hb_ao:get(<<"request">>, Msg2, Opts#{ hashpath => ignore }), - ?event(debug_preprocess, {called_preprocess,Req}), - TemplateRoutes = load_routes(Opts), - ?event(debug_preprocess, {template_routes, TemplateRoutes}), - Res = hb_http:message_to_request(Req, Opts), - ?event(debug_preprocess, {match, Res}), - case Res of - {error, _} -> - ?event(debug_preprocess, preprocessor_did_not_match), - case hb_opts:get(router_preprocess_default, <<"local">>, Opts) of - <<"local">> -> - ?event(debug_preprocess, executing_locally), - {ok, #{ - <<"body">> => - hb_ao:get(<<"body">>, Msg2, Opts#{ hashpath => ignore }) - }}; - <<"error">> -> - ?event(debug_preprocess, preprocessor_returning_error), - {ok, #{ - <<"body">> => - [#{ - <<"status">> => 404, - <<"message">> => - <<"No matching template found in the given routes.">> - }] - }} - end; - {ok, _Method, Node, _Path, _MsgWithoutMeta, _ReqOpts} -> - ?event(debug_preprocess, {matched_route, {explicit, Res}}), - CommitRequest = - hb_util:atom( - hb_ao:get_first( - [ - {Msg1, <<"commit-request">>} - ], - false, - Opts - ) - ), - MaybeCommit = - case CommitRequest of - true -> #{ <<"commit-request">> => true }; - false -> #{} - end, - % Construct a request to `relay@1.0/call' which will proxy a request - % to `apply@1.0/body' with the original request body as 
the argument. -``` - -### test_provider_test - -```erlang -test_provider_test() -> - Node = - hb_http_server:start_node(Opts = - #{ - router_opts => #{ - <<"provider">> => #{ - <<"path">> => <<"/test-key/routes">>, - <<"test-key">> => #{ - <<"routes">> => [ - #{ - <<"template">> => <<"*">>, - <<"node">> => <<"testnode">> - } - ] - } - } - }, - store => #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST">> - } - } - ), - ?assertEqual( - {ok, <<"testnode">>}, - hb_http:get(Node, <<"/~router@1.0/routes/1/node">>, Opts) - ). -``` - -### dynamic_provider_test - -```erlang -dynamic_provider_test() -> - {ok, Script} = file:read_file("test/test.lua"), - Node = hb_http_server:start_node(#{ - router_opts => #{ - <<"provider">> => #{ - <<"device">> => <<"lua@5.3a">>, - <<"path">> => <<"provider">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => Script - }, - <<"node">> => <<"test-dynamic-node">> - } - }, - priv_wallet => ar_wallet:new() - }), - ?assertEqual( - {ok, <<"test-dynamic-node">>}, - hb_http:get(Node, <<"/~router@1.0/routes/1/node">>, #{}) - ). -``` - -### local_process_provider_test_ - -```erlang -local_process_provider_test_() -> - {timeout, 30, fun local_process_provider/0}. 
-``` - -### local_process_provider - -```erlang -local_process_provider() -> - {ok, Script} = file:read_file("test/test.lua"), - Node = hb_http_server:start_node(#{ - priv_wallet => ar_wallet:new(), - router_opts => #{ - <<"provider">> => #{ - <<"path">> => <<"/router~node-process@1.0/now/known-routes">> - } - }, - node_processes => #{ - <<"router">> => #{ - <<"device">> => <<"process@1.0">>, - <<"execution-device">> => <<"lua@5.3a">>, - <<"scheduler-device">> => <<"scheduler@1.0">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"body">> => Script - }, - <<"node">> => <<"router-node">>, - <<"function">> => <<"compute_routes">> - } - } - }), - ?assertEqual( - {ok, <<"test1">>}, - hb_http:get(Node, <<"/~router@1.0/routes/1/template">>, #{}) - ), - % Query the route 10 times with the same path. This should yield 2 different - % results, as the route provider should choose 1 node of a set of 2 at random. -``` - -### local_dynamic_router_test_ - -Example of a Lua module being used as the `<<"provider">>` for a - -```erlang -local_dynamic_router_test_() -> - {timeout, 60, fun local_dynamic_router/0}. 
-``` - -### local_dynamic_router - -```erlang -local_dynamic_router() -> - BenchRoutes = 50, - TestNodes = 5, - {ok, Module} = file:read_file(<<"scripts/dynamic-router.lua">>), - Node = hb_http_server:start_node(Opts = #{ - store => hb_test_utils:test_store(), - priv_wallet => ar_wallet:new(), - router_opts => #{ - <<"registrar">> => #{ - <<"device">> => <<"router@1.0">>, - <<"path">> => <<"/router1~node-process@1.0/schedule">> - }, - <<"provider">> => #{ - <<"path">> => - RouteProvider = - <<"/router1~node-process@1.0/compute/routes~message@1.0">> - } - }, - node_processes => #{ - <<"router1">> => #{ - <<"device">> => <<"process@1.0">>, - <<"execution-device">> => <<"lua@5.3a">>, - <<"scheduler-device">> => <<"scheduler@1.0">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"name">> => <<"dynamic-router">>, - <<"body">> => Module - }, - % Set module-specific factors for the test - <<"pricing-weight">> => 9, - <<"performance-weight">> => 1, - <<"score-preference">> => 4 - } - } - }), - Store = hb_opts:get(store, no_store, Opts), - ?event(debug_dynrouter, {store, Store}), - % Register workers with the dynamic router with varied prices. -``` - -### dynamic_router_pricing_test_ - -Test that verifies dynamic router functionality and template-based pricing. - -```erlang -dynamic_router_pricing_test_() -> - {timeout, 30, fun dynamic_router_pricing/0}. 
-``` - -### dynamic_router_pricing - -```erlang -dynamic_router_pricing() -> - {ok, Module} = file:read_file(<<"scripts/dynamic-router.lua">>), - {ok, ClientScript} = file:read_file("scripts/hyper-token-p4-client.lua"), - {ok, TokenScript} = file:read_file("scripts/hyper-token.lua"), - {ok, ProcessScript} = file:read_file("scripts/hyper-token-p4.lua"), - ExecWallet = hb:wallet(<<"test/admissible-report-wallet.json">>), - ProxyWallet = ar_wallet:new(), - ExecNodeAddr = hb_util:human_id(ar_wallet:to_address(ExecWallet)), - Processor = - #{ - <<"device">> => <<"p4@1.0">>, - <<"ledger-device">> => <<"lua@5.3a">>, - <<"pricing-device">> => <<"simple-pay@1.0">>, - <<"ledger-path">> => <<"/ledger2~node-process@1.0">>, - <<"module">> => #{ - <<"content-type">> => <<"text/x-lua">>, - <<"name">> => <<"scripts/hyper-token-p4-client.lua">>, - <<"body">> => ClientScript - } - }, - ExecNode = - hb_http_server:start_node( - ExecOpts = #{ - priv_wallet => ExecWallet, - port => 10009, - store => hb_test_utils:test_store(), - node_processes => #{ - <<"ledger2">> => #{ - <<"device">> => <<"process@1.0">>, - <<"execution-device">> => <<"lua@5.3a">>, - <<"scheduler-device">> => <<"scheduler@1.0">>, - <<"authority-match">> => 1, - <<"admin">> => ExecNodeAddr, - <<"token">> => - <<"iVplXcMZwiu5mn0EZxY-PxAkz_A9KOU0cmRE0rwej3E">>, - <<"module">> => [ - #{ - <<"content-type">> => <<"text/x-lua">>, - <<"name">> => <<"scripts/hyper-token.lua">>, - <<"body">> => TokenScript - }, - #{ - <<"content-type">> => <<"text/x-lua">>, - <<"name">> => <<"scripts/hyper-token-p4.lua">>, - <<"body">> => ProcessScript - } - ], - <<"authority">> => ExecNodeAddr - } - }, - p4_recipient => ExecNodeAddr, - p4_non_chargable_routes => [ - #{ <<"template">> => <<"/*~node-process@1.0/*">> }, - #{ <<"template">> => <<"/*~router@1.0/*">> } - ], - on => #{ - <<"request">> => Processor, - <<"response">> => Processor - }, - node_process_spawn_codec => <<"ans104@1.0">>, - router_opts => #{ - <<"offered">> => [ - #{ - 
<<"registration-peer">> => <<"http://localhost:10010">>, - <<"template">> => <<"/c">>, - <<"prefix">> => <<"http://localhost:10009">>, - <<"price">> => 0 - }, - #{ - <<"registration-peer">> => <<"http://localhost:10010">>, - <<"template">> => <<"/b">>, - <<"prefix">> => <<"http://localhost:10009">>, - <<"price">> => 1 - } - ] - } - } - ), - RouterNode = hb_http_server:start_node(#{ - port => 10010, - store => hb_test_utils:test_store(), - priv_wallet => ProxyWallet, - on => - #{ - <<"request">> => #{ - <<"device">> => <<"router@1.0">>, - <<"path">> => <<"preprocess">>, - <<"commit-request">> => true - } - }, - router_opts => #{ - <<"provider">> => #{ - <<"path">> => - <<"/router2~node-process@1.0/compute/routes~message@1.0">> - }, - <<"registrar">> => #{ - <<"path">> => <<"/router2~node-process@1.0">> - }, - <<"registrar-path">> => <<"schedule">> - }, - relay_allow_commit_request => true, - node_processes => #{ - <<"router2">> => #{ - <<"type">> => <<"Process">>, - <<"device">> => <<"process@1.0">>, - <<"execution-device">> => <<"lua@5.3a">>, - <<"scheduler-device">> => <<"scheduler@1.0">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"module">> => <<"dynamic-router">>, - <<"body">> => Module - }, - % Set module-specific factors for the test - <<"pricing-weight">> => 9, - <<"performance-weight">> => 1, - <<"score-preference">> => 4, - <<"is-admissible">> => #{ - <<"path">> => <<"default">>, - <<"default">> => <<"false">> - }, - <<"trusted-peer">> => ExecNodeAddr - } - } - }), - ?event( - debug_load_routes, - {node_message, hb_http:get(RouterNode, <<"/~meta@1.0/info">>, #{})} - ), - % Register workers with the dynamic router with varied prices. -``` - -### dynamic_router_test_ - -Example of a Lua module being used as the `<<"provider">>` for a - -```erlang -dynamic_router_test_() -> - {timeout, 30, fun dynamic_router/0}. 
-``` - -### dynamic_router - -```erlang -dynamic_router() -> - {ok, Module} = file:read_file(<<"scripts/dynamic-router.lua">>), - ExecWallet = hb:wallet(<<"test/admissible-report-wallet.json">>), - ProxyWallet = ar_wallet:new(), - ExecNode = - hb_http_server:start_node( - ExecOpts = #{ priv_wallet => ExecWallet, store => hb_test_utils:test_store() } - ), - Node = hb_http_server:start_node(ProxyOpts = #{ - snp_trusted => [ - #{ - <<"vcpus">> => 32, - <<"vcpu_type">> => 5, - <<"vmm_type">> => 1, - <<"guest_features">> => 1, - <<"firmware">> => - <<"b8c5d4082d5738db6b0fb0294174992738645df70c44cdecf7fad3a62244b788e7e408c582ee48a74b289f3acec78510">>, - <<"kernel">> => - <<"69d0cd7d13858e4fcef6bc7797aebd258730f215bc5642c4ad8e4b893cc67576">>, - <<"initrd">> => - <<"544045560322dbcd2c454bdc50f35edf0147829ec440e6cb487b4a1503f923c1">>, - <<"append">> => - <<"95a34faced5e487991f9cc2253a41cbd26b708bf00328f98dddbbf6b3ea2892e">> - } - ], - store => hb_test_utils:test_store(), - priv_wallet => ProxyWallet, - on => - #{ - <<"request">> => #{ - <<"device">> => <<"router@1.0">>, - <<"path">> => <<"preprocess">> - } - }, - router_opts => #{ - <<"provider">> => #{ - <<"path">> => <<"/router~node-process@1.0/compute/routes~message@1.0">> - } - }, - node_processes => #{ - <<"router">> => #{ - <<"type">> => <<"Process">>, - <<"device">> => <<"process@1.0">>, - <<"execution-device">> => <<"lua@5.3a">>, - <<"scheduler-device">> => <<"scheduler@1.0">>, - <<"module">> => #{ - <<"content-type">> => <<"application/lua">>, - <<"module">> => <<"dynamic-router">>, - <<"body">> => Module - }, - % Set module-specific factors for the test - <<"pricing-weight">> => 9, - <<"performance-weight">> => 1, - <<"score-preference">> => 4, - <<"is-admissible">> => #{ - <<"device">> => <<"snp@1.0">>, - <<"path">> => <<"verify">> - } - } - } - }), % mergeRight this takes our defined Opts and merges them into the - % node opts configs. 
-``` - -### dynamic_routing_by_performance_test_ - -Demonstrates routing tables being dynamically created and adjusted - -```erlang -dynamic_routing_by_performance_test_() -> - {timeout, 60, fun dynamic_routing_by_performance/0}. -``` - -### dynamic_routing_by_performance - -```erlang -dynamic_routing_by_performance() -> - % Setup test parameters - TestNodes = 4, - BenchRoutes = 16, - TestPath = <<"/worker">>, - % Start the main node for the test, loading the `dynamic-router' script and - % the http_monitor to generate performance messages. -``` - -### weighted_random_strategy_test - -```erlang -weighted_random_strategy_test() -> - Nodes = - [ - #{ <<"host">> => <<"1">>, <<"weight">> => 1 }, - #{ <<"host">> => <<"2">>, <<"weight">> => 99 } - ], - SimRes = simulate(1000, 1, Nodes, <<"By-Weight">>), - [HitsOnFirstHost, _] = simulation_distribution(SimRes, Nodes), - ProportionOfFirstHost = HitsOnFirstHost / 1000, - ?event(debug_weighted_random, {proportion_of_first_host, ProportionOfFirstHost}), - ?assert(ProportionOfFirstHost < 0.05), - ?assert(ProportionOfFirstHost >= 0.0001). -``` - -### strategy_suite_test_ - -```erlang -strategy_suite_test_() -> - lists:map( - fun(Strategy) -> - {foreach, - fun() -> ok end, - fun(_) -> ok end, - [ - { - binary_to_list(Strategy) ++ ": " ++ Desc, - fun() -> Test(Strategy) end - } - || - {Desc, Test} <- [ - {"unique", fun unique_test/1}, - {"choose 1", fun choose_1_test/1}, - {"choose n", fun choose_n_test/1} - ] - ] - } - end, - [<<"Random">>, <<"By-Base">>, <<"Nearest">>] - ). -``` - -### by_base_determinism_test - -Ensure that `By-Base` always chooses the same node for the same - -```erlang -by_base_determinism_test() -> - FirstN = 5, - Nodes = generate_nodes(5), - HashPaths = generate_hashpaths(100), - Simulation = simulate(HashPaths, FirstN, Nodes, <<"By-Base">>), - Simulation2 = simulate(HashPaths, FirstN, Nodes, <<"By-Base">>), - ?assertEqual(Simulation, Simulation2). 
-``` - -### unique_test - -```erlang -unique_test(Strategy) -> - TestSize = 1, - FirstN = 5, - Nodes = generate_nodes(5), - Simulation = simulate(TestSize, FirstN, Nodes, Strategy), - unique_nodes(Simulation). -``` - -### choose_1_test - -```erlang -choose_1_test(Strategy) -> - TestSize = 1500, - Nodes = generate_nodes(20), - Simulation = simulate(TestSize, 1, Nodes, Strategy), - within_norms(Simulation, Nodes, TestSize). -``` - -### choose_n_test - -```erlang -choose_n_test(Strategy) -> - TestSize = 1500, - FirstN = 5, - Nodes = generate_nodes(20), - Simulation = simulate(TestSize, FirstN, Nodes, Strategy), - within_norms(Simulation, Nodes, TestSize * 5), - unique_nodes(Simulation). -``` - -### unique_nodes - -```erlang -unique_nodes(Simulation) -> - lists:foreach( - fun(SelectedNodes) -> - lists:foreach( - fun(Node) -> - ?assertEqual(1, hb_util:count(Node, SelectedNodes)) - end, - SelectedNodes - ) - end, - Simulation - ). -``` - -### route_template_message_matches_test - -```erlang -route_template_message_matches_test() -> - Routes = [ - #{ - <<"template">> => #{ <<"other-key">> => <<"other-value">> }, - <<"node">> => <<"incorrect">> - }, - #{ - <<"template">> => #{ <<"special-key">> => <<"special-value">> }, - <<"node">> => <<"correct">> - } - ], - ?assertEqual( - {ok, <<"correct">>}, - route( - #{ <<"path">> => <<"/">>, <<"special-key">> => <<"special-value">> }, - #{ routes => Routes } - ) - ), - ?assertEqual( - {error, no_matches}, - route( - #{ <<"path">> => <<"/">>, <<"special-key">> => <<"special-value2">> }, - #{ routes => Routes } - ) - ), - ?assertEqual( - {ok, <<"fallback">>}, - route( - #{ <<"path">> => <<"/">> }, - #{ routes => Routes ++ [#{ <<"node">> => <<"fallback">> }] } - ) - ). 
-``` - -### route_regex_matches_test - -```erlang -route_regex_matches_test() -> - Routes = [ - #{ - <<"template">> => <<"/.*/compute">>, - <<"node">> => <<"incorrect">> - }, - #{ - <<"template">> => <<"/.*/schedule">>, - <<"node">> => <<"correct">> - } - ], - ?assertEqual( - {ok, <<"correct">>}, - route(#{ <<"path">> => <<"/abc/schedule">> }, #{ routes => Routes }) - ), - ?assertEqual( - {ok, <<"correct">>}, - route(#{ <<"path">> => <<"/a/b/c/schedule">> }, #{ routes => Routes }) - ), - ?assertEqual( - {error, no_matches}, - route(#{ <<"path">> => <<"/a/b/c/bad-key">> }, #{ routes => Routes }) - ). -``` - -### explicit_route_test - -```erlang -explicit_route_test() -> - Routes = [ - #{ - <<"template">> => <<"*">>, - <<"node">> => <<"unimportant">> - } - ], - ?assertEqual( - {ok, <<"https://google.com">>}, - route( - #{ <<"path">> => <<"https://google.com">> }, - #{ routes => Routes } - ) - ), - ?assertEqual( - {ok, <<"http://google.com">>}, - route( - #{ <<"path">> => <<"http://google.com">> }, - #{ routes => Routes } - ) - ), - % Test that `route-path' can also be used to specify the path, via an AO - % call. -``` - -### device_call_from_singleton_test - -```erlang -device_call_from_singleton_test() -> - % Try with a real-world example, taken from a GET request to the router. -``` - -### get_routes_test - -```erlang -get_routes_test() -> - Node = hb_http_server:start_node( - #{ - force_signed => false, - routes => [ - #{ - <<"template">> => <<"*">>, - <<"node">> => <<"our_node">>, - <<"priority">> => 10 - } - ] - } - ), - Res = hb_http:get(Node, <<"/~router@1.0/routes/1/node">>, #{}), - ?event({get_routes_test, Res}), - {ok, Recvd} = Res, - ?assertMatch(<<"our_node">>, Recvd). 
-``` - -### add_route_test - -Test that the `preprocess/3` function re-routes a request to remote - -```erlang -add_route_test() -> - Owner = ar_wallet:new(), - Node = hb_http_server:start_node( - #{ - force_signed => false, - routes => [ - #{ - <<"template">> => <<"/some/path">>, - <<"node">> => <<"old">>, - <<"priority">> => 10 - } - ], - operator => hb_util:encode(ar_wallet:to_address(Owner)) - } - ), - Res = - hb_http:post( - Node, - hb_message:commit( - #{ - <<"path">> => <<"/~router@1.0/routes">>, - <<"template">> => <<"/some/new/path">>, - <<"node">> => <<"new">>, - <<"priority">> => 15 - }, - Owner - ), - #{} - ), - ?event({post_res, Res}), - ?assertMatch({ok, <<"Route added.">>}, Res), - GetRes = hb_http:get(Node, <<"/~router@1.0/routes/2/node">>, #{}), - ?event({get_res, GetRes}), - {ok, Recvd} = GetRes, - ?assertMatch(<<"new">>, Recvd). -``` - -### request_hook_reroute_to_nearest_test - -Test that the `preprocess/3` function re-routes a request to remote - -```erlang -request_hook_reroute_to_nearest_test() -> - Peer1 = hb_http_server:start_node(#{ priv_wallet => W1 = ar_wallet:new() }), - Peer2 = hb_http_server:start_node(#{ priv_wallet => W2 = ar_wallet:new() }), - Address1 = hb_util:human_id(ar_wallet:to_address(W1)), - Address2 = hb_util:human_id(ar_wallet:to_address(W2)), - Peers = [Address1, Address2], - Node = - hb_http_server:start_node(Opts = #{ - priv_wallet => ar_wallet:new(), - routes => - [ - #{ - <<"template">> => <<"/.*/.*/.*">>, - <<"strategy">> => <<"Nearest">>, - <<"nodes">> => - lists:map( - fun({Address, Node}) -> - #{ - <<"prefix">> => Node, - <<"wallet">> => Address - } - end, - [ - {Address1, Peer1}, - {Address2, Peer2} - ] - ) - } - ], - on => #{ <<"request">> => #{ <<"device">> => <<"relay@1.0">> } } - }), - Res = - lists:map( - fun(_) -> - hb_util:ok( - hb_http:get( - Node, - <<"/~meta@1.0/info/address">>, - Opts#{ http_only_result => true } - ) - ) - end, - lists:seq(1, 3) - ), - ?event(debug_test, - {res, { - {response, Res}, - 
{signers, hb_message:signers(Res, Opts)} - }} - ), - HasValidSigner = lists:any( - fun(Peer) -> - lists:member(Peer, Res) - end, - Peers - ), - ?assert(HasValidSigner). -``` - -### generate_nodes - -```erlang -generate_nodes(N) -> - [ - #{ - <<"host">> => - <<"http://localhost:", (integer_to_binary(Port))/binary>>, - <<"wallet">> => hb_util:encode(crypto:strong_rand_bytes(32)) - } - || - Port <- lists:seq(1, N) - ]. -``` - -### generate_hashpaths - -```erlang -generate_hashpaths(Runs) -> - [ - hb_util:encode(crypto:strong_rand_bytes(32)) - || - _ <- lists:seq(1, Runs) - ]. -``` - -### simulate - -```erlang -simulate(Runs, ChooseN, Nodes, Strategy) when is_integer(Runs) -> - simulate( - generate_hashpaths(Runs), - ChooseN, - Nodes, - Strategy - ); -``` - -### simulate - -```erlang -simulate(HashPaths, ChooseN, Nodes, Strategy) -> - [ - choose(ChooseN, Strategy, HashPath, Nodes, #{}) - || - HashPath <- HashPaths - ]. -``` - -### simulation_occurences - -```erlang -simulation_occurences(SimRes, Nodes) -> - lists:foldl( - fun(NearestNodes, Acc) -> - lists:foldl( - fun(Node, Acc2) -> - Acc2#{ Node => hb_maps:get(Node, Acc2, 0, #{}) + 1 } - end, - Acc, - NearestNodes - ) - end, - #{ Node => 0 || Node <- Nodes }, - SimRes - ). -``` - -### simulation_distribution - -```erlang -simulation_distribution(SimRes, Nodes) -> - hb_maps:values(simulation_occurences(SimRes, Nodes), #{}). -``` - -### within_norms - -```erlang -within_norms(SimRes, Nodes, TestSize) -> - Distribution = simulation_distribution(SimRes, Nodes), - % Check that the mean is `TestSize/length(Nodes)' - Mean = hb_util:mean(Distribution), - ?assert(Mean == (TestSize / length(Nodes))), - % Check that the highest count is not more than 3 standard deviations - % away from the mean. 
-``` - ---- - -*Generated from [dev_router.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_router.erl)* diff --git a/docs/book/src/dev_scheduler.erl.md b/docs/book/src/dev_scheduler.erl.md deleted file mode 100644 index 67bac346c..000000000 --- a/docs/book/src/dev_scheduler.erl.md +++ /dev/null @@ -1,1859 +0,0 @@ -# dev_scheduler - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler.erl) - -A simple scheduler scheme for AO. -This device expects a message of the form: - Process: `#{ id, Scheduler: #{ Authority } }` -
-It exposes the following keys for scheduling:
-    `#{ method: GET, path: <<"/info">> }` ->
-        Returns information about the scheduler.
-    `#{ method: GET, path: <<"/slot">> }` -> `slot(Msg1, Msg2, Opts)`
-        Returns the current slot for a process.
-    `#{ method: GET, path: <<"/schedule">> }` -> `get_schedule(Msg1, Msg2, Opts)`
-        Returns the schedule for a process in a cursor-traversable format.
-   ` #{ method: POST, path: <<"/schedule">> }` -> `post_schedule(Msg1, Msg2, Opts)`
-        Schedules a new message for a process, or starts a new scheduler
-        for the given message.
-
- ---- - -## Exported Functions - -- `checkpoint/1` -- `info/0` -- `location/3` -- `next/3` -- `parse_schedulers/1` -- `router/4` -- `schedule/3` -- `slot/3` -- `start/0` -- `status/3` -- `test_process/0` - ---- - -### start - -A simple scheduler scheme for AO. -Helper to ensure that the environment is started. - -```erlang -start() -> - % We need the rocksdb backend to run for hb_cache module to work - application:ensure_all_started(hb), - <> - = crypto:strong_rand_bytes(12), - rand:seed(exsplus, {I1, I2, I3}), - ok. -``` - -### info - -This device uses a default_handler to route requests to the correct - -```erlang -info() -> - #{ - exports => - [ - location, - status, - next, - schedule, - slot, - init, - checkpoint - ], - excludes => [set, keys], - default => fun router/4 - }. -``` - -### parse_schedulers - -General utility functions that are available to other modules. - -```erlang -parse_schedulers(SchedLoc) when is_list(SchedLoc) -> SchedLoc; -``` - -### parse_schedulers - -General utility functions that are available to other modules. - -```erlang -parse_schedulers(SchedLoc) when is_binary(SchedLoc) -> - binary:split( - binary:replace(SchedLoc, <<"\"">>, <<"">>, [global]), - <<",">>, - [global, trim_all] - ). -``` - -### router - -The default handler for the scheduler device. - -```erlang -router(_, Msg1, Msg2, Opts) -> - ?event({scheduler_router_called, {msg2, Msg2}, {opts, Opts}}), - schedule(Msg1, Msg2, Opts). 
-``` - -### next - -Load the schedule for a process into the cache, then return the next - -```erlang -next(Msg1, Msg2, Opts) -> - ?event(debug_next, {scheduler_next_called, {msg1, Msg1}, {msg2, Msg2}}), - ?event(next, started_next), - ?event(next_profiling, started_next), - Schedule = message_cached_assignments(Msg1, Opts), - LastProcessed = - hb_util:int( - hb_ao:get( - <<"at-slot">>, - Msg1, - Opts#{ hashpath => ignore } - ) - ), - ?event(next_profiling, got_last_processed), - ?event(debug_next, {in_message_cache, {schedule, Schedule}}), - ?event(next, {last_processed, LastProcessed, {message_cache, length(Schedule)}}), - % Get the assignments from the message cache, local cache, or fetch from - % the SU. Returns an ordered list of assignments. -``` - -### validate_next_slot - -Validate the `next` slot generated by `find_next_assignment`. - -```erlang -validate_next_slot(Msg1, [NextAssignment|Assignments], Lookahead, Last, Opts) -> - % Paranoia: Get the slot of the next assignment, to ensure that it is the - % last processed slot + 1. 
-``` - -### find_next_assignment - -Get the assignments for a process from the message cache, local cache, - -```erlang -find_next_assignment(_Msg1, _Msg2, Schedule = [_Next|_], _LastSlot, _Opts) -> - {ok, Schedule, undefined}; -``` - -### find_next_assignment - -Get the assignments for a process from the message cache, local cache, - -```erlang -find_next_assignment(Msg1, Msg2, _Schedule, LastSlot, Opts) -> - ProcID = dev_process:process_id(Msg1, Msg2, Opts), - LocalCacheRes = - case hb_util:atom(hb_opts:get(scheduler_ignore_local_cache, false, Opts)) of - true -> not_found; - false -> - check_lookahead_and_local_cache(Msg1, ProcID, LastSlot + 1, Opts) - end, - case LocalCacheRes of - {ok, Worker, Assignment} -> - ?event(next_debug, - {in_cache, - {slot, LastSlot + 1}, - {assignment, Assignment} - } - ), - ?event(next_profiling, read_assignment), - {ok, [Assignment], Worker}; - not_found -> - {ok, RecvdAssignments} = - hb_ao:resolve( - Msg1, - #{ - <<"method">> => <<"GET">>, - <<"path">> => <<"schedule/assignments">>, - <<"from">> => LastSlot - }, - Opts#{ scheduler_follow_redirects => true } - ), - % Convert the assignments to an ordered list of messages, - % after removing all keys before the last processed slot. -``` - -### message_cached_assignments - -Non-device exported helper to get the cached assignments held in a - -```erlang -message_cached_assignments(Msg, Opts) -> - hb_private:get( - <<"scheduler@1.0/assignments">>, - Msg, - [], - Opts - ). -``` - -### spawn_lookahead_worker - -Spawn a new Erlang process to fetch the next assignments from the local - -```erlang -spawn_lookahead_worker(ProcID, Slot, Opts) -> - Caller = self(), - spawn( - fun() -> - ?event(next_lookahead, - {looking_ahead, - {proc_id, ProcID}, - {slot, Slot}, - {caller, Caller} - } - ), - case dev_scheduler_cache:read(ProcID, Slot, Opts) of - {ok, Assignment} -> - LoadedAssignment = hb_cache:ensure_all_loaded(Assignment, Opts), - Caller ! 
{assignment, ProcID, Slot, LoadedAssignment}; - not_found -> - fail - end - end - ). -``` - -### check_lookahead_and_local_cache - -Check if we have a result from a lookahead worker or from our local - -```erlang -check_lookahead_and_local_cache(Msg1, ProcID, TargetSlot, Opts) when is_map(Msg1) -> - case hb_private:get(<<"scheduler@1.0/lookahead-worker">>, Msg1, Opts) of - not_found -> - check_lookahead_and_local_cache(undefined, ProcID, TargetSlot, Opts); - LookaheadWorker -> - check_lookahead_and_local_cache(LookaheadWorker, ProcID, TargetSlot, Opts) - end; -``` - -### check_lookahead_and_local_cache - -Check if we have a result from a lookahead worker or from our local - -```erlang -check_lookahead_and_local_cache(Worker, ProcID, TargetSlot, Opts) when is_pid(Worker) -> - receive - {assignment, ProcID, OldSlot, _Assignment} when OldSlot < TargetSlot -> - % The lookahead worker has found an assignment for a slot that is - % before the target slot. We remove it from the cache and continue - % searching. -``` - -### check_lookahead_and_local_cache - -```erlang -check_lookahead_and_local_cache(undefined, ProcID, TargetSlot, Opts) -> - % The lookahead worker has not found an assignment for the target - % slot yet, so we check our local cache. -``` - -### status - -Returns information about the entire scheduler. - -```erlang -status(_M1, _M2, _Opts) -> - ?event(getting_scheduler_status), - Wallet = dev_scheduler_registry:get_wallet(), - {ok, - #{ - <<"address">> => hb_util:id(ar_wallet:to_address(Wallet)), - <<"processes">> => - lists:map( - fun hb_util:id/1, - dev_scheduler_registry:get_processes() - ), - <<"cache-control">> => <<"no-store">> - } - }. -``` - -### location - -Router for `record` requests. Expects either a `POST` or `GET` request. - -```erlang -location(Msg1, Msg2, Opts) -> - case hb_ao:get(<<"method">>, Msg2, <<"GET">>, Opts) of - <<"POST">> -> post_location(Msg1, Msg2, Opts); - <<"GET">> -> get_location(Msg1, Msg2, Opts) - end. 
-``` - -### get_location - -Search for the location of the scheduler in the scheduler-location - -```erlang -get_location(_Msg1, Req, Opts) -> - % Get the address of the scheduler from the request. -``` - -### post_location - -Generate a new scheduler location record and register it. We both send - -```erlang -post_location(Msg1, RawReq, RawOpts) -> - Opts = - case dev_whois:ensure_host(RawOpts) of - {ok, NewOpts} -> NewOpts; - _ -> RawOpts - end, - % Ensure that the request is signed by the operator. -``` - -### schedule - -A router for choosing between getting the existing schedule, or - -```erlang -schedule(Msg1, Msg2, Opts) -> - ?event({resolving_schedule_request, {msg2, Msg2}, {state_msg, Msg1}}), - case hb_util:key_to_atom(hb_ao:get(<<"method">>, Msg2, <<"GET">>, Opts)) of - post -> post_schedule(Msg1, Msg2, Opts); - get -> get_schedule(Msg1, Msg2, Opts) - end. -``` - -### post_schedule - -Schedules a new message on the SU. Searches Msg1 for the appropriate ID, - -```erlang -post_schedule(Msg1, Msg2, Opts) -> - ?event(scheduling_message), - % Find the target message to schedule: - ToSched = find_message_to_schedule(Msg1, Msg2, Opts), - ?event({to_sched, ToSched}), - % Find the ProcessID of the target message: - % - If it is a Process, use the ID of the message. -``` - -### do_post_schedule - -Post schedule the message. `Msg2` by this point has been refined to only - -```erlang -do_post_schedule(ProcID, PID, Msg2, Opts) -> - % Should we verify the message again before scheduling? 
- Verified = - case hb_opts:get(verify_assignments, true, Opts) of - true -> - ?event(debug_scheduler_verify, - {verifying_message_before_scheduling, Msg2} - ), - Res = length(hb_message:signers(Msg2, Opts)) > 0 - andalso hb_message:verify(Msg2, signers, Opts), - ?event(debug_scheduler_verify, {verified, Res}), - Res; - accept_unsigned -> - ?event( - debug_scheduler_verify, - {accepting_unsigned_message_before_scheduling, Msg2} - ), - hb_message:verify(Msg2, signers, Opts); - false -> true - end, - ?event({verified, Verified}), - % Handle scheduling of the message if the message is valid. -``` - -### find_server - -Locate the correct scheduling server for a given process. - -```erlang -find_server(ProcID, Msg1, Opts) -> - find_server(ProcID, Msg1, undefined, Opts). -``` - -### find_server - -```erlang -find_server(ProcID, Msg1, ToSched, Opts) -> - case get_hint(ProcID, Opts) of - {ok, Hint} -> - ?event({found_hint_in_proc_id, Hint}), - generate_redirect(ProcID, Hint, Opts); - not_found -> - ?event({no_hint_in_proc_id, ProcID}), - case dev_scheduler_registry:find(ProcID, false, Opts) of - PID when is_pid(PID) -> - ?event({found_pid_in_local_registry, PID}), - {local, PID}; - not_found -> - ?event({no_pid_in_local_registry, ProcID}), - Proc = find_process_message(ProcID, Msg1, ToSched, Opts), - ?event({found_process, {process, Proc}, {msg1, Msg1}}), - SchedLoc = - hb_ao:get_first( - [ - {Proc, <<"scheduler">>}, - {Proc, <<"scheduler-location">>} - ] ++ - case ToSched of - undefined -> []; - _ -> [{ToSched, <<"scheduler-location">>}] - end, - not_found, - Opts#{ hashpath => ignore } - ), - ?event({sched_loc, SchedLoc}), - case SchedLoc of - not_found -> - {error, <<"No scheduler information provided.">>}; - _ -> - ?event( - {confirming_if_scheduler_is_local, - {addr, SchedLoc} - } - ), - ParsedLoc = parse_schedulers(SchedLoc), - case is_local_scheduler(ProcID, Proc, ParsedLoc, Opts) of - {ok, PID} -> - % We are the scheduler. 
Start the server if - % it has not already been started, with the - % given options. -``` - -### find_process_message - -Find the process message for a given process ID and base message. - -```erlang -find_process_message(ProcID, Msg1, ToSched, Opts) -> - % Find the process from the message. -``` - -### is_local_scheduler - -Determine if a scheduler is local. If so, return the PID and options. - -```erlang -is_local_scheduler(_, _, [], _Opts) -> false; -``` - -### is_local_scheduler - -Determine if a scheduler is local. If so, return the PID and options. - -```erlang -is_local_scheduler(ProcID, ProcMsg, [Scheduler | Rest], Opts) -> - case is_local_scheduler(ProcID, ProcMsg, Scheduler, Opts) of - {ok, PID} -> {ok, PID}; - false -> is_local_scheduler(ProcID, ProcMsg, Rest, Opts) - end; -``` - -### is_local_scheduler - -Determine if a scheduler is local. If so, return the PID and options. - -```erlang -is_local_scheduler(ProcID, ProcMsg, Scheduler, Opts) -> - case hb_opts:as(Scheduler, Opts) of - {ok, _} -> - { - ok, - dev_scheduler_registry:find(ProcID, ProcMsg, Opts) - }; - {error, _} -> false - end. -``` - -### get_hint - -If a hint is present in the string, return it. Else, return not_found. - -```erlang -get_hint(Str, Opts) when is_binary(Str) -> - case hb_opts:get(scheduler_follow_hints, true, Opts) of - true -> - case binary:split(Str, <<"?">>, [global]) of - [_, QS] -> - QueryMap = hb_maps:from_list(uri_string:dissect_query(QS)), - case hb_maps:get(<<"hint">>, QueryMap, not_found, Opts) of - not_found -> not_found; - Hint -> {ok, Hint} - end; - _ -> not_found - end; - false -> not_found - end; -``` - -### get_hint - -If a hint is present in the string, return it. Else, return not_found. -Generate a redirect message to a scheduler. - -```erlang -get_hint(_Str, _Opts) -> not_found. -``` - -### generate_redirect - -If a hint is present in the string, return it. Else, return not_found. -Generate a redirect message to a scheduler. 
- -```erlang -generate_redirect(ProcID, SchedulerLocation, Opts) -> - Variant = hb_ao:get(<<"variant">>, SchedulerLocation, <<"ao.N.1">>, Opts), - ?event({generating_redirect, {proc_id, ProcID}, {variant, Variant}}), - RedirectLocation = - case is_binary(SchedulerLocation) of - true -> SchedulerLocation; - false -> - hb_ao:get_first( - [ - {SchedulerLocation, <<"url">>}, - {SchedulerLocation, <<"location">>} - ], - <<"/">>, - Opts - ) - end, - {redirect, - #{ - <<"status">> => 307, - <<"location">> => RedirectLocation, - <<"body">> => - <<"Redirecting to scheduler: ", RedirectLocation/binary>>, - <<"variant">> => Variant - } - }. -``` - -### without_hint - -Take a process ID or target with a potential hint and return just the - -```erlang -without_hint(Target) when ?IS_ID(Target) -> - hb_util:human_id(Target); -``` - -### without_hint - -Take a process ID or target with a potential hint and return just the - -```erlang -without_hint(Target) -> - case binary:split(Target, [<<"?">>, <<"&">>], [global]) of - [ProcID] when ?IS_ID(ProcID) -> hb_util:human_id(ProcID); - _ -> throw({invalid_operation_target, Target}) - end. -``` - -### find_remote_scheduler - -Use the SchedulerLocation to find the remote path and return a redirect. - -```erlang -find_remote_scheduler(_ProcID, [], _Opts) -> {error, not_found}; -``` - -### find_remote_scheduler - -Use the SchedulerLocation to find the remote path and return a redirect. - -```erlang -find_remote_scheduler(ProcID, [Scheduler | Rest], Opts) -> - case find_remote_scheduler(ProcID, Rest, Opts) of - {error, not_found} -> - find_remote_scheduler(ProcID, Scheduler, Opts); - {ok, Redirect} -> - {ok, Redirect} - end; -``` - -### find_remote_scheduler - -Use the SchedulerLocation to find the remote path and return a redirect. - -```erlang -find_remote_scheduler(ProcID, Scheduler, Opts) -> - % Parse the scheduler location to see if it has a hint. If there is a hint, - % we will use it to construct a redirect message. 
-``` - -### slot - -Returns information about the current slot for a process. - -```erlang -slot(M1, M2, Opts) -> - ?event({getting_current_slot, {msg, M1}}), - ProcID = find_target_id(M1, M2, Opts), - case find_server(ProcID, M1, Opts) of - {local, PID} -> - ?event({getting_current_slot, {proc_id, ProcID}}), - {Timestamp, Height, Hash} = ar_timestamp:get(), - #{ current := CurrentSlot, wallets := Wallets } = - dev_scheduler_server:info(PID), - {ok, #{ - <<"process">> => ProcID, - <<"current">> => CurrentSlot, - <<"timestamp">> => Timestamp, - <<"block-height">> => Height, - <<"block-hash">> => Hash, - <<"cache-control">> => <<"no-store">>, - <<"addresses">> => lists:map(fun hb_util:human_id/1, Wallets) - }}; - {redirect, Redirect} -> - case hb_opts:get(scheduler_follow_redirects, true, Opts) of - false -> {ok, Redirect}; - true -> remote_slot(ProcID, Redirect, Opts) - end - end. -``` - -### remote_slot - -Get the current slot from a remote scheduler. - -```erlang -remote_slot(ProcID, Redirect, Opts) -> - ?event({getting_remote_slot, {proc_id, ProcID}, {redirect, {explicit, Redirect}}}), - Node = node_from_redirect(Redirect, Opts), - ?event({getting_slot_from_node, {string, Node}}), - remote_slot( - hb_ao:get(<<"variant">>, Redirect, <<"ao.N.1">>, Opts), - ProcID, - Node, - Opts - ). -``` - -### remote_slot - -Get the current slot from a remote scheduler, based on the variant of - -```erlang -remote_slot(<<"ao.N.1">>, ProcID, Node, Opts) -> - % The process is running on a mainnet AO-Core scheduler, so we can just - % use the `/slot' endpoint to get the current slot. -``` - -### remote_slot - -```erlang -remote_slot(<<"ao.TN.1">>, ProcID, Node, Opts) -> - % The process is running on a testnet AO-Core scheduler, so we need to use - % `/processes/procID/latest' to get the current slot. 
-``` - -### get_schedule - -Generate and return a schedule for a process, optionally between - -```erlang -get_schedule(Msg1, Msg2, Opts) -> - ProcID = hb_util:human_id(find_target_id(Msg1, Msg2, Opts)), - From = - case hb_ao:get(<<"from">>, Msg2, not_found, Opts) of - not_found -> 0; - X when X < 0 -> 0; - FromRes -> hb_util:int(FromRes) - end, - To = - case hb_ao:get(<<"to">>, Msg2, not_found, Opts) of - not_found -> undefined; - ToRes -> hb_util:int(ToRes) - end, - Format = hb_ao:get(<<"accept">>, Msg2, <<"application/http">>, Opts), - ?event( - {parsed_get_schedule, - {process, ProcID}, - {from, From}, - {to, To}, - {format, Format} - } - ), - case find_server(ProcID, Msg1, Opts) of - {local, _PID} -> - generate_local_schedule(Format, ProcID, From, To, Opts); - {redirect, Redirect} -> - ?event({redirect_received, {redirect, Redirect}}), - case hb_opts:get(scheduler_follow_redirects, true, Opts) of - true -> - case get_remote_schedule(ProcID, From, To, Redirect, Opts) of - {ok, Res} -> - case uri_string:percent_decode(Format) of - <<"application/aos-2">> -> - dev_scheduler_formats:assignments_to_aos2( - ProcID, - hb_ao:get( - <<"assignments">>, Res, [], Opts), - hb_util:atom(hb_ao:get( - <<"continues">>, Res, false, Opts)), - Opts - ); - _ -> - {ok, Res} - end; - {error, Res} -> - {error, Res} - end; - false -> - {ok, Redirect} - end - end. -``` - -### get_remote_schedule - -Get a schedule from a remote scheduler, but first read all of the - -```erlang -get_remote_schedule(RawProcID, From, To, Redirect, Opts) -> - % If we are responding to a legacy scheduler request we must add one to the - % `from' slot to account for the fact that the legacy scheduler gives us - % the slots _after_ the stated nonce. 
-``` - -### do_get_remote_schedule - -Get a schedule from a remote scheduler, unless we already have already - -```erlang -do_get_remote_schedule(ProcID, LocalAssignments, From, To, _, Opts) - when (To =/= undefined) andalso (From >= To) -> - % We already have all of the assignments from the local cache. Return them - % as a bundle. We set the 'more' to `undefined' to indicate that there may - % be more assignments to fetch, but we don't know for sure. -``` - -### do_get_remote_schedule - -```erlang -do_get_remote_schedule(ProcID, LocalAssignments, From, To, Redirect, Opts) -> - % We don't have all of the assignments from the local cache, so we need to - % fetch the rest from the remote scheduler. -``` - -### cache_remote_schedule - -Cache a schedule received from a remote scheduler. - -```erlang -cache_remote_schedule(<<"ao.TN.1">>, ProcID, Schedule, Opts) -> - % If the schedule has a variant of ao.TN.1, we add this to the raw assignment - % before caching it. -``` - -### cache_remote_schedule - -```erlang -cache_remote_schedule(<<"ao.N.1">>, ProcID, Schedule, Opts) -> - Assignments = - hb_ao:get( - <<"assignments">>, - Schedule, - Opts#{ hashpath => ignore } - ), - cache_remote_schedule(common, ProcID, Assignments, Opts); -``` - -### cache_remote_schedule - -```erlang -cache_remote_schedule(_, _ProcID, Schedule, Opts) -> - Cacher = - fun() -> - ?event(debug_sched, {caching_remote_schedule, {schedule, Schedule}}), - lists:foreach( - fun(Assignment) -> - % We do not care about the result of the write because it is only - % an additional cache. -``` - -### node_from_redirect - -Get the node URL from a redirect. - -```erlang -node_from_redirect(Redirect, Opts) -> - uri_string:recompose( - ( - hb_maps:remove( - query, - uri_string:parse( - hb_ao:get(<<"location">>, Redirect, Opts) - ), - Opts - ) - )#{path => <<"/">>} - ). -``` - -### filter_json_assignments - -Filter JSON assignment results from a remote legacy scheduler. 
- -```erlang -filter_json_assignments(JSONRes, To, From, Opts) -> - Edges = hb_maps:get(<<"edges">>, JSONRes, [], Opts), - Filtered = - lists:filter( - fun(Edge) -> - Node = hb_maps:get(<<"node">>, Edge, undefined, Opts), - Assignment = hb_maps:get(<<"assignment">>, Node, undefined, Opts), - Tags = hb_maps:get(<<"tags">>, Assignment, undefined, Opts), - Nonces = - lists:filtermap( - fun(#{ <<"name">> := <<"Nonce">>, <<"value">> := Nonce }) -> - {true, hb_util:int(Nonce)}; - (_) -> false - end, - Tags - ), - Nonce = hd(Nonces), - ?event({filter, {nonce, Nonce}, {from, From}, {to, To}}), - Nonce >= From andalso Nonce =< To - end, - Edges - ), - ?event({filtered, {length, length(Filtered)}, {edges, Filtered}}), - JSONRes#{ <<"edges">> => Filtered }. -``` - -### post_remote_schedule - -Filter JSON assignment results from a remote legacy scheduler. - -```erlang -post_remote_schedule(RawProcID, Redirect, OnlyCommitted, Opts) -> - RemoteOpts = Opts#{ http_client => httpc }, - ProcID = without_hint(RawProcID), - Location = hb_ao:get(<<"location">>, Redirect, Opts), - Parsed = uri_string:parse(Location), - Node = uri_string:recompose((hb_maps:remove(query, Parsed, Opts))#{path => <<"/">>}), - Variant = hb_ao:get(<<"variant">>, Redirect, <<"ao.N.1">>, Opts), - case Variant of - <<"ao.N.1">> -> - PostMsg = #{ - <<"path">> => << ProcID/binary, "/schedule">>, - <<"body">> => OnlyCommitted, - <<"method">> => <<"POST">> - }, - hb_http:post(Node, PostMsg, RemoteOpts); - <<"ao.TN.1">> -> - % Ensure that the message is signed with ANS-104. 
-``` - -### post_legacy_schedule - -```erlang -post_legacy_schedule(ProcID, OnlyCommitted, Node, Opts) -> - ?event({encoding_for_legacy_scheduler, {node, {string, Node}}}), - Encoded = - try - Item = - hb_message:convert( - OnlyCommitted, - <<"ans104@1.0">>, - Opts - ), - ?event( - {encoded_for_legacy_scheduler, - {item, Item}, - {exact, {explicit, Item}} - } - ), - {ok, ar_bundles:serialize(Item)} - catch - Class:Reason -> - {error, - #{ - <<"status">> => 422, - <<"body">> => - << - "Failed to encode message for legacy scheduler on ", - Node/binary, - ". Try different encoding?" - >>, - <<"class">> => Class, - <<"reason">> => - iolist_to_binary(io_lib:format("~p", [Reason])) - } - } - end, - case Encoded of - {error, EncodingErr} -> - ?event({could_not_encode_for_legacy_scheduler, {error, EncodingErr}}), - {error, #{ - <<"status">> => 422, - <<"body">> => - <<"Incorrect encoding. Scheduler has variant: ao.TN.1">> - } - }; - {ok, Body} -> - ?event({encoded_for_legacy_scheduler, {encoded, Body}}), - PostMsg = #{ - <<"path">> => P = <<"/?proc-id=", ProcID/binary>>, - <<"body">> => Body, - <<"method">> => <<"POST">> - }, - ?event({posting_to_remote_legacy_scheduler, - {node, {string, Node}}, - {path, {string, P}}, - {process_id, {string, ProcID}} - }), - LegacyOpts = Opts#{ protocol => http2 }, - case hb_http:post(Node, PostMsg, LegacyOpts) of - {ok, PostRes} -> - ?event({remote_schedule_result, PostRes}), - JSONRes = - hb_json:decode( - hb_ao:get(<<"body">>, PostRes, Opts) - ), - % Legacy SUs return only the ID of the assignment, so we need - % to read and return it. -``` - -### find_target_id - -Find the schedule ID from a given request. 
The precidence order for - -```erlang -find_target_id(Msg1, Msg2, Opts) -> - TempOpts = Opts#{ hashpath => ignore }, - Res = case hb_ao:resolve(Msg2, <<"target">>, TempOpts) of - {ok, Target} -> - % ID found at Msg2/target - Target; - _ -> - case hb_ao:resolve(Msg2, <<"type">>, TempOpts) of - {ok, <<"Process">>} -> - % Msg2 is a Process, so the ID is at Msg2/id - hb_message:id(Msg2, all, Opts); - _ -> - case hb_ao:resolve(Msg1, <<"process">>, TempOpts) of - {ok, Process} -> - % ID found at Msg1/process/id - hb_message:id(Process, all, Opts); - _ -> - % Does the message have a type of Process? - case hb_ao:get(<<"type">>, Msg1, TempOpts) of - <<"Process">> -> - % Yes, so try Msg1/id - hb_message:id(Msg1, all, Opts); - _ -> - % No, so the ID is at Msg2/id - hb_message:id(Msg2, all, Opts) - end - end - end - end, - ?event({found_id, {id, Res}, {msg1, Msg1}, {msg2, Msg2}}), - Res. -``` - -### find_message_to_schedule - -Search the given base and request message pair to find the message to - -```erlang -find_message_to_schedule(_Msg1, Msg2, Opts) -> - Subject = - hb_ao:get( - <<"subject">>, - Msg2, - not_found, - Opts#{ hashpath => ignore } - ), - case Subject of - <<"self">> -> Msg2; - not_found -> - hb_ao:get(<<"body">>, Msg2, Msg2, Opts#{ hashpath => ignore }); - Subject -> - hb_ao:get(Subject, Msg2, Opts#{ hashpath => ignore }) - end. -``` - -### generate_local_schedule - -Generate a `GET /schedule` response for a process. - -```erlang -generate_local_schedule(Format, ProcID, From, To, Opts) -> - ?event( - {servicing_request_for_assignments, - {proc_id, ProcID}, - {from, From}, - {to, To} - } - ), - ?event(generating_schedule_from_local_server), - {Assignments, More} = get_local_assignments(ProcID, From, To, Opts), - ?event({got_assignments, length(Assignments), {more, More}}), - % Determine and apply the formatting function to use for generation - % of the response, based on the `Accept' header. 
-``` - -### get_local_assignments - -Get the assignments for a process, and whether the request was truncated. - -```erlang -get_local_assignments(ProcID, From, undefined, Opts) -> - case dev_scheduler_cache:latest(ProcID, Opts) of - not_found -> - % No assignments in cache. -``` - -### get_local_assignments - -```erlang -get_local_assignments(ProcID, From, RequestedTo, Opts) -> - ?event({handling_req_to_get_assignments, ProcID, {from, From}, {to, RequestedTo}}), - ComputedTo = - case (RequestedTo - From) > ?MAX_ASSIGNMENT_QUERY_LEN of - true -> From + ?MAX_ASSIGNMENT_QUERY_LEN; - false -> RequestedTo - end, - { - read_local_assignments(ProcID, From, ComputedTo, Opts), - ComputedTo < RequestedTo - }. -``` - -### read_local_assignments - -Get the assignments for a process. - -```erlang -read_local_assignments(_ProcID, From, To, _Opts) when From > To -> - []; -``` - -### read_local_assignments - -Get the assignments for a process. - -```erlang -read_local_assignments(ProcID, CurrentSlot, To, Opts) -> - case dev_scheduler_cache:read(ProcID, CurrentSlot, Opts) of - not_found -> - % No assignment found in cache. -``` - -### checkpoint - -Returns the current state of the scheduler. -Generate a _transformed_ process message, not as they are generated - -```erlang -checkpoint(State) -> {ok, State}. -%%% Tests -``` - -### test_process - -Returns the current state of the scheduler. -Generate a _transformed_ process message, not as they are generated - -```erlang -test_process() -> test_process(#{ priv_wallet => hb:wallet()}). -``` - -### test_process - -Returns the current state of the scheduler. -Generate a _transformed_ process message, not as they are generated - -```erlang -test_process(#{ priv_wallet := Wallet}) -> - test_process(hb_util:human_id(ar_wallet:to_address(Wallet))); -``` - -### test_process - -Returns the current state of the scheduler. 
-Generate a _transformed_ process message, not as they are generated - -```erlang -test_process(Address) -> - #{ - <<"device">> => <<"scheduler@1.0">>, - <<"device-stack">> => [<<"cron@1.0">>, <<"wasm-64@1.0">>, <<"poda@1.0">>], - <<"image">> => <<"wasm-image-id">>, - <<"type">> => <<"Process">>, - <<"scheduler-location">> => Address, - <<"test-random-seed">> => rand:uniform(1337) - }. -``` - -### status_test - -```erlang -status_test() -> - start(), - ?assertMatch( - #{<<"processes">> := Processes, - <<"address">> := Address} - when is_list(Processes) and is_binary(Address), - hb_ao:get(status, test_process()) - ). -``` - -### register_new_process_test - -```erlang -register_new_process_test() -> - start(), - Opts = #{ priv_wallet => hb:wallet() }, - Msg1 = hb_message:commit(test_process(Opts), Opts), - ?event({test_registering_new_process, {msg, Msg1}}), - ?assertMatch({ok, _}, - hb_ao:resolve( - Msg1, - #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"schedule">>, - <<"body">> => Msg1 - }, - #{} - ) - ), - ?event({status_response, Msg1}), - Procs = hb_ao:get(<<"processes">>, hb_ao:get(status, Msg1)), - ?event({procs, Procs}), - ?assert( - lists:member( - hb_util:id(Msg1, all), - hb_ao:get(<<"processes">>, hb_ao:get(status, Msg1)) - ) - ). -``` - -### register_location_on_boot_test - -Test that a scheduler location is registered on boot. 
- -```erlang -register_location_on_boot_test() -> - NotifiedPeerWallet = ar_wallet:new(), - RegisteringNodeWallet = ar_wallet:new(), - start(), - NotifiedPeer = - hb_http_server:start_node(#{ - priv_wallet => NotifiedPeerWallet, - store => [ - #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST/scheduler-location-notified">> - } - ] - }), - RegisteringNode = hb_http_server:start_node( - #{ - priv_wallet => RegisteringNodeWallet, - on => - #{ - <<"start">> => #{ - <<"device">> => <<"scheduler@1.0">>, - <<"path">> => <<"location">>, - <<"method">> => <<"POST">>, - <<"target">> => <<"self">>, - <<"require-codec">> => <<"ans104@1.0">>, - <<"url">> => <<"https://hyperbeam-test-ignore.com">>, - <<"hook">> => #{ - <<"result">> => <<"ignore">>, - <<"commit-request">> => true - } - } - }, - scheduler_location_notify_peers => [NotifiedPeer] - } - ), - {ok, CurrentLocation} = - hb_http:get( - RegisteringNode, - #{ - <<"method">> => <<"GET">>, - <<"path">> => <<"/~scheduler@1.0/location">>, - <<"address">> => - hb_util:human_id(ar_wallet:to_address(RegisteringNodeWallet)) - }, - #{} - ), - ?event({current_location, CurrentLocation}), - ?assertMatch( - #{ - <<"url">> := <<"https://hyperbeam-test-ignore.com">>, - <<"nonce">> := 0 - }, - hb_ao:get(<<"body">>, CurrentLocation, #{}) - ). 
-``` - -### schedule_message_and_get_slot_test - -```erlang -schedule_message_and_get_slot_test() -> - start(), - Msg1 = test_process(), - Msg2 = #{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - hb_message:commit(#{ - <<"type">> => <<"Message">>, - <<"test-key">> => <<"true">> - }, hb:wallet()) - }, - ?assertMatch({ok, _}, hb_ao:resolve(Msg1, Msg2, #{})), - ?assertMatch({ok, _}, hb_ao:resolve(Msg1, Msg2, #{})), - Msg3 = #{ - <<"path">> => <<"slot">>, - <<"method">> => <<"GET">>, - <<"process">> => hb_util:id(Msg1) - }, - ?event({pg, dev_scheduler_registry:get_processes()}), - ?event({getting_schedule, {msg, Msg3}}), - ?assertMatch({ok, #{ <<"current">> := CurrentSlot }} - when CurrentSlot > 0, - hb_ao:resolve(Msg1, Msg3, #{})). -``` - -### redirect_to_hint_test - -```erlang -redirect_to_hint_test() -> - start(), - RandAddr = hb_util:human_id(crypto:strong_rand_bytes(32)), - TestLoc = <<"http://test.computer">>, - Msg1 = test_process(<< RandAddr/binary, "?hint=", TestLoc/binary>>), - Msg2 = #{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => Msg1 - }, - ?assertMatch( - {ok, #{ <<"location">> := Location }} when is_binary(Location), - hb_ao:resolve( - Msg1, - Msg2, - #{ - scheduler_follow_hints => true, - scheduler_follow_redirects => false - } - ) - ). -``` - -### redirect_from_graphql_test_ - -```erlang -redirect_from_graphql_test_() -> - {timeout, 60, fun redirect_from_graphql/0}. 
-``` - -### redirect_from_graphql - -```erlang -redirect_from_graphql() -> - start(), - Opts = - #{ store => - [ - #{ <<"store-module">> => hb_store_fs, <<"name">> => <<"cache-mainnet">> }, - #{ <<"store-module">> => hb_store_gateway, <<"store">> => false } - ] - }, - {ok, Msg} = hb_cache:read(<<"0syT13r0s0tgPmIed95bJnuSqaD29HQNN8D3ElLSrsc">>, Opts), - ?assertMatch( - {ok, #{ <<"location">> := Location }} when is_binary(Location), - hb_ao:resolve( - Msg, - #{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - hb_message:commit(#{ - <<"type">> => <<"Message">>, - <<"target">> => - <<"0syT13r0s0tgPmIed95bJnuSqaD29HQNN8D3ElLSrsc">>, - <<"test-key">> => <<"Test-Val">> - }, - hb:wallet() - ) - }, - #{ - scheduler_follow_redirects => false - } - ) - ). -``` - -### get_local_schedule_test - -```erlang -get_local_schedule_test() -> - start(), - Msg1 = test_process(), - Msg2 = #{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - hb_message:commit(#{ - <<"type">> => <<"Message">>, - <<"test-key">> => <<"Test-Val">> - }, hb:wallet()) - }, - Msg3 = #{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - hb_message:commit(#{ - <<"type">> => <<"Message">>, - <<"test-key">> => <<"Test-Val-2">> - }, hb:wallet()) - }, - ?assertMatch({ok, _}, hb_ao:resolve(Msg1, Msg2, #{})), - ?assertMatch({ok, _}, hb_ao:resolve(Msg1, Msg3, #{})), - ?assertMatch( - {ok, _}, - hb_ao:resolve(Msg1, #{ - <<"method">> => <<"GET">>, - <<"path">> => <<"schedule">>, - <<"target">> => hb_util:id(Msg1) - }, - #{}) - ). -``` - -### http_init - -```erlang -http_init() -> http_init(#{}). 
-``` - -### http_init - -```erlang -http_init(Opts) -> - start(), - Wallet = ar_wallet:new(), - ExtendedOpts = Opts#{ - priv_wallet => Wallet, - store => [ - #{ - <<"store-module">> => hb_store_lmdb, - <<"name">> => <<"cache-mainnet/lmdb">> - }, - #{ <<"store-module">> => hb_store_gateway, <<"store">> => false } - ] - }, - Node = hb_http_server:start_node(ExtendedOpts), - {Node, ExtendedOpts}. -``` - -### register_scheduler_test - -```erlang -register_scheduler_test() -> - start(), - {Node, Wallet} = http_init(), - Msg1 = hb_message:commit(#{ - <<"path">> => <<"/~scheduler@1.0/location">>, - <<"url">> => <<"https://hyperbeam-test-ignore.com">>, - <<"method">> => <<"POST">>, - <<"nonce">> => 1, - <<"require-codec">> => <<"ans104@1.0">> - }, Wallet), - {ok, Res} = hb_http:post(Node, Msg1, #{}), - ?assertMatch(#{ <<"url">> := Location } when is_binary(Location), Res). -``` - -### http_post_schedule_sign - -```erlang -http_post_schedule_sign(Node, Msg, ProcessMsg, Wallet) -> - Msg1 = hb_message:commit(#{ - <<"path">> => <<"/~scheduler@1.0/schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - hb_message:commit( - Msg#{ - <<"target">> => - hb_util:human_id(hb_message:id(ProcessMsg, all)), - <<"type">> => <<"Message">> - }, - Wallet - ) - }, Wallet), - hb_http:post(Node, Msg1, #{}). -``` - -### http_get_slot - -```erlang -http_get_slot(N, PMsg) -> - ID = hb_message:id(PMsg, all), - Wallet = hb:wallet(), - {ok, _} = hb_http:get(N, hb_message:commit(#{ - <<"path">> => <<"/~scheduler@1.0/slot">>, - <<"method">> => <<"GET">>, - <<"target">> => ID - }, Wallet), #{}). -``` - -### http_get_schedule - -```erlang -http_get_schedule(N, PMsg, From, To) -> - http_get_schedule(N, PMsg, From, To, <<"application/http">>). 
-``` - -### http_get_schedule - -```erlang -http_get_schedule(N, PMsg, From, To, Format) -> - ID = hb_message:id(PMsg, all), - Wallet = hb:wallet(), - {ok, _} = hb_http:get(N, hb_message:commit(#{ - <<"path">> => <<"/~scheduler@1.0/schedule">>, - <<"method">> => <<"GET">>, - <<"target">> => hb_util:human_id(ID), - <<"from">> => From, - <<"to">> => To, - <<"accept">> => Format - }, Wallet), #{}). -``` - -### http_get_schedule_redirect_test_ - -```erlang -http_get_schedule_redirect_test_() -> - {timeout, 60, fun http_get_schedule_redirect/0}. -``` - -### http_get_schedule_redirect - -```erlang -http_get_schedule_redirect() -> - Opts = - #{ - store => - [ - #{ <<"store-module">> => hb_store_fs, <<"name">> => <<"cache-mainnet">> }, - #{ <<"store-module">> => hb_store_gateway, <<"opts">> => #{} } - ], - scheduler_follow_redirects => false - }, - {N, _Wallet} = http_init(Opts), - start(), - ProcID = <<"0syT13r0s0tgPmIed95bJnuSqaD29HQNN8D3ElLSrsc">>, - Res = hb_http:get(N, <<"/", ProcID/binary, "/schedule">>, Opts), - ?assertMatch({ok, #{ <<"location">> := Location }} when is_binary(Location), Res). -``` - -### http_post_schedule_test_ - -```erlang -http_post_schedule_test_() -> - {timeout, 60, fun http_post_schedule/0}. -``` - -### http_post_schedule - -```erlang -http_post_schedule() -> - {N, Opts} = http_init(), - PMsg = hb_message:commit(test_process(Opts), Opts), - Msg1 = hb_message:commit(#{ - <<"path">> => <<"/~scheduler@1.0/schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => PMsg - }, Opts), - {ok, _Res} = hb_http:post(N, Msg1, Opts), - {ok, Res2} = - http_post_schedule_sign( - N, - #{ <<"inner">> => <<"test-message">> }, - PMsg, - Opts - ), - ?assertEqual(<<"test-message">>, hb_ao:get(<<"body/inner">>, Res2, Opts)), - ?assertMatch({ok, #{ <<"current">> := 1 }}, http_get_slot(N, PMsg)). 
-``` - -### http_get_schedule_test_ - -```erlang -http_get_schedule_test_() -> - {timeout, 20, fun() -> - {Node, Opts} = http_init(), - PMsg = hb_message:commit(test_process(Opts), Opts), - Msg1 = hb_message:commit(#{ - <<"path">> => <<"/~scheduler@1.0/schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => PMsg - }, Opts), - Msg2 = hb_message:commit(#{ - <<"path">> => <<"/~scheduler@1.0/schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - hb_message:commit( - #{ - <<"target">> => - hb_util:human_id( - hb_message:id(PMsg, all, Opts) - ), - <<"body">> => <<"test-message">>, - <<"type">> => <<"Message">> - }, - Opts - ) - }, Opts), - {ok, _} = hb_http:post(Node, Msg1, Opts), - lists:foreach( - fun(_) -> - {ok, Res} = hb_http:post(Node, Msg2, Opts), - ?event(debug_scheduler_test, {res, Res}) - end, - lists:seq(1, 10) - ), - ?assertMatch({ok, #{ <<"current">> := 10 }}, http_get_slot(Node, PMsg)), - ?debug_wait(5000), - {ok, Schedule} = http_get_schedule(Node, PMsg, 0, 10), - Assignments = hb_ao:get(<<"assignments">>, Schedule, Opts), - ?assertEqual( - 12, % +1 for the hashpath - hb_maps:size(Assignments, Opts) - ) - end}. -``` - -### http_get_legacy_schedule_test_ - -```erlang -http_get_legacy_schedule_test_() -> - {timeout, 60, fun() -> - Target = <<"CtOVB2dBtyN_vw3BdzCOrvcQvd9Y1oUGT-zLit8E3qM">>, - {Node, Opts} = http_init(), - {ok, Res} = hb_http:get(Node, <<"/~scheduler@1.0/schedule&target=", Target/binary>>, Opts), - LoadedRes = hb_cache:ensure_all_loaded(Res, Opts), - ?assertMatch(#{ <<"assignments">> := As } when map_size(As) > 0, LoadedRes) - end}. -``` - -### http_get_legacy_slot_test_ - -```erlang -http_get_legacy_slot_test_() -> - {timeout, 60, fun() -> - Target = <<"CtOVB2dBtyN_vw3BdzCOrvcQvd9Y1oUGT-zLit8E3qM">>, - {Node, Opts} = http_init(), - Res = hb_http:get(Node, <<"/~scheduler@1.0/slot&target=", Target/binary>>, Opts), - ?assertMatch({ok, #{ <<"current">> := Slot }} when Slot > 0, Res) - end}. 
-``` - -### http_get_legacy_schedule_slot_range_test_ - -```erlang -http_get_legacy_schedule_slot_range_test_() -> - {timeout, 60, fun() -> - Target = <<"zrhm4OpfW85UXfLznhdD-kQ7XijXM-s2fAboha0V5GY">>, - {Node, Opts} = http_init(), - {ok, Res} = hb_http:get(Node, <<"/~scheduler@1.0/schedule&target=", Target/binary, - "&from=0&to=10">>, Opts), - LoadedRes = hb_cache:ensure_all_loaded(Res, Opts), - ?event({res, LoadedRes}), - ?assertMatch(#{ <<"assignments">> := As } when map_size(As) == 11, LoadedRes) - end}. -``` - -### http_get_legacy_schedule_as_aos2_test_ - -```erlang -http_get_legacy_schedule_as_aos2_test_() -> - {timeout, 60, fun() -> - Target = <<"CtOVB2dBtyN_vw3BdzCOrvcQvd9Y1oUGT-zLit8E3qM">>, - {Node, Opts} = http_init(), - {ok, Res} = - hb_http:get( - Node, - #{ - <<"path">> => <<"/~scheduler@1.0/schedule?target=", Target/binary>>, - <<"accept">> => <<"application/aos-2">>, - <<"method">> => <<"GET">> - }, - #{} - ), - Decoded = hb_json:decode(hb_ao:get(<<"body">>, Res, Opts)), - ?assertMatch(#{ <<"edges">> := As } when length(As) > 0, Decoded) - end}. -``` - -### http_post_legacy_schedule_test_ - -```erlang -http_post_legacy_schedule_test_() -> - {timeout, 60, fun() -> - {Node, Opts} = http_init(), - Target = <<"zrhm4OpfW85UXfLznhdD-kQ7XijXM-s2fAboha0V5GY">>, - Signed = - hb_message:commit( - #{ - <<"data-protocol">> => <<"ao">>, - <<"variant">> => <<"ao.TN.1">>, - <<"type">> => <<"Message">>, - <<"action">> => <<"ping">>, - <<"target">> => Target, - <<"test-from">> => hb_util:human_id(hb:address()) - }, - Opts, - <<"ans104@1.0">> - ), - WithMethodAndPath = - Signed#{ - <<"path">> => <<"/~scheduler@1.0/schedule">>, - <<"method">> => <<"POST">> - }, - ?event(debug_downgrade, {signed, Signed}), - {Status, Res} = hb_http:post(Node, WithMethodAndPath, Opts), - ?event(debug_downgrade, {status, Status}), - ?event({res, Res}), - ?assertMatch( - {ok, #{ <<"slot">> := Slot }} when Slot > 0, - {Status, Res} - ) - end}. 
-``` - -### http_get_json_schedule_test_ - -```erlang -http_get_json_schedule_test_() -> - {timeout, 60, fun() -> - {Node, Opts} = http_init(), - PMsg = hb_message:commit(test_process(Opts), Opts), - Msg1 = hb_message:commit(#{ - <<"path">> => <<"/~scheduler@1.0/schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => PMsg - }, Opts), - {ok, _} = hb_http:post(Node, Msg1, Opts), - Msg2 = hb_message:commit(#{ - <<"path">> => <<"/~scheduler@1.0/schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - hb_message:commit( - #{ - <<"inner">> => <<"test">>, - <<"target">> => hb_util:human_id(hb_message:id(PMsg, all)) - }, - Opts - ) - }, - Opts - ), - lists:foreach( - fun(_) -> {ok, _} = hb_http:post(Node, Msg2, Opts) end, - lists:seq(1, 10) - ), - ?assertMatch({ok, #{ <<"current">> := 10 }}, http_get_slot(Node, PMsg)), - {ok, Schedule} = http_get_schedule(Node, PMsg, 0, 10, <<"application/aos-2">>), - ?event({schedule, Schedule}), - JSON = hb_ao:get(<<"body">>, Schedule, Opts), - Assignments = hb_json:decode(JSON), - ?assertEqual( - 11, % +1 for the hashpath - length(hb_maps:get(<<"edges">>, Assignments)) - ) - end}. 
-``` - -### single_resolution - -```erlang -single_resolution(Opts) -> - start(), - BenchTime = 1, - Wallet = hb_opts:get(priv_wallet, hb:wallet(), Opts), - Msg1 = test_process(Opts#{ priv_wallet => Wallet }), - ?event({benchmark_start, ?MODULE}), - MsgToSchedule = hb_message:commit(#{ - <<"type">> => <<"Message">>, - <<"test-key">> => <<"test-val">> - }, Opts), - Iterations = hb_test_utils:benchmark( - fun(_) -> - MsgX = #{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => MsgToSchedule - }, - ?assertMatch({ok, _}, hb_ao:resolve(Msg1, MsgX, Opts)) - end, - BenchTime - ), - ?event(benchmark, {scheduled, Iterations}), - Msg3 = #{ - <<"path">> => <<"slot">>, - <<"method">> => <<"GET">>, - <<"process">> => hb_util:human_id(hb_message:id(Msg1, all, Opts)) - }, - ?assertMatch({ok, #{ <<"current">> := CurrentSlot }} - when CurrentSlot == Iterations - 1, - hb_ao:resolve(Msg1, Msg3, Opts)), - ?event(bench, {res, Iterations - 1}), - hb_test_utils:benchmark_print( - <<"Scheduled through AO-Core:">>, - <<"messages">>, - Iterations, - BenchTime - ), - ?assert(Iterations > 3). 
-``` - -### many_clients - -```erlang -many_clients(Opts) -> - BenchTime = 1, - Processes = hb_opts:get(workers, 25, Opts), - {Node, Opts} = http_init(Opts), - PMsg = hb_message:commit(test_process(Opts), Opts), - Msg1 = hb_message:commit(#{ - <<"path">> => <<"/~scheduler@1.0/schedule">>, - <<"method">> => <<"POST">>, - <<"process">> => PMsg, - <<"body">> => hb_message:commit(#{ <<"inner">> => <<"test">> }, Opts) - }, Opts), - {ok, _} = hb_http:post(Node, Msg1, Opts), - Iterations = hb_test_utils:benchmark( - fun(X) -> - {ok, _} = hb_http:post(Node, Msg1, Opts), - ?event(bench, {iteration, X, self()}) - end, - BenchTime, - Processes - ), - ?event({iterations, Iterations}), - hb_format:eunit_print( - "Scheduled ~p messages with ~p workers through HTTP in ~ps (~.2f msg/s)", - [Iterations, Processes, BenchTime, Iterations / BenchTime] - ), - {ok, Res} = http_get_slot(Node, PMsg), - ?event(bench, {res, Res}), - ?assert(Iterations > 10). -``` - -### benchmark_suite_test_ - -```erlang -benchmark_suite_test_() -> - {timeout, 10, fun() -> - rand:seed(exsplus, erlang:timestamp()), - Port = 30000 + rand:uniform(10000), - Bench = [ - {benchmark, "benchmark", fun single_resolution/1}, - {multihttp_benchmark, "multihttp_benchmark", fun many_clients/1} - ], - filelib:ensure_dir( - binary_to_list(Base = <<"cache-TEST/run-">>) - ), - hb_test_utils:suite_with_opts(Bench, benchmark_suite(Port, Base)) - end}. 
-``` - -### benchmark_suite - -```erlang -benchmark_suite(Port, Base) -> - PortBin = integer_to_binary(Port), - [ - #{ - name => fs, - requires => [hb_store_fs], - opts => #{ - store => #{ <<"store-module">> => hb_store_fs, - <<"name">> => <> - }, - scheduling_mode => local_confirmation, - port => Port - }, - desc => <<"FS store, local conf.">> - }, - #{ - name => fs_aggressive, - requires => [hb_store_fs], - opts => #{ - store => #{ <<"store-module">> => hb_store_fs, - <<"name">> => <> - }, - scheduling_mode => aggressive, - port => Port + 1 - }, - desc => <<"FS store, aggressive conf.">> - }, - #{ - name => rocksdb, - requires => [hb_store_rocksdb], - opts => #{ - store => #{ <<"store-module">> => hb_store_rocksdb, - <<"name">> => <> - }, - scheduling_mode => local_confirmation, - port => Port + 2 - }, - desc => <<"RocksDB store, local conf.">> - }, - #{ - name => rocksdb_aggressive, - requires => [hb_store_rocksdb], - opts => #{ - store => #{ <<"store-module">> => hb_store_rocksdb, - <<"name">> => <> - }, - scheduling_mode => aggressive, - port => Port + 3 - }, - desc => <<"RocksDB store, aggressive conf.">> - }, - #{ - name => rocksdb_extreme_aggressive_h3, - requires => [http3], - opts => #{ - store => #{ <<"store-module">> => hb_store_rocksdb, - <<"name">> => - << - Base/binary, - "run-", - (integer_to_binary(Port+4))/binary - >> - }, - scheduling_mode => aggressive, - protocol => http3, - workers => 100 - }, - desc => <<"100xRocksDB store, aggressive conf, http/3.">> - } - ]. 
-``` - ---- - -*Generated from [dev_scheduler.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler.erl)* diff --git a/docs/book/src/dev_scheduler_cache.erl.md b/docs/book/src/dev_scheduler_cache.erl.md deleted file mode 100644 index 8ed7256cb..000000000 --- a/docs/book/src/dev_scheduler_cache.erl.md +++ /dev/null @@ -1,645 +0,0 @@ -# dev_scheduler_cache - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_cache.erl) - -A module that provides a cache for scheduler assignments and locations. - ---- - -## Exported Functions - -- `latest/2` -- `list/2` -- `read_location/2` -- `read/3` -- `write_location/2` -- `write_spawn/2` -- `write/2` - ---- - -### opts - -A module that provides a cache for scheduler assignments and locations. -Merge the scheduler store with the main store. Used before writing - -```erlang -opts(Opts) -> - Opts#{ - store => - hb_opts:get( - scheduler_store, - hb_opts:get(store, no_viable_store, Opts), - Opts - ) - }. -``` - -### write - -Write an assignment message into the cache. - -```erlang -write(RawAssignment, RawOpts) -> - Assignment = hb_cache:ensure_all_loaded(RawAssignment, RawOpts), - Opts = opts(RawOpts), - Store = hb_opts:get(store, no_viable_store, Opts), - % Write the message into the main cache - ProcID = hb_ao:get(<<"process">>, Assignment, Opts), - Slot = hb_ao:get(<<"slot">>, Assignment, Opts), - ?event( - {writing_assignment, - {proc_id, ProcID}, - {slot, Slot}, - {assignment, Assignment} - } - ), - case hb_cache:write(Assignment, Opts) of - {ok, RootPath} -> - % Create symlinks from the message on the process and the - % slot on the process to the underlying data. -``` - -### write_spawn - -Write the initial assignment message to the cache. - -```erlang -write_spawn(RawInitMessage, Opts) -> - InitMessage = hb_cache:ensure_all_loaded(RawInitMessage, Opts), - hb_cache:write(InitMessage, opts(Opts)). -``` - -### read - -Get an assignment message from the cache. 
- -```erlang -read(ProcID, Slot, Opts) when is_integer(Slot) -> - read(ProcID, hb_util:bin(Slot), Opts); -``` - -### read - -Get an assignment message from the cache. - -```erlang -read(ProcID, Slot, RawOpts) -> - Opts = opts(RawOpts), - Store = hb_opts:get(store, no_viable_store, Opts), - ResolvedPath = - P2 = hb_store:resolve( - Store, - P1 = hb_store:path(Store, [ - ?SCHEDULER_CACHE_PREFIX, - "assignments", - hb_util:human_id(ProcID), - Slot - ]) - ), - ?event( - {read_assignment, - {proc_id, ProcID}, - {slot, Slot}, - {store, Store} - } - ), - ?event({resolved_path, {p1, P1}, {p2, P2}, {resolved, ResolvedPath}}), - case hb_cache:read(ResolvedPath, Opts) of - {ok, Assignment} -> - % If the slot key is not present, the format of the assignment is - % AOS2, so we need to convert it to the canonical format. -``` - -### list - -Get the assignments for a process. - -```erlang -list(ProcID, RawOpts) -> - Opts = opts(RawOpts), - hb_cache:list_numbered( - hb_store:path(hb_opts:get(store, no_viable_store, Opts), [ - ?SCHEDULER_CACHE_PREFIX, - "assignments", - hb_util:human_id(ProcID) - ]), - Opts - ). -``` - -### latest - -Get the latest assignment from the cache. - -```erlang -latest(ProcID, RawOpts) -> - Opts = opts(RawOpts), - ?event({getting_assignments_from_cache, {proc_id, ProcID}, {opts, Opts}}), - case dev_scheduler_cache:list(ProcID, Opts) of - [] -> - ?event({no_assignments_in_cache, {proc_id, ProcID}}), - not_found; - Assignments -> - AssignmentNum = lists:max(Assignments), - ?event( - {found_assignment_from_cache, - {proc_id, ProcID}, - {assignment_num, AssignmentNum} - } - ), - {ok, Assignment} = dev_scheduler_cache:read( - ProcID, - AssignmentNum, - Opts - ), - { - AssignmentNum, - hb_ao:get( - <<"hash-chain">>, Assignment, #{ hashpath => ignore }) - } - end. -``` - -### read_location - -Read the latest known scheduler location for an address. 
- -```erlang -read_location(Address, RawOpts) -> - Opts = opts(RawOpts), - Res = - hb_cache:read( - hb_store:path(hb_opts:get(store, no_viable_store, Opts), [ - ?SCHEDULER_CACHE_PREFIX, - "locations", - hb_util:human_id(Address) - ]), - Opts - ), - Event = - case Res of - {ok, _} -> found_in_store; - not_found -> not_found_in_store; - _ -> local_lookup_unexpected_result - end, - ?event(scheduler_location, {Event, {address, Address}, {res, Res}}), - Res. -``` - -### write_location - -Write the latest known scheduler location for an address. - -```erlang -write_location(LocationMsg, RawOpts) -> - Opts = opts(RawOpts), - Signers = hb_message:signers(LocationMsg, Opts), - ?event( - scheduler_location, - {caching_locally, - {signers, Signers}, - {location_msg, LocationMsg} - } - ), - case hb_cache:write(LocationMsg, Opts) of - {ok, RootPath} -> - lists:foreach( - fun(Signer) -> - hb_store:make_link( - hb_opts:get(store, no_viable_store, Opts), - RootPath, - hb_store:path( - hb_opts:get(store, no_viable_store, Opts), - [ - ?SCHEDULER_CACHE_PREFIX, - "locations", - hb_util:human_id(Signer) - ] - ) - ) - end, - Signers - ), - ok; - false -> - % The message is not valid, so we don't cache it. -``` - -### volatile_schedule_test - -Test that a volatile schedule is lost on restart. 
- -```erlang -volatile_schedule_test() -> - VolStore = hb_test_utils:test_store(hb_store_fs, <<"volatile-sched">>), - NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"non-volatile-sched">>), - Opts = #{ - store => [NonVolStore], - scheduler_store => [VolStore] - }, - hb_store:start(VolStore), - hb_store:start(NonVolStore), - Assignment = #{ - <<"variant">> => <<"ao.N.1">>, - <<"process">> => ProcID = hb_util:human_id(crypto:strong_rand_bytes(32)), - <<"slot">> => 1, - <<"hash-chain">> => <<"test-hash-chain">> - }, - ?assertEqual(ok, write(Assignment, Opts)), - ?assertMatch({1, _}, latest(ProcID, Opts)), - ?assertEqual({ok, Assignment}, read(ProcID, 1, Opts)), - hb_store:stop(VolStore), - hb_store:reset(VolStore), - hb_store:start(VolStore), - ?assertMatch(not_found, latest(ProcID, Opts)), - ?assertMatch(not_found, read(ProcID, 1, Opts)). -``` - -### concurrent_scheduler_write_test - -Test concurrent writes to scheduler store from multiple processes. - -```erlang -concurrent_scheduler_write_test() -> - VolStore = hb_test_utils:test_store(hb_store_fs, <<"concurrent-vol">>), - NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"concurrent-nonvol">>), - Opts = #{ - store => [NonVolStore], - scheduler_store => [VolStore] - }, - hb_store:start(VolStore), - hb_store:start(NonVolStore), - Workers = 50, - ProcID = hb_util:human_id(crypto:strong_rand_bytes(32)), - Parent = self(), - lists:foreach(fun(Slot) -> - spawn_link(fun() -> - Assignment = #{ - <<"process">> => ProcID, - <<"slot">> => Slot, - <<"hash-chain">> => - <<"concurrent-test-", (integer_to_binary(Slot))/binary>> - }, - Result = write(Assignment, Opts), - Parent ! 
{write_result, Slot, Result} - end) - end, lists:seq(1, Workers)), - Results = - lists:map( - fun(Slot) -> - receive - {write_result, Slot, Result} -> - ?event(testing, {write_result, Slot, Result}), - Result - after 5000 -> - timeout - end - end, - lists:seq(1, Workers) - ), - ?event(testing, {concurrent_write_results, Results,Workers}), - ?assertEqual(lists:duplicate(Workers, ok), Results), - AllSlots = list(ProcID, Opts), - ?event(testing, {all_slots, AllSlots}), - ?assertEqual(Workers, length(AllSlots)), - ?assertEqual(lists:seq(1, Workers), lists:sort(AllSlots)). -``` - -### concurrent_read_write_test - -Test concurrent reads during writes to detect race conditions. - -```erlang -concurrent_read_write_test() -> - VolStore = hb_test_utils:test_store(hb_store_fs, <<"race-vol">>), - NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"race-nonvol">>), - Opts = #{ - store => [NonVolStore], - scheduler_store => [VolStore] - }, - hb_store:start(VolStore), - hb_store:start(NonVolStore), - ProcID = hb_util:human_id(crypto:strong_rand_bytes(32)), - Parent = self(), - ?event(testing, {concurrent_test_proc_id, ProcID}), - spawn_link(fun() -> - lists:foreach(fun(Slot) -> - Assignment = #{ - <<"variant">> => <<"ao.N.1">>, - <<"process">> => ProcID, - <<"slot">> => Slot, - <<"hash-chain">> => <<"race-test-", (integer_to_binary(Slot))/binary>> - }, - write(Assignment, Opts), - timer:sleep(1) - end, lists:seq(1, 100)), - ?event(testing, {writer_completed}), - Parent ! writer_done - end), - lists:foreach( - fun(ReaderNum) -> - spawn_link(fun() -> - ReadResults = lists:map(fun(Slot) -> - timer:sleep(rand:uniform(5)), - case read(ProcID, Slot, Opts) of - {ok, _} -> success; - not_found -> not_found - end - end, lists:seq(1, 100)), - SuccessCount = length([R || R <- ReadResults, R == success]), - ?event(testing, {reader_done, ReaderNum, SuccessCount}), - Parent ! 
{reader_done, ReaderNum, ReadResults} - end) - end, - lists:seq(1, 10) - ), - receive - writer_done -> ok - after 15000 -> - ?assert(false) - end, - AllReaderResults = lists:map(fun(ReaderNum) -> - receive - {reader_done, ReaderNum, Results} -> Results - after 5000 -> - ?assert(false), - [] - end - end, lists:seq(1, 10)), - FinalSlots = list(ProcID, Opts), - ?event(testing, {final_verification, {slots_found, length(FinalSlots)}}), - ?assertEqual(100, length(FinalSlots)), - ?assertEqual(lists:seq(1, 100), lists:sort(FinalSlots)), - TotalSuccessfulReads = lists:sum([ - length([R || R <- Results, R == success]) || Results <- AllReaderResults - ]), - ?event(testing, { - concurrent_read_stats, - {total_successful_reads, TotalSuccessfulReads} - }), - ?assert(TotalSuccessfulReads > 0). -``` - -### large_assignment_volume_test - -Test writing a large volume of assignments to stress memory. Helps - -```erlang -large_assignment_volume_test() -> - VolStore = hb_test_utils:test_store(hb_store_fs, <<"volume-vol">>), - NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"volume-nonvol">>), - Opts = #{ - store => [NonVolStore], - scheduler_store => [VolStore] - }, - hb_store:start(VolStore), - hb_store:start(NonVolStore), - VolumeSize = 1000, - ProcID = hb_util:human_id(crypto:strong_rand_bytes(32)), - StartTime = erlang:monotonic_time(millisecond), - lists:foreach( - fun(Slot) -> - Assignment = #{ - <<"variant">> => <<"ao.N.1">>, - <<"process">> => ProcID, - <<"slot">> => Slot, - <<"hash-chain">> => crypto:strong_rand_bytes(64) - }, - ?assertEqual(ok, write(Assignment, Opts)) - end, - lists:seq(1, VolumeSize) - ), - EndTime = erlang:monotonic_time(millisecond), - ?event(testing, {large_volume_write_time, EndTime - StartTime}), - AllSlots = list(ProcID, Opts), - ?assertEqual(VolumeSize, length(AllSlots)), - ?assertEqual(lists:seq(1, VolumeSize), lists:sort(AllSlots)), - ReadStartTime = erlang:monotonic_time(millisecond), - lists:foreach(fun(Slot) -> - ?assertMatch({ok, _}, 
read(ProcID, Slot, Opts)) - end, lists:seq(1, VolumeSize)), - ReadEndTime = erlang:monotonic_time(millisecond), - ?event(testing, {large_volume_read_time, ReadEndTime - ReadStartTime}). -``` - -### rapid_restart_test - -Test rapid store restarts under load. - -```erlang -rapid_restart_test() -> - VolStore = hb_test_utils:test_store(hb_store_fs, <<"restart-vol">>), - NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"restart-nonvol">>), - Opts = #{ - store => [NonVolStore], - scheduler_store => [VolStore] - }, - hb_store:start(VolStore), - hb_store:start(NonVolStore), - ProcID = hb_util:human_id(crypto:strong_rand_bytes(32)), - lists:foreach( - fun(Cycle) -> - lists:foreach( - fun(Slot) -> - Assignment = #{ - <<"variant">> => <<"ao.N.1">>, - <<"process">> => ProcID, - <<"slot">> => Slot + (Cycle * 10), - <<"hash-chain">> => - <<"restart-cycle-", (integer_to_binary(Cycle))/binary>> - }, - ?assertEqual(ok, write(Assignment, Opts)) - end, - lists:seq(1, 10) - ), - SlotsBeforeRestart = list(ProcID, Opts), - ?assertMatch([_|_], SlotsBeforeRestart), - ?event(testing, { - restart_cycle, Cycle, {slots_before, length(SlotsBeforeRestart)} - }), - hb_store:stop(VolStore), - timer:sleep(10), - hb_store:reset(VolStore), - hb_store:start(VolStore), - SlotsAfterRestart = list(ProcID, Opts), - ?assertEqual([], SlotsAfterRestart), - ?event({restart_verified, Cycle, {slots_after, length(SlotsAfterRestart)}}) - end, - lists:seq(1, 5) - ). -``` - -### mixed_store_reset_operations_test - -Test scheduler store behavior during reset store operations. 
- -```erlang -mixed_store_reset_operations_test() -> - VolStore = hb_test_utils:test_store(hb_store_fs, <<"mixed-vol">>), - NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"mixed-nonvol">>), - Opts = #{ - store => [NonVolStore], - scheduler_store => [VolStore] - }, - hb_store:start(VolStore), - hb_store:start(NonVolStore), - ProcID = hb_util:human_id(crypto:strong_rand_bytes(32)), - Assignment1 = #{ - <<"variant">> => <<"ao.N.1">>, - <<"process">> => ProcID, - <<"slot">> => 1, - <<"hash-chain">> => <<"mixed-test-1">> - }, - ?assertEqual(ok, write(Assignment1, Opts)), - ?event(testing, {assignment_written, ProcID}), - hb_store:reset(NonVolStore), - ReadAfterNonVolReset = read(ProcID, 1, Opts), - ?assertMatch({ok, _}, ReadAfterNonVolReset), - ?event(testing, {after_nonvol_reset, ReadAfterNonVolReset}), - hb_store:reset(VolStore), - ReadAfterVolReset = read(ProcID, 1, Opts), - ?assertEqual(not_found, ReadAfterVolReset), - ?event(testing, {after_vol_reset, ReadAfterVolReset}). -``` - -### invalid_assignment_stress_test - -Test handling of invalid assignment data. 
- -```erlang -invalid_assignment_stress_test() -> - VolStore = hb_test_utils:test_store(hb_store_fs, <<"invalid-vol">>), - NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"invalid-nonvol">>), - Opts = #{ - store => [NonVolStore], - scheduler_store => [VolStore] - }, - hb_store:start(VolStore), - hb_store:start(NonVolStore), - InvalidAssignments = [ - #{}, - #{<<"process">> => <<"invalid">>}, - #{<<"slot">> => 1}, - #{<<"process">> => <<>>, <<"slot">> => 1}, - #{<<"process">> => <<"valid">>, <<"slot">> => -1}, - #{<<"process">> => <<"valid">>, <<"slot">> => <<"not-integer">>} - ], - ?event(testing, {testing_invalid_assignments, length(InvalidAssignments)}), - Results = lists:map(fun(Assignment) -> - Result = try - write(Assignment, Opts) - catch - _:_ -> error - end, - ?assertNotEqual(ok, Result), - Result - end, InvalidAssignments), - ErrorCount = length([R || R <- Results, R == error]), - ?event( - {invalid_assignment_results, - {errors, ErrorCount}, - {total, length(InvalidAssignments)} - } - ), - ?assertEqual(6, ErrorCount). -``` - -### scheduler_location_stress_test - -Test scheduler location operations under stress. 
- -```erlang -scheduler_location_stress_test() -> - VolStore = hb_test_utils:test_store(hb_store_fs, <<"location-vol">>), - NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"location-nonvol">>), - Wallet = ar_wallet:new(), - Opts = #{ - store => [NonVolStore], - scheduler_store => [VolStore], - priv_wallet => Wallet - }, - hb_store:start(VolStore), - hb_store:start(NonVolStore), - LocationCount = 10, - ?event(testing, {location_stress_test_starting, LocationCount}), - Results = - lists:map( - fun(N) -> - LocationMsg = #{ - <<"scheduler">> => - hb_util:human_id(ar_wallet:to_address(Wallet)), - <<"location">> => - << - "http://scheduler", - (integer_to_binary(N))/binary, - ".com" - >>, - <<"timestamp">> => erlang:system_time(millisecond), - <<"ttl">> => 3600000 - }, - Result = - try - write_location(LocationMsg, Opts) - catch - Res -> - ?event(testing, {location_write_error, {error, Res}}), - ok - end, - ?assert(Result == ok orelse element(1, Result) == error), - Result - end, - lists:seq(1, LocationCount) - ), - SuccessCount = length([R || R <- Results, R == ok]), - ?event( - {location_stress_results, - {successes, SuccessCount}, - {total, LocationCount} - } - ). -``` - -### volatile_store_corruption_test - -Test system behavior with corrupted data in volatile store. 
- -```erlang -volatile_store_corruption_test() -> - VolStore = hb_test_utils:test_store(hb_store_fs, <<"corruption-vol">>), - NonVolStore = hb_test_utils:test_store(hb_store_fs, <<"corruption-nonvol">>), - Opts = #{ - store => [NonVolStore], - scheduler_store => [VolStore] - }, - hb_store:start(VolStore), - hb_store:start(NonVolStore), - ProcID = hb_util:human_id(crypto:strong_rand_bytes(32)), - Assignment = #{ - <<"variant">> => <<"ao.N.1">>, - <<"process">> => ProcID, - <<"slot">> => 1, - <<"hash-chain">> => <<"corruption-test">> - }, - ?assertEqual(ok, write(Assignment, Opts)), - ReadBeforeCorruption = read(ProcID, 1, Opts), - ?assertMatch({ok, _}, ReadBeforeCorruption), - ?event(testing, {before_corruption, ReadBeforeCorruption}), - hb_store:reset(VolStore), - ?event(testing, {volatile_store_reset}), - ReadAfterCorruption = read(ProcID, 1, Opts), - SlotsAfterCorruption = list(ProcID, Opts), - LatestAfterCorruption = latest(ProcID, Opts), - ?assertEqual(not_found, ReadAfterCorruption), - ?assertEqual([], SlotsAfterCorruption), - ?assertEqual(not_found, LatestAfterCorruption), - ?event(testing, - { corruption_recovery_verified, - { read, ReadAfterCorruption }, - { list, length(SlotsAfterCorruption) }, - { latest, LatestAfterCorruption } -``` - ---- - -*Generated from [dev_scheduler_cache.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_cache.erl)* diff --git a/docs/book/src/dev_scheduler_formats.erl.md b/docs/book/src/dev_scheduler_formats.erl.md deleted file mode 100644 index 33793cd1d..000000000 --- a/docs/book/src/dev_scheduler_formats.erl.md +++ /dev/null @@ -1,309 +0,0 @@ -# dev_scheduler_formats - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_formats.erl) - -This module is used by dev_scheduler in order to produce outputs that -are compatible with various forms of AO clients. 
It features two main formats: -- `application/json` -- `application/http` -The `application/json` format is a legacy format that is not recommended for -new integrations of the AO protocol. - ---- - -## Exported Functions - -- `aos2_normalize_types/1` -- `aos2_to_assignment/2` -- `aos2_to_assignments/3` -- `assignments_to_aos2/4` -- `assignments_to_bundle/4` - ---- - -### assignments_to_bundle - -This module is used by dev_scheduler in order to produce outputs that -Generate a `GET /schedule` response for a process as HTTP-sig bundles. - -```erlang -assignments_to_bundle(ProcID, Assignments, More, Opts) -> - TimeInfo = ar_timestamp:get(), - assignments_to_bundle(ProcID, Assignments, More, TimeInfo, Opts). -``` - -### assignments_to_bundle - -```erlang -assignments_to_bundle(ProcID, Assignments, More, TimeInfo, RawOpts) -> - Opts = format_opts(RawOpts), - {Timestamp, Height, Hash} = TimeInfo, - {ok, #{ - <<"type">> => <<"schedule">>, - <<"process">> => hb_util:human_id(ProcID), - <<"continues">> => hb_util:atom(More), - <<"timestamp">> => hb_util:int(Timestamp), - <<"block-height">> => hb_util:int(Height), - <<"block-hash">> => hb_util:human_id(Hash), - <<"assignments">> => - hb_maps:from_list( - lists:map( - fun(Assignment) -> - { - hb_ao:get( - <<"slot">>, - Assignment, - Opts#{ hashpath => ignore } - ), - Assignment - } - end, - Assignments - ) - ) - }}. 
-``` - -### assignments_to_aos2 - -```erlang -assignments_to_aos2(ProcID, Assignments, More, RawOpts) when is_map(Assignments) -> - assignments_to_aos2( - ProcID, - hb_util:message_to_ordered_list(Assignments), - More, - format_opts(RawOpts) - ); -``` - -### assignments_to_aos2 - -```erlang -assignments_to_aos2(ProcID, Assignments, More, RawOpts) -> - Opts = format_opts(RawOpts), - {Timestamp, Height, Hash} = ar_timestamp:get(), - BodyStruct = - #{ - <<"page_info">> => - #{ - <<"process">> => hb_util:human_id(ProcID), - <<"has_next_page">> => More, - <<"timestamp">> => list_to_binary(integer_to_list(Timestamp)), - <<"block-height">> => list_to_binary(integer_to_list(Height)), - <<"block-hash">> => hb_util:human_id(Hash) - }, - <<"edges">> => - lists:map( - fun(Assignment) -> - #{ - <<"cursor">> => cursor(Assignment, Opts), - <<"node">> => assignment_to_aos2(Assignment, Opts) - } - end, - Assignments - ) - }, - Encoded = hb_json:encode(BodyStruct), - ?event({body_struct, BodyStruct}), - ?event({encoded, {explicit, Encoded}}), - {ok, - #{ - <<"content-type">> => <<"application/json">>, - <<"body">> => Encoded - } - }. -``` - -### cursor - -Generate a cursor for an assignment. This should be the slot number, at -Convert an assignment to an AOS2-compatible JSON structure. - -```erlang -cursor(Assignment, RawOpts) -> - Opts = format_opts(RawOpts), - hb_ao:get(<<"slot">>, Assignment, Opts). -``` - -### assignment_to_aos2 - -Generate a cursor for an assignment. This should be the slot number, at -Convert an assignment to an AOS2-compatible JSON structure. - -```erlang -assignment_to_aos2(Assignment, RawOpts) -> - Opts = format_opts(RawOpts), - Message = hb_ao:get(<<"body">>, Assignment, Opts), - AssignmentWithoutBody = hb_maps:without([<<"body">>], Assignment, Opts), - #{ - <<"message">> => - dev_json_iface:message_to_json_struct(Message, Opts), - <<"assignment">> => - dev_json_iface:message_to_json_struct(AssignmentWithoutBody, Opts) - }. 
-``` - -### aos2_to_assignments - -Convert an AOS2-style JSON structure to a normalized HyperBEAM - -```erlang -aos2_to_assignments(ProcID, Body, RawOpts) -> - Opts = format_opts(RawOpts), - Assignments = hb_maps:get(<<"edges">>, Body, Opts, Opts), - ?event({raw_assignments, Assignments}), - ParsedAssignments = - lists:map( - fun(A) -> aos2_to_assignment(A, Opts) end, - Assignments - ), - ?event({parsed_assignments, ParsedAssignments}), - TimeInfo = - case ParsedAssignments of - [] -> {0, 0, hb_util:encode(<<0:256>>)}; - _ -> - Last = lists:last(ParsedAssignments), - { - hb_ao:get(<<"timestamp">>, Last, Opts), - hb_ao:get(<<"block-height">>, Last, Opts), - hb_ao:get(<<"block-hash">>, Last, Opts) - } - end, - assignments_to_bundle(ProcID, ParsedAssignments, false, TimeInfo, Opts). -``` - -### aos2_to_assignment - -Create and normalize an assignment from an AOS2-style JSON structure. -The `hb_gateway_client` module expects all JSON structures to at least - -```erlang -aos2_to_assignment(A, RawOpts) -> - Opts = format_opts(RawOpts), - % Unwrap the node if it is provided - Node = hb_maps:get(<<"node">>, A, A, Opts), - ?event({node, Node}), - {ok, Assignment} = - hb_gateway_client:result_to_message( - aos2_normalize_data(hb_maps:get(<<"assignment">>, Node, undefined, Opts)), - Opts - ), - NormalizedAssignment = aos2_normalize_types(Assignment), - {ok, Message} = - case hb_maps:get(<<"message">>, Node, undefined, Opts) of - null -> - MessageID = hb_maps:get(<<"message">>, Assignment, undefined, Opts), - ?event(error, {scheduler_did_not_provide_message, MessageID}), - case hb_cache:read(MessageID, Opts) of - {ok, Msg} -> {ok, Msg}; - {error, _} -> - throw({error, - {message_not_given_by_scheduler_or_cache, - MessageID} - } - ) - end; - Body -> - hb_gateway_client:result_to_message( - aos2_normalize_data(Body), - Opts - ) - end, - NormalizedMessage = aos2_normalize_types(Message), - ?event({message, Message}), - NormalizedAssignment#{ <<"body">> => NormalizedMessage }. 
-``` - -### aos2_normalize_data - -Create and normalize an assignment from an AOS2-style JSON structure. -The `hb_gateway_client` module expects all JSON structures to at least - -```erlang -aos2_normalize_data(JSONStruct) -> - case JSONStruct of - #{<<"data">> := _} -> JSONStruct; - _ -> JSONStruct#{ <<"data">> => <<>> } - end. -``` - -### aos2_normalize_types - -Normalize an AOS2 formatted message to ensure that all field NAMES and - -```erlang -aos2_normalize_types(Msg = #{ <<"timestamp">> := TS }) when is_binary(TS) -> - aos2_normalize_types(Msg#{ <<"timestamp">> => hb_util:int(TS) }); -``` - -### aos2_normalize_types - -Normalize an AOS2 formatted message to ensure that all field NAMES and - -```erlang -aos2_normalize_types(Msg = #{ <<"nonce">> := Nonce }) - when is_binary(Nonce) and not is_map_key(<<"slot">>, Msg) -> - aos2_normalize_types( - Msg#{ <<"slot">> => hb_util:int(Nonce) } - ); -``` - -### aos2_normalize_types - -Normalize an AOS2 formatted message to ensure that all field NAMES and - -```erlang -aos2_normalize_types(Msg = #{ <<"epoch">> := DS }) when is_binary(DS) -> - aos2_normalize_types(Msg#{ <<"epoch">> => hb_util:int(DS) }); -``` - -### aos2_normalize_types - -Normalize an AOS2 formatted message to ensure that all field NAMES and - -```erlang -aos2_normalize_types(Msg = #{ <<"slot">> := Slot }) when is_binary(Slot) -> - aos2_normalize_types(Msg#{ <<"slot">> => hb_util:int(Slot) }); -``` - -### aos2_normalize_types - -Normalize an AOS2 formatted message to ensure that all field NAMES and - -```erlang -aos2_normalize_types(Msg) when not is_map_key(<<"block-hash">>, Msg) -> - ?event({missing_block_hash, Msg}), - aos2_normalize_types(Msg#{ <<"block-hash">> => hb_util:encode(<<0:256>>) }); -``` - -### aos2_normalize_types - -Normalize an AOS2 formatted message to ensure that all field NAMES and - -```erlang -aos2_normalize_types(Msg) -> - ?event( - { - aos2_normalized_types, - {msg, Msg}, - {anchor, hb_ao:get(<<"anchor">>, Msg, <<>>, #{})} - } - ), 
- Msg. -``` - -### format_opts - -For all scheduler format operations, we do not calculate hashpaths, - -```erlang -format_opts(Opts) -> - Opts#{ - hashpath => ignore, - cache_control => [<<"no-cache">>, <<"no-store">>], - await_inprogress => false -``` - ---- - -*Generated from [dev_scheduler_formats.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_formats.erl)* diff --git a/docs/book/src/dev_scheduler_registry.erl.md b/docs/book/src/dev_scheduler_registry.erl.md deleted file mode 100644 index 566936c31..000000000 --- a/docs/book/src/dev_scheduler_registry.erl.md +++ /dev/null @@ -1,186 +0,0 @@ -# dev_scheduler_registry - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_registry.erl) - -A simple registry for local services in AO, using pg. Currently, -only SU processes are supported. - ---- - -## Exported Functions - -- `find/1` -- `find/2` -- `find/3` -- `get_processes/0` -- `get_wallet/0` -- `start/0` - ---- - -### start - -```erlang -start() -> - hb_name:start(), - ok. -``` - -### get_wallet - -```erlang -get_wallet() -> - % TODO: We might want to use a different wallet per SU later. -``` - -### find - -Find a process associated with the processor ID in the local registry -Find a process associated with the processor ID in the local registry - -```erlang -find(ProcID) -> find(ProcID, false). -``` - -### find - -Find a process associated with the processor ID in the local registry -Find a process associated with the processor ID in the local registry - -```erlang -find(ProcID, ProcMsgOrFalse) -> - find(ProcID, ProcMsgOrFalse, #{ priv_wallet => hb:wallet() }). -``` - -### find - -Same as `find/2` but with additional options passed when spawning a - -```erlang -find(ProcID, ProcMsgOrFalse, Opts) -> - case hb_name:lookup({<<"scheduler@1.0">>, ProcID}) of - undefined -> maybe_new_proc(ProcID, ProcMsgOrFalse, Opts); - Pid -> Pid - end. 
-``` - -### get_processes - -Return a list of all currently registered ProcID. - -```erlang -get_processes() -> - ?event({getting_processes, hb_name:all()}), - [ ProcID || {{<<"scheduler@1.0">>, ProcID}, _} <- hb_name:all() ]. -``` - -### maybe_new_proc - -Return a list of all currently registered ProcID. - -```erlang -maybe_new_proc(_ProcID, false, _Opts) -> not_found; -``` - -### maybe_new_proc - -Return a list of all currently registered ProcID. - -```erlang -maybe_new_proc(ProcID, ProcMsg, Opts) -> - dev_scheduler_server:start(ProcID, ProcMsg, Opts). -``` - -### test_opts - -```erlang -test_opts() -> - #{ - store => hb_test_utils:test_store(), - priv_wallet => hb:wallet() - }. -``` - -### generate_test_procs - -```erlang -generate_test_procs(Opts) -> - [ - hb_message:commit( - #{ - <<"type">> => <<"Process">>, - <<"image">> => <<0:(1024*32)>> - }, - Opts - ), - hb_message:commit( - #{ - <<"type">> => <<"Process">>, - <<"image">> => <<0:(1024*32)>> - }, - Opts - ) - ]. -``` - -### find_non_existent_process_test - -```erlang -find_non_existent_process_test() -> - Opts = test_opts(), - [Proc1, _Proc2] = generate_test_procs(Opts), - start(), - ?assertEqual(not_found, ?MODULE:find(hb_message:id(Proc1, all))). -``` - -### create_and_find_process_test - -```erlang -create_and_find_process_test() -> - Opts = test_opts(), - [Proc1, _Proc2] = generate_test_procs(Opts), - ID = hb_message:id(Proc1, all, Opts), - start(), - Pid1 = ?MODULE:find(ID, Proc1), - ?assert(is_pid(Pid1)), - ?assertEqual(Pid1, ?MODULE:find(ID, Proc1)). 
-``` - -### create_multiple_processes_test - -```erlang -create_multiple_processes_test() -> - Opts = test_opts(), - [Proc1, Proc2] = generate_test_procs(Opts), - start(), - ID1 = hb_message:id(Proc1, all, Opts), - ID2 = hb_message:id(Proc2, all, Opts), - Pid1 = ?MODULE:find(ID1, Proc1), - Pid2 = ?MODULE:find(ID2, Proc2), - ?assert(is_pid(Pid1)), - ?assert(is_pid(Pid2)), - ?assertNotEqual(Pid1, Pid2), - ?assertEqual(Pid1, ?MODULE:find(ID1, Proc1)), - ?assertEqual(Pid2, ?MODULE:find(ID2, Proc2)). -``` - -### get_all_processes_test - -```erlang -get_all_processes_test() -> - Opts = test_opts(), - [Proc1, Proc2] = generate_test_procs(Opts), - start(), - ID1 = hb_message:id(Proc1, all, Opts), - ID2 = hb_message:id(Proc2, all, Opts), - ?MODULE:find(ID1, Proc1), - ?MODULE:find(ID2, Proc2), - Processes = ?MODULE:get_processes(), - ?assert(length(Processes) >= 2), - ?event({processes, Processes}), - ?assert(lists:member(ID1, Processes)), -``` - ---- - -*Generated from [dev_scheduler_registry.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_registry.erl)* diff --git a/docs/book/src/dev_scheduler_server.erl.md b/docs/book/src/dev_scheduler_server.erl.md deleted file mode 100644 index 20a84c69d..000000000 --- a/docs/book/src/dev_scheduler_server.erl.md +++ /dev/null @@ -1,238 +0,0 @@ -# dev_scheduler_server - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_server.erl) - -A long-lived server that schedules messages for a process. -It acts as a deliberate 'bottleneck' to prevent the server accidentally -assigning multiple messages to the same slot. - ---- - -## Exported Functions - -- `info/1` -- `schedule/2` -- `start/3` -- `stop/1` - ---- - -### start - -A long-lived server that schedules messages for a process. -Start a scheduling server for a given computation. 
- -```erlang -start(ProcID, Proc, Opts) -> - ?event(scheduling, {starting_scheduling_server, {proc_id, ProcID}}), - spawn_link( - fun() -> - % Before we start, register the scheduler name. -``` - -### commitment_wallets - -Determine the appropriate list of keys to use to commit assignments for - -```erlang -commitment_wallets(ProcMsg, Opts) -> - SchedulerVal = - hb_ao:get_first( - [ - {ProcMsg, <<"scheduler">>}, - {ProcMsg, <<"scheduler-location">>} - ], - [], - Opts - ), - lists:filtermap( - fun(Scheduler) -> - case hb_opts:as(Scheduler, Opts) of - {ok, #{ priv_wallet := Wallet }} -> {true, Wallet}; - _ -> false - end - end, - dev_scheduler:parse_schedulers(SchedulerVal) - ). -``` - -### schedule - -Call the appropriate scheduling server to assign a message. - -```erlang -schedule(AOProcID, Message) when is_binary(AOProcID) -> - schedule(dev_scheduler_registry:find(AOProcID), Message); -``` - -### schedule - -Call the appropriate scheduling server to assign a message. - -```erlang -schedule(ErlangProcID, Message) -> - ?event( - {scheduling_message, - {proc_id, ErlangProcID}, - {message, Message}, - {is_alive, is_process_alive(ErlangProcID)} - } - ), - AbortTime = scheduler_time() + ?DEFAULT_TIMEOUT, - ErlangProcID ! {schedule, Message, self(), AbortTime}, - receive - {scheduled, Message, Assignment} -> - Assignment - after ?DEFAULT_TIMEOUT -> - throw({scheduler_timeout, {proc_id, ErlangProcID}, {message, Message}}) - end. -``` - -### info - -Get the current slot from the scheduling server. - -```erlang -info(ProcID) -> - ?event({getting_info, {proc_id, ProcID}}), - ProcID ! {info, self()}, - receive {info, Info} -> Info end. -``` - -### stop - -```erlang -stop(ProcID) -> - ?event({stopping_scheduling_server, {proc_id, ProcID}}), - ProcID ! stop. -``` - -### server - -The main loop of the server. 
Simply waits for messages to assign and - -```erlang -server(State) -> - receive - {schedule, Message, Reply, AbortTime} -> - case SchedTime = scheduler_time() > AbortTime of - true -> - % Ignore scheduling requests if they are too old. The - % `abort-time' signals to us that the client has already - % given up on the request, so in order to maintain - % predictability we ignore it. -``` - -### assign - -Assign a message to the next slot. - -```erlang -assign(State, Message, ReplyPID) -> - try - do_assign(State, Message, ReplyPID) - catch - _Class:Reason:Stack -> - ?event({error_scheduling, Reason, Stack}), - State - end. -``` - -### do_assign - -Generate and store the actual assignment message. - -```erlang -do_assign(State, Message, ReplyPID) -> - % Ensure that only committed keys from the message are included in the - % assignment. -``` - -### commit_assignment - -Commit to the assignment using all of our appropriate wallets. - -```erlang -commit_assignment(BaseAssignment, State) -> - Wallets = maps:get(wallets, State), - Opts = maps:get(opts, State), - lists:foldr( - fun(Wallet, Assignment) -> - hb_message:commit(Assignment, Opts#{ priv_wallet => Wallet }) - end, - BaseAssignment, - Wallets - ). -``` - -### maybe_inform_recipient - -Potentially inform the caller that the assignment has been scheduled. - -```erlang -maybe_inform_recipient(Mode, ReplyPID, Message, Assignment, State) -> - case maps:get(mode, State) of - Mode -> ReplyPID ! {scheduled, Message, Assignment}; - _ -> ok - end. -``` - -### next_hashchain - -Create the next element in a chain of hashes that links this and prior - -```erlang -next_hashchain(HashChain, Message, Opts) -> - ?event({creating_next_hashchain, {hash_chain, HashChain}, {message, Message}}), - ID = hb_message:id(Message, all, Opts), - crypto:hash( - sha256, - << HashChain/binary, ID/binary >> - ). -``` - -### scheduler_time - -Return the current time in milliseconds. 
- -```erlang -scheduler_time() -> - erlang:system_time(millisecond). -``` - -### new_proc_test - -Test the basic functionality of the server. - -```erlang -new_proc_test() -> - Wallet = ar_wallet:new(), - SignedItem = hb_message:commit( - #{ <<"data">> => <<"test">>, <<"random-key">> => rand:uniform(10000) }, - #{ priv_wallet => Wallet } - ), - SignedItem2 = hb_message:commit( - #{ <<"data">> => <<"test2">> }, - #{ priv_wallet => Wallet } - ), - SignedItem3 = hb_message:commit( - #{ - <<"data">> => <<"test2">>, - <<"deep-key">> => - #{ <<"data">> => <<"test3">> } - }, - #{ priv_wallet => Wallet } - ), - dev_scheduler_registry:find(hb_message:id(SignedItem, all), SignedItem), - schedule(ID = hb_message:id(SignedItem, all), SignedItem), - schedule(ID, SignedItem2), - schedule(ID, SignedItem3), - ?assertMatch( - #{ current := 2 }, - dev_scheduler_server:info(dev_scheduler_registry:find(ID)) - ). -``` - ---- - -*Generated from [dev_scheduler_server.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_scheduler_server.erl)* diff --git a/docs/book/src/dev_secret.erl.md b/docs/book/src/dev_secret.erl.md deleted file mode 100644 index 70846ff82..000000000 --- a/docs/book/src/dev_secret.erl.md +++ /dev/null @@ -1,845 +0,0 @@ -# dev_secret - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_secret.erl) - -A device that allows a node to create, export, and commit messages with -secrets that are stored on the node itself. Users of this device must specify -an `access-control` message which requests are validated against before -access to secrets is granted. -This device is intended for use in situations in which the node is trusted -by the user, for example if it is running on their own machine or in a -TEE-protected environment that they deem to be secure. -# Authentication Flow -Each secret is associated with an `access-control` message and a list of -`controllers` that may access it. 
The `access-control` system is pluggable --- users may configure their messages to call any AO-Core device that is -executable on the host node. The default `access-control` message uses the -`~cookie@1.0` device's `generate` and `verify` keys to authenticate users. -During secret generation: -1. This device creates the secret and determines its `committer` address. -2. The device invokes the caller's `access-control` message with the `commit` - path and the `keyid` in the request. -3. The `access-control` message sets up authentication (e.g., creates cookies, - secrets) and returns a response, containing a commitment with a `keyid` - field. This `keyid` is used to identify the user's 'access secret' which - grants them the ability to use the device's 'hidden' secret in the future. -4. This device stores both the secret and the initialized `access-control` - message, as well as its other metadata. -5. This device returns the initialized `access-control` message with the - secret's `keyid` added to the `body` field. -During secret operations (commit, export, etc.): -1. This device retrieves the stored `access-control` message for the - secret either from persistent storage or from the node message's private - element. The keyid of the `access secret` is either provided by the - user in the request, or is determined from a provided `secret` parameter - in the request. -2. This device calls the `access-control` message with path `verify` and - the user's request. -3. The `access-control` message verifies the request (e.g., checks cookies, - provided authentication credentials, etc.). -4. If verification passes, the device performs the requested operation. -5. If verification fails, a 400 error is returned. -# Access Control Message Requirements -Access control messages are fully customizable by callers, but must support -two paths: -`/commit`: Called during secret generation to bind the `access-control` - template message to the given `keyid` (secret reference). 
- - Input: Request message containing `keyid` field with the secret's `keyid` - in the `body` field. - - Output: Response message with authentication setup (cookies, tokens, etc.). - This message will be used as the `Base` message for the `verify` - path. -`/verify`: Called before allowing an operation that requires access to a - secret to proceed. - - Base: The initialized `access-control` message from the `commit` path. - - Request: Caller's request message with authentication credentials. - - Output: `false` if an error has occurred. If the request is valid, the - `access-control` message should return either `true` or a modification - of the request message which will be used for any subsequent - operations. -The default `access-control` message is `~cookie@1.0`, which uses HTTP -cookies with secrets to authenticate users. -# Secret Generation Parameters -The following parameters are supported by the `generate` key: -``` -/generate - - `access-control` (optional): The `access-control` message to use. - Defaults to `#{<<"device">> => <<"cookie@1.0">>}`. - - `keyid` (optional): The `keyid` of the secret to generate. If not - provided, the secret's address will be used as the name. - - `persist` (optional): How the node should persist the secret. Options: - - `client`: The secret is generated on the server, but not persisted. - The full secret key is returned for the user to store. - - `in-memory`: The wallet is generated on the server and persisted only - in local memory, never written to disk. - - `non-volatile`: The wallet is persisted to non-volatile storage on - the node. The store used by this option is segmented from - the node's main storage, configurable via the `priv_store` - node message option. - - `controllers` (optional): A list of controllers that may access the - secret. Defaults to the node's `wallet_admin` option if set, - or its operator address if not. 
- - `required-controllers` (optional): The number of controllers that must - sign the secret for it to be valid. Defaults to `1`. - The response will contain authentication setup (such as cookies) from the - `access-control` message, plus the secret's `keyid` in the `body` field. - The secret's key is not returned to the user unless the `persist` option - is set to `client`. If it is, the `~cookie@1.0` device will be employed - to set the user's cookie with the secret. -/import - Parameters: - - `key` (optional): The JSON-encoded secret to import. - - `cookie` (optional): A structured-fields cookie containing a map with - a `key` field which is a JSON-encoded secret. - - `access-control` (optional): The `access-control` message to use. - - `persist` (optional): How the node should persist the secret. The - supported options are as with the `generate` key. - Imports a secret for hosting from the user. Executes as `generate` does, - except that it expects the key to store to be provided either directly - via the `key` parameter as a `keyid` field in the cookie Structured-Fields - map. Support for loading the key from the cookie is provided such that - a previously-generated secret by the user can have its persistence mode - changed. -/list - Parameters: - - `keyids` (optional): A list of `keyid`s to list. If not provided, - all secrets will be listed via the `keyid` that is must be provided - in order to access them. - Lists all hosted secrets on the node by the `keyid` that is used to - access them. If `keyids` is provided, only the secrets with those - `keyid`s will be listed. -/commit - Parameters: - - `keyid` (optional): The `keyid` of the secret to commit with. - - Authentication credentials as required by the `access-control` message. - Commits the given message using the specified secret after authentication. - If no `keyid` parameter is provided, the request's authentication data - (such as cookies) must contain secret identification. 
-/export - Parameters: - - `keyids` (optional): A list of `keyid`s to export, or `all` to - export all secrets for which the request passes authentication. - Exports a given secret or set of secrets. If multiple secrets are - requested, the result is a message with form `keyid => #{ `key` => - JSON-encoded secret, `access-control` => `access-control` message, - `controllers` => [address, ...], `required-controllers` => integer, - `persist` => `client` | `in-memory` | `non-volatile` }'. - A secret will be exported if: - - The given request passes each requested secret's `access-control` - message; or - - The request passes each requested secret's `controllers` parameter - checks. -/sync - Parameters: - - `node`: The peer node to pull secrets from. - - `as` (optional): The identity it should use when signing its request - to the remote peer. - - `keyids` (optional): A list of `keyid`s to export, or `all` to load - every available secret. Defaults to `all`. - Attempts to download all (or a given subset of) secrets from the given - node and import them. If the `keyids` parameter is provided, only the - secrets with those `keyid`s will be imported. The `as` parameter is - used to inform the node which key it should use to sign its request to - the remote peer, such that its request validates against the secret's - `access-control` messages on the remote peer. -''' - ---- - -## Exported Functions - -- `commit/3` -- `export/3` -- `generate/3` -- `import/3` -- `list/3` -- `sync/3` - ---- - -### generate - -A device that allows a node to create, export, and commit messages with -Generate a new wallet for a user and register it on the node. If the - -```erlang -generate(Base, Request, Opts) -> - case request_to_wallets(Base, Request, Opts) of - [] -> - % No wallets found, create a new one. -``` - -### import - -Import a wallet for hosting on the node. 
Expects the keys to be either - -```erlang -import(Base, Request, Opts) -> - Wallets = - case hb_maps:find(<<"key">>, Request, Opts) of - {ok, Keys} when is_list(Keys) -> - [ wallet_from_key(Key) || Key <- Keys ]; - {ok, Key} -> - [ wallet_from_key(hb_escape:decode_quotes(Key)) ]; - error -> - request_to_wallets(Base, Request, Opts) - end, - case Wallets of - [] -> - {error, <<"No viable wallets found to import.">>}; - Wallets -> - import_wallets(Wallets, Base, Request, Opts) - end. -``` - -### import_wallets - -Register a series of wallets, returning a summary message with the - -```erlang -import_wallets(Wallets, Base, Request, Opts) -> - Res = - lists:foldl( - fun(Wallet, Acc) -> - case register_wallet(Wallet, Base, Request, Opts) of - {ok, RegRes} -> - % Merge the private element of the registration response - % into the accumulator. -``` - -### wallet_from_key - -Transform a wallet key serialized form into a wallet. - -```erlang -wallet_from_key(Key) when is_binary(Key) -> - ar_wallet:from_json(Key); -``` - -### wallet_from_key - -Transform a wallet key serialized form into a wallet. - -```erlang -wallet_from_key(Key) -> - Key. -``` - -### register_wallet - -Register a wallet on the node. - -```erlang -register_wallet(Wallet, Base, Request, Opts) -> - % Find the wallet's address. -``` - -### persist_registered_wallet - -Persist a wallet and return the auth response. Optionally takes a - -```erlang -persist_registered_wallet(WalletDetails, Opts) -> - persist_registered_wallet(WalletDetails, #{}, Opts). -``` - -### persist_registered_wallet - -```erlang -persist_registered_wallet(WalletDetails, RespBase, Opts) -> - % Add the wallet address as the body of the response. -``` - -### list - -List all hosted wallets - -```erlang -list(_Base, _Request, Opts) -> - {ok, list_wallets(Opts)}. -``` - -### commit - -Sign a message with a wallet. 
- -```erlang -commit(Base, Request, Opts) -> - ?event({commit_invoked, {base, Base}, {request, Request}}), - case request_to_wallets(Base, Request, Opts) of - [] -> {error, <<"No wallets found to sign with.">>}; - WalletDetailsList -> - ?event( - {commit_signing, - {request, Request}, - {wallet_list, WalletDetailsList} - } - ), - { - ok, - lists:foldl( - fun(WalletDetails, Acc) -> - ?event( - {invoking_commit_message, - {message, Acc}, - {wallet, WalletDetails} - } - ), - commit_message(Acc, WalletDetails, Opts) - end, - Base, - WalletDetailsList - ) - } - end. -``` - -### request_to_wallets - -Take a request and return the wallets it references. Performs validation - -```erlang -request_to_wallets(Base, Request, Opts) -> - % Get the wallet references or keys from the request or cookie. -``` - -### load_and_verify - -Load a wallet from a keyid and verify we have the authority to access it. - -```erlang -load_and_verify({wallet, WalletKey}, _Base, _Request, _Opts) -> - % Return the wallet key. -``` - -### load_and_verify - -```erlang -load_and_verify({secret, KeyID, _}, _Base, Request, Opts) -> - % Get the wallet from the node's options. -``` - -### verify_controllers - -Validate if a calling message has the required `controllers` for the - -```erlang -verify_controllers(WalletDetails, Request, Opts) -> - RequiredControllers = - hb_util:int(hb_maps:get(<<"required-controllers">>, WalletDetails, 1, Opts)), - Controllers = - parse_controllers( - hb_maps:get(<<"controllers">>, WalletDetails, [], Opts), - Opts - ), - PresentControllers = - lists:filter( - fun(Signer) -> - lists:member(Signer, Controllers) - end, - hb_message:signers(Request, Opts) - ), - length(PresentControllers) >= RequiredControllers. -``` - -### verify_auth - -Verify a wallet for a given request. 
- -```erlang -verify_auth(WalletDetails, Req, Opts) -> - AuthBase = hb_maps:get(<<"access-control">>, WalletDetails, #{}, Opts), - AuthRequest = - Req#{ - <<"path">> => <<"verify">>, - <<"committer">> => - hb_maps:get(<<"committer">>, WalletDetails, undefined, Opts) - }, - ?event({verify_wallet, {auth_base, AuthBase}, {request, AuthRequest}}), - hb_ao:resolve(AuthBase, AuthRequest, Opts). -``` - -### wallets_from_cookie - -Parse cookie from a message to extract wallets. - -```erlang -wallets_from_cookie(Msg, Opts) -> - % Parse the cookie as a Structured-Fields map. -``` - -### commit_message - -Sign a message using hb_message:commit, taking either a wallet as a - -```erlang -commit_message(Message, NonMap, Opts) when not is_map(NonMap) -> - commit_message(Message, #{ <<"wallet">> => NonMap }, Opts); -``` - -### commit_message - -Sign a message using hb_message:commit, taking either a wallet as a - -```erlang -commit_message(Message, #{ <<"wallet">> := Key }, Opts) when is_binary(Key) -> - commit_message(Message, ar_wallet:from_json(Key), Opts); -``` - -### commit_message - -Sign a message using hb_message:commit, taking either a wallet as a - -```erlang -commit_message(Message, #{ <<"wallet">> := Key }, Opts) -> - ?event({committing_with_proxy, {message, Message}, {wallet, Key}}), - hb_message:commit(Message, Opts#{ priv_wallet => Key }). -``` - -### export - -Export wallets from a request. 
The request should contain a source of - -```erlang -export(Base, Request, Opts) -> - PrivOpts = priv_store_opts(Opts), - ModReq = - case hb_ao:get(<<"keyids">>, Request, not_found, Opts) of - <<"all">> -> - AllLocalWallets = list_wallets(Opts), - Request#{ <<"keyids">> => AllLocalWallets }; - _ -> Request - end, - ?event({export, {base, Base}, {request, ModReq}}), - case request_to_wallets(Base, ModReq, Opts) of - [] -> {error, <<"No wallets found to export.">>}; - Wallets -> - { - ok, - lists:map( - fun(Wallet) -> - Loaded = hb_cache:ensure_all_loaded(Wallet, PrivOpts), - ?event({exported, {wallet, Loaded}}), - Loaded - end, - Wallets - ) - } - end. -``` - -### sync - -Sync wallets from a remote node - -```erlang -sync(_Base, Request, Opts) -> - case hb_ao:get(<<"node">>, Request, undefined, Opts) of - undefined -> - {error, <<"Node not specified.">>}; - Node -> - Wallets = hb_maps:get(<<"keyids">>, Request, <<"all">>, Opts), - SignAsOpts = - case hb_ao:get(<<"as">>, Request, undefined, Opts) of - undefined -> Opts; - SignAs -> hb_opts:as(SignAs, Opts) - end, - ExportRequest = - (hb_message:commit( - #{ <<"keyids">> => Wallets }, - SignAsOpts - ))#{ <<"path">> => <<"/~secret@1.0/export">> }, - ?event({sync, {export_req, ExportRequest}}), - case hb_http:get(Node, ExportRequest, SignAsOpts) of - {ok, ExportResponse} -> - ExportedWallets = export_response_to_list(ExportResponse, #{}), - ?event({sync, {received_wallets, ExportedWallets}}), - % Import each wallet. Ignore wallet imports that fail. -``` - -### secrets_to_keyids - -Convert a key to a wallet reference. - -```erlang -secrets_to_keyids(Secrets) when is_list(Secrets) -> - [ hd(secrets_to_keyids(Secret)) || Secret <- Secrets ]; -``` - -### secrets_to_keyids - -Convert a key to a wallet reference. 
-Parse the exportable setting for a wallet and return a list of addresses - -```erlang -secrets_to_keyids(Secret) when is_binary(Secret) -> - ?event({secrets_to_keyids, {secret, Secret}}), - KeyID = dev_codec_httpsig_keyid:secret_key_to_committer(Secret), - [ {secret, <<"secret:", KeyID/binary>>, Secret} ]. -``` - -### parse_controllers - -Convert a key to a wallet reference. -Parse the exportable setting for a wallet and return a list of addresses - -```erlang -parse_controllers(default, Opts) -> - case hb_opts:get(wallet_admin, undefined, Opts) of - undefined -> - case hb_opts:get(operator, undefined, Opts) of - undefined -> - [hb_util:human_id(hb_opts:get(priv_wallet, undefined, Opts))]; - Op -> [hb_util:human_id(Op)] - end; - Admin -> [Admin] - end; -``` - -### parse_controllers - -Convert a key to a wallet reference. -Parse the exportable setting for a wallet and return a list of addresses - -```erlang -parse_controllers(true, Opts) -> parse_controllers(default, Opts); -``` - -### parse_controllers - -Convert a key to a wallet reference. -Parse the exportable setting for a wallet and return a list of addresses - -```erlang -parse_controllers(false, _Opts) -> []; -``` - -### parse_controllers - -Convert a key to a wallet reference. -Parse the exportable setting for a wallet and return a list of addresses - -```erlang -parse_controllers(Addresses, _Opts) when is_list(Addresses) -> Addresses; -``` - -### parse_controllers - -Convert a key to a wallet reference. -Parse the exportable setting for a wallet and return a list of addresses -Store a wallet in the appropriate location. - -```erlang -parse_controllers(Address, _Opts) when is_binary(Address) -> [Address]. -``` - -### store_wallet - -Convert a key to a wallet reference. -Parse the exportable setting for a wallet and return a list of addresses -Store a wallet in the appropriate location. 
- -```erlang -store_wallet(in_memory, KeyID, Details, Opts) -> - % Get existing wallets - CurrentWallets = hb_opts:get(priv_wallet_hosted, #{}, Opts), - % Add new wallet - UpdatedWallets = CurrentWallets#{ KeyID => Details }, - ?event({wallet_store, {updated_wallets, UpdatedWallets}}), - % Update the node's options with the new wallets. -``` - -### store_wallet - -```erlang -store_wallet(non_volatile, KeyID, Details, Opts) -> - % Find the private store of the node. -``` - -### find_wallet - -Find the wallet by name or address in the node's options. - -```erlang -find_wallet(KeyID, Opts) -> - case find_wallet(in_memory, KeyID, Opts) of - not_found -> find_wallet(non_volatile, KeyID, Opts); - Wallet -> Wallet - end. -``` - -### find_wallet - -Loop over the wallets and find the reference to the wallet. - -```erlang -find_wallet(in_memory, KeyID, Opts) -> - Wallets = hb_opts:get(priv_wallet_hosted, #{}, Opts), - ?event({find_wallet, {keyid, KeyID}, {wallets, Wallets}}), - case hb_maps:find(KeyID, Wallets, Opts) of - {ok, Wallet} -> Wallet; - error -> not_found - end; -``` - -### find_wallet - -Loop over the wallets and find the reference to the wallet. - -```erlang -find_wallet(non_volatile, KeyID, Opts) -> - PrivOpts = priv_store_opts(Opts), - Store = hb_opts:get(priv_store, undefined, PrivOpts), - Resolved = hb_store:resolve(Store, <<"wallet@1.0/", KeyID/binary>>), - case hb_cache:read(Resolved, PrivOpts) of - {ok, Wallet} -> - WalletDetails = hb_maps:get(KeyID, Wallet, not_found, PrivOpts), - hb_cache:ensure_all_loaded(WalletDetails, PrivOpts); - _ -> not_found - end. -``` - -### list_wallets - -Generate a list of all hosted wallets. - -```erlang -list_wallets(Opts) -> - list_wallets(in_memory, Opts) ++ list_wallets(non_volatile, Opts). 
-``` - -### list_wallets - -```erlang -list_wallets(in_memory, Opts) -> - hb_maps:keys(hb_opts:get(priv_wallet_hosted, #{}, Opts)); -``` - -### list_wallets - -Generate a new `Opts` message with the `priv_store` as the only `store` - -```erlang -list_wallets(non_volatile, Opts) -> - PrivOpts = priv_store_opts(Opts), - hb_cache:ensure_all_loaded(hb_cache:list(<<"wallet@1.0/">>, PrivOpts), PrivOpts). -``` - -### priv_store_opts - -Generate a new `Opts` message with the `priv_store` as the only `store` - -```erlang -priv_store_opts(Opts) -> - hb_private:opts(Opts). -``` - -### export_response_to_list - -Convert an export response into a list of wallet details. This is - -```erlang -export_response_to_list(ExportResponse, Opts) -> - hb_util:numbered_keys_to_list(ExportResponse, Opts). -``` - -### addresses_to_binary - -Convert a list of addresses to a binary string. If the input is a - -```erlang -addresses_to_binary(Addresses) when is_list(Addresses) -> - hb_util:bin(string:join( - lists:map(fun hb_util:list/1, Addresses), - ", " - )); -``` - -### addresses_to_binary - -Convert a list of addresses to a binary string. If the input is a - -```erlang -addresses_to_binary(Address) when is_binary(Address) -> - Address. -``` - -### binary_to_addresses - -Convert a binary string to a list of addresses. If the input is a - -```erlang -binary_to_addresses(AddressesBin) when is_binary(AddressesBin) -> - binary:split(AddressesBin, <<",">>, [global]); -``` - -### binary_to_addresses - -Convert a binary string to a list of addresses. If the input is a - -```erlang -binary_to_addresses(Addresses) when is_list(Addresses) -> - Addresses. -``` - -### test_wallet_generate_and_verify - -Helper function to test wallet generation and verification flow. 
- -```erlang -test_wallet_generate_and_verify(GeneratePath, ExpectedName, CommitParams) -> - Node = hb_http_server:start_node(#{ - priv_wallet => ar_wallet:new() - }), - % Generate wallet with specified parameters - {ok, GenResponse} = hb_http:get(Node, GeneratePath, #{}), - % Should get wallet name in body, wallet-address, and auth cookie - ?assertMatch(#{<<"body">> := _}, GenResponse), - WalletAddr = maps:get(<<"wallet-address">>, GenResponse), - case ExpectedName of - undefined -> - % For unnamed wallets, just check it's a non-empty binary - ?assert(is_binary(WalletAddr) andalso byte_size(WalletAddr) > 0); - _ -> - % For named wallets, check exact match - ?assertEqual(ExpectedName, WalletAddr) - end, - ?assertMatch(#{ <<"priv">> := #{ <<"cookie">> := _ } }, GenResponse), - #{ <<"priv">> := Priv } = GenResponse, - % Now verify by signing a message - TestMessage = - maps:merge( - #{ - <<"device">> => <<"secret@1.0">>, - <<"path">> => <<"commit">>, - <<"body">> => <<"Test message">>, - <<"priv">> => Priv - }, - CommitParams - ), - ?event({signing_with_cookie, {test_message, TestMessage}}), - {ok, SignedMessage} = hb_http:post(Node, TestMessage, #{}), - % Should return signed message with correct signer - ?assertMatch(#{ <<"body">> := <<"Test message">> }, SignedMessage), - ?assert(hb_message:signers(SignedMessage, #{}) =:= [WalletAddr]). -``` - -### client_persist_generate_and_verify_test - -```erlang -client_persist_generate_and_verify_test() -> - test_wallet_generate_and_verify( - <<"/~secret@1.0/generate?persist=client">>, - undefined, - #{} - ). -``` - -### cookie_wallet_generate_and_verify_test - -```erlang -cookie_wallet_generate_and_verify_test() -> - test_wallet_generate_and_verify( - <<"/~secret@1.0/generate?persist=in-memory">>, - undefined, - #{} - ). 
-``` - -### non_volatile_persist_generate_and_verify_test - -```erlang -non_volatile_persist_generate_and_verify_test() -> - test_wallet_generate_and_verify( - <<"/~secret@1.0/generate?persist=non-volatile">>, - undefined, - #{} - ). -``` - -### import_wallet_with_key_test - -```erlang -import_wallet_with_key_test() -> - Node = hb_http_server:start_node(#{ - priv_wallet => ar_wallet:new() - }), - % Create a test wallet key to import (in real scenario from user). -``` - -### list_wallets_test - -```erlang -list_wallets_test() -> - Node = hb_http_server:start_node(#{ - priv_wallet => ar_wallet:new() - }), - % Generate some wallets first. -``` - -### commit_with_cookie_wallet_test - -```erlang -commit_with_cookie_wallet_test() -> - Node = hb_http_server:start_node(#{ - priv_wallet => ar_wallet:new() - }), - % Generate a client wallet to get a cookie with full wallet key. -``` - -### export_wallet_test - -```erlang -export_wallet_test() -> - Node = hb_http_server:start_node(#{}), - % Generate a wallet to export. -``` - -### export_non_volatile_wallet_test - -```erlang -export_non_volatile_wallet_test() -> - Node = hb_http_server:start_node(#{ - priv_wallet => ar_wallet:new() - }), - % Generate a wallet to export. -``` - -### export_individual_batch_wallets_test - -```erlang -export_individual_batch_wallets_test() -> - Node = - hb_http_server:start_node( - AdminOpts = - #{ - priv_wallet => AdminWallet = ar_wallet:new() - } - ), - % Generate multiple wallets and collect auth cookies. -``` - -### export_batch_all_wallets_test - -```erlang -export_batch_all_wallets_test() -> - % Remove all previous cached wallets. -``` - -### sync_wallets_test - -```erlang -sync_wallets_test() -> - % Remove all previous cached wallets. -``` - -### sync_non_volatile_wallets_test - -```erlang -sync_non_volatile_wallets_test() -> - % Remove all the previous cached wallets. 
-``` - ---- - -*Generated from [dev_secret.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_secret.erl)* diff --git a/docs/book/src/dev_simple_pay.erl.md b/docs/book/src/dev_simple_pay.erl.md deleted file mode 100644 index 6a2dc03ec..000000000 --- a/docs/book/src/dev_simple_pay.erl.md +++ /dev/null @@ -1,408 +0,0 @@ -# dev_simple_pay - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_simple_pay.erl) - -A simple device that allows the operator to specify a price for a -request and then charge the user for it, on a per route and optionally -per message basis. -The device's pricing rules are as follows: -1. If the request is from the operator, the cost is 0. -2. If the request matches one of the `router_opts/offered` routes, the - explicit price of the route is used. -3. Else, the price is calculated by counting the number of messages in the - request, and multiplying by the `simple_pay_price` node option, plus the - price of the apply subrequest if applicable. Subrequests are priced by - recursively calling `estimate/3` upon them. In the case of an `apply@1.0` - subrequest, the two initiating apply messages are not counted towards the - message count price. -The device's ledger is stored in the node message at `simple_pay_ledger`, -and can be topped-up by either the operator, or an external device. The -price is specified in the node message at `simple_pay_price`. -This device acts as both a pricing device and a ledger device, by p4's -definition. 
- ---- - -## Exported Functions - -- `balance/3` -- `charge/3` -- `estimate/3` -- `topup/3` - ---- - -### estimate - -A simple device that allows the operator to specify a price for a -Estimate the cost of the request, using the rules outlined in the - -```erlang -estimate(_Base, EstimateReq, NodeMsg) -> - Req = hb_ao:get(<<"request">>, EstimateReq, NodeMsg#{ hashpath => ignore }), - case is_operator(Req, NodeMsg) of - true -> - ?event(payment, - {estimate_preprocessing, caller_is_operator} - ), - {ok, 0}; - false -> - ?event(payment, {starting_estimate, {req, Req}}), - ReqSequence = hb_singleton:from(Req, NodeMsg), - ?event(payment, - {estimating_cost, - {singleton, Req}, - {request_sequence, ReqSequence} - } - ), - % Get the user's request to match against router registration options - case price_from_routes(Req, NodeMsg) of - no_matches -> - {ok, ApplyPrice, SeqWithoutApply} = apply_price(ReqSequence, NodeMsg), - MessageCountPrice = price_from_count(SeqWithoutApply, NodeMsg), - Price = MessageCountPrice + ApplyPrice, - ?event(payment, - {calculated_generic_route_price, - {price, Price}, - {message_count_price, MessageCountPrice}, - {apply_price, ApplyPrice} - }), - {ok, Price}; - Price -> - ?event(payment, - {calculated_specific_route_price, - {price, Price} - } - ), - {ok, Price} - end - end. 
-``` - -### apply_price - -If the request is for the `apply@1.0` device, we should price the - -```erlang -apply_price([{as, Device, Msg} | Rest], NodeMsg) -> - apply_price([Msg#{ <<"device">> => Device } | Rest], NodeMsg); -``` - -### apply_price - -If the request is for the `apply@1.0` device, we should price the - -```erlang -apply_price( - [Req = #{ <<"device">> := <<"apply@1.0">> }, #{ <<"path">> := Path } | Rest], - NodeMsg - ) -> - UserPath = hb_maps:get(Path, Req, <<"">>, NodeMsg), - UserMessage = - case hb_maps:find(<<"source">>, Req, NodeMsg) of - {ok, Source} -> hb_maps:get(Source, Req, Req, NodeMsg); - error -> Req - end, - UserRequest = - hb_maps:without( - [<<"device">>], - UserMessage#{ <<"path">> => UserPath } - ), - ?event(payment, {estimating_price_of_subrequest, {req, UserRequest}}), - {ok, Price} = estimate(#{}, #{ <<"request">> => UserRequest }, NodeMsg), - ?event(payment, {price_of_apply_subrequest, {price, Price}}), - {ok, Price, Rest}; -``` - -### apply_price - -If the request is for the `apply@1.0` device, we should price the - -```erlang -apply_price(Seq, _) -> - {ok, 0, Seq}. -``` - -### price_from_routes - -Calculate the price of a request based on the offered routes, if - -```erlang -price_from_routes(UserRequest, NodeMsg) -> - RouterOpts = hb_opts:get(<<"router_opts">>, #{}, NodeMsg), - Routes = hb_maps:get(<<"offered">>, RouterOpts, [], NodeMsg), - MatchRes = - dev_router:match( - #{ <<"routes">> => Routes }, - UserRequest, - NodeMsg - ), - case MatchRes of - {ok, OfferedRoute} -> - Price = hb_maps:get(<<"price">>, OfferedRoute, 0, NodeMsg), - ?event(payment, {price_from_routes, {price, Price}}), - Price; - _ -> - no_matches - end. 
-``` - -### price_from_count - -Calculate the price of a request based on the number of messages in - -```erlang -price_from_count(Messages, NodeMsg) -> - Price = - hb_util:int(hb_opts:get(simple_pay_price, 1, NodeMsg)) - * length(Messages), - ?event(payment, {price_from_count, {price, Price}, {count, length(Messages)}}), - Price. -``` - -### charge - -Preprocess a request by checking the ledger and charging the user. We - -```erlang -charge(_, RawReq, NodeMsg) -> - ?event(payment, {charge, RawReq}), - Req = hb_ao:get(<<"request">>, RawReq, NodeMsg#{ hashpath => ignore }), - case hb_message:signers(Req, NodeMsg) of - [] -> - ?event(payment, {charge, {error, <<"No signers">>}}), - {ok, false}; - [Signer] -> - UserBalance = get_balance(Signer, NodeMsg), - Price = hb_ao:get(<<"quantity">>, RawReq, 0, NodeMsg), - ?event(payment, - {charge, - {user, Signer}, - {balance, UserBalance}, - {price, Price} - }), - {ok, _} = - set_balance( - Signer, - NewBalance = UserBalance - Price, - NodeMsg - ), - case NewBalance >= 0 of - true -> - {ok, true}; - false -> - ?event(payment, - {charge, - {user, Signer}, - {balance, UserBalance}, - {price, Price} - } - ), - {error, #{ - <<"status">> => 402, - <<"body">> => <<"Insufficient funds. " - "User balance before charge: ", - (hb_util:bin(UserBalance))/binary, - ". Price of request: ", - (hb_util:bin(Price))/binary, - ". New balance: ", - (hb_util:bin(NewBalance))/binary, - ".">> - }} - end; - MultipleSigners -> - ?event(payment, {charge, {error_multiple_signers, MultipleSigners}}), - {error, #{ - <<"status">> => 400, - <<"body">> => <<"Multiple signers in charge.">> - }} - end. -``` - -### balance - -Get the balance of a user in the ledger. 
- -```erlang -balance(_, RawReq, NodeMsg) -> - Target = - case hb_ao:get(<<"request">>, RawReq, NodeMsg#{ hashpath => ignore }) of - not_found -> - case hb_message:signers(RawReq, NodeMsg) of - [] -> hb_ao:get(<<"target">>, RawReq, undefined, NodeMsg); - [Signer] -> Signer - end; - Req -> hd(hb_message:signers(Req, NodeMsg)) - end, - {ok, get_balance(Target, NodeMsg)}. -``` - -### set_balance - -Adjust a user's balance, normalizing their wallet ID first. - -```erlang -set_balance(Signer, Amount, NodeMsg) -> - NormSigner = hb_util:human_id(Signer), - Ledger = hb_opts:get(simple_pay_ledger, #{}, NodeMsg), - ?event(payment, - {modifying_balance, - {user, NormSigner}, - {amount, Amount}, - {ledger_before, Ledger} - } - ), - hb_http_server:set_opts( - #{}, - NewMsg = NodeMsg#{ - simple_pay_ledger => - hb_ao:set( - Ledger, - NormSigner, - Amount, - NodeMsg - ) - } - ), - {ok, NewMsg}. -``` - -### get_balance - -Get the balance of a user in the ledger. - -```erlang -get_balance(Signer, NodeMsg) -> - NormSigner = hb_util:human_id(Signer), - Ledger = hb_opts:get(simple_pay_ledger, #{}, NodeMsg), - hb_ao:get(NormSigner, Ledger, 0, NodeMsg). -``` - -### topup - -Top up the user's balance in the ledger. - -```erlang -topup(_, Req, NodeMsg) -> - ?event({topup, {req, Req}, {node_msg, NodeMsg}}), - case is_operator(Req, NodeMsg) of - false -> {error, <<"Unauthorized">>}; - true -> - Amount = hb_ao:get(<<"amount">>, Req, 0, NodeMsg), - Recipient = hb_ao:get(<<"recipient">>, Req, undefined, NodeMsg), - CurrentBalance = get_balance(Recipient, NodeMsg), - ?event(payment, - {topup, - {amount, Amount}, - {recipient, Recipient}, - {balance, CurrentBalance}, - {expected_new_balance, CurrentBalance + Amount} - }), - {ok, NewNodeMsg} = - set_balance( - Recipient, - CurrentBalance + Amount, - NodeMsg - ), - % Briefly wait for the ledger to be updated. -``` - -### is_operator - -Check if the request is from the operator. 
- -```erlang -is_operator(Req, NodeMsg) -> - is_operator(Req, NodeMsg, hb_opts:get(operator, undefined, NodeMsg)). -``` - -### is_operator - -```erlang -is_operator(Req, NodeMsg, OperatorAddr) when ?IS_ID(OperatorAddr) -> - Signers = hb_message:signers(Req, NodeMsg), - HumanOperatorAddr = hb_util:human_id(OperatorAddr), - lists:any( - fun(Signer) -> - HumanOperatorAddr =:= hb_util:human_id(Signer) - end, - Signers - ); -``` - -### is_operator - -```erlang -is_operator(_, _, _) -> - false. -``` - -### test_opts - -```erlang -test_opts(Ledger) -> - Wallet = ar_wallet:new(), - Address = hb_util:human_id(ar_wallet:to_address(Wallet)), - ProcessorMsg = - #{ - <<"device">> => <<"p4@1.0">>, - <<"ledger-device">> => <<"simple-pay@1.0">>, - <<"pricing-device">> => <<"simple-pay@1.0">> - }, - { - Address, - Wallet, - #{ - simple_pay_ledger => Ledger, - simple_pay_price => 10, - operator => Address, - on => #{ - <<"request">> => ProcessorMsg, - <<"response">> => ProcessorMsg - } - } - }. -``` - -### get_balance_and_top_up_test - -```erlang -get_balance_and_top_up_test() -> - ClientWallet = ar_wallet:new(), - ClientAddress = hb_util:human_id(ar_wallet:to_address(ClientWallet)), - {HostAddress, HostWallet, Opts} = test_opts(#{ ClientAddress => 100 }), - Node = hb_http_server:start_node(Opts), - ?event({host_address, HostAddress}), - ?event({client_address, ClientAddress}), - {ok, Res} = - hb_http:get( - Node, - Req = hb_message:commit( - #{<<"path">> => <<"/~simple-pay@1.0/balance">>}, - Opts#{ priv_wallet => ClientWallet } - ), - Opts - ), - ?event({req_signers, hb_message:signers(Req, Opts)}), - % Balance is given during the request, before the charge is made, so we - % should expect to see the original balance. 
-``` - -### apply_price_test - -```erlang -apply_price_test() -> - ClientWallet = ar_wallet:new(), - ClientAddress = hb_util:human_id(ar_wallet:to_address(ClientWallet)), - ClientOpts = #{ priv_wallet => ClientWallet }, - {HostAddress, _HostWallet, Opts} = - test_opts(#{ ClientAddress => 100 }), - Node = hb_http_server:start_node(Opts), - ?event({host_address, HostAddress}), - ?event({client_address, ClientAddress}), - % The balance should now be 80, as the check will have charged us 20. -``` - ---- - -*Generated from [dev_simple_pay.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_simple_pay.erl)* diff --git a/docs/book/src/dev_snp.erl.md b/docs/book/src/dev_snp.erl.md deleted file mode 100644 index e9f8c3518..000000000 --- a/docs/book/src/dev_snp.erl.md +++ /dev/null @@ -1,693 +0,0 @@ -# dev_snp - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_snp.erl) - -This device provides an interface for validating and generating AMD SEV-SNP -commitment reports. -AMD SEV-SNP (Secure Encrypted Virtualization - Secure Nested Paging) is a -hardware-based security technology that provides confidential computing -capabilities. This module handles the cryptographic validation of attestation -reports and the generation of commitment reports for trusted execution environments. -The device supports two main operations: -1. Verification of remote node attestation reports with comprehensive validation -2. Generation of local attestation reports for proving node identity and software integrity - ---- - -## Exported Functions - -- `generate/3` -- `verify/3` - ---- - -### verify - -This device provides an interface for validating and generating AMD SEV-SNP -Verify an AMD SEV-SNP commitment report message. - -```erlang --spec verify(M1 :: term(), M2 :: term(), NodeOpts :: map()) -> - {ok, binary()} | {error, term()}. 
-``` - -```erlang -verify(M1, M2, NodeOpts) -> - ?event(snp_verify, verify_called), - maybe - {ok, {Msg, Address, NodeMsgID, ReportJSON, MsgWithJSONReport}} - ?= extract_and_normalize_message(M2, NodeOpts), - % Perform all validation steps - {ok, NonceResult} ?= verify_nonce(Address, NodeMsgID, Msg, NodeOpts), - {ok, SigResult} ?= - verify_signature_and_address( - MsgWithJSONReport, - Address, - NodeOpts - ), - {ok, DebugResult} ?= verify_debug_disabled(Msg), - {ok, TrustedResult} ?= verify_trusted_software(M1, Msg, NodeOpts), - {ok, MeasurementResult} ?= verify_measurement(Msg, ReportJSON, NodeOpts), - {ok, ReportResult} ?= verify_report_integrity(ReportJSON), - Valid = lists:all( - fun(Bool) -> Bool end, - [ - NonceResult, - SigResult, - DebugResult, - TrustedResult, - MeasurementResult, - ReportResult - ] - ), - ?event({final_validation_result, Valid}), - {ok, hb_util:bin(Valid)} - else - {error, Reason} -> {error, Reason} - end. -``` - -### generate - -Generate an AMD SEV-SNP commitment report and emit it as a message. - -```erlang --spec generate(M1 :: term(), M2 :: term(), Opts :: map()) -> - {ok, map()} | {error, term()}. 
-``` - -```erlang -generate(_M1, _M2, Opts) -> - maybe - LoadedOpts = hb_cache:ensure_all_loaded(Opts, Opts), - ?event({generate_opts, {explicit, LoadedOpts}}), - % Validate wallet availability - {ok, ValidWallet} ?= - case hb_opts:get(priv_wallet, no_viable_wallet, LoadedOpts) of - no_viable_wallet -> {error, no_wallet_available}; - Wallet -> {ok, Wallet} - end, - % Generate address and node message components - Address = hb_util:human_id(ar_wallet:to_address(ValidWallet)), - NodeMsg = hb_private:reset(LoadedOpts), - {ok, PublicNodeMsgID} ?= dev_message:id( - NodeMsg, - #{ <<"committers">> => <<"none">> }, - LoadedOpts - ), - RawPublicNodeMsgID = hb_util:native_id(PublicNodeMsgID), - ?event({snp_node_msg, NodeMsg}), - % Generate the commitment report components - ?event({snp_address, byte_size(Address)}), - ReportData = generate_nonce(Address, RawPublicNodeMsgID), - ?event({snp_report_data, byte_size(ReportData)}), - % Extract local hashes - {ok, ValidLocalHashes} ?= - case hb_opts:get(snp_trusted, [#{}], LoadedOpts) of - [] -> {error, no_trusted_configs}; - [FirstConfig | _] -> {ok, FirstConfig}; - _ -> {error, invalid_trusted_configs_format} - end, - ?event(snp_local_hashes, {explicit, ValidLocalHashes}), - % Generate the hardware attestation report - {ok, ReportJSON} ?= case get(mock_snp_nif_enabled) of - true -> - % Return mocked response for testing - MockResponse = get(mock_snp_nif_response), - {ok, MockResponse}; - _ -> - % Call actual NIF function - dev_snp_nif:generate_attestation_report( - ReportData, - ?REPORT_DATA_VERSION - ) - end, - ?event({snp_report_json, ReportJSON}), - ?event({snp_report_generated, {nonce, ReportData}, {report, ReportJSON}}), - % Package the complete report message - ReportMsg = #{ - <<"local-hashes">> => ValidLocalHashes, - <<"nonce">> => hb_util:encode(ReportData), - <<"address">> => Address, - <<"node-message">> => NodeMsg, - <<"report">> => ReportJSON - }, - ?event({snp_report_msg, ReportMsg}), - {ok, ReportMsg} - else - 
{error, Reason} -> {error, Reason}; - Error -> {error, Error} - end. -``` - -### extract_and_normalize_message - -Extract and normalize the SNP commitment message from the input. - -```erlang --spec extract_and_normalize_message(M2 :: term(), NodeOpts :: map()) -> - {ok, {map(), binary(), binary(), binary(), map()}} | {error, term()}. -``` - -```erlang -extract_and_normalize_message(M2, NodeOpts) -> - maybe - % Search for a `body' key in the message, and if found use it as the source - % of the report. If not found, use the message itself as the source. -``` - -### extract_node_message_id - -Extract the node message ID from the SNP message. - -```erlang --spec extract_node_message_id(Msg :: map(), NodeOpts :: map()) -> - {ok, binary()} | {error, missing_node_msg_id}. -``` - -```erlang -extract_node_message_id(Msg, NodeOpts) -> - case {hb_ao:get(<<"node-message">>, Msg, NodeOpts#{ hashpath => ignore }), - hb_ao:get(<<"node-message-id">>, Msg, NodeOpts)} of - {undefined, undefined} -> - {error, missing_node_msg_id}; - {undefined, ID} -> - {ok, ID}; - {NodeMsg, _} -> - dev_message:id(NodeMsg, #{}, NodeOpts) - end. -``` - -### verify_nonce - -Verify that the nonce in the report matches the expected value. - -```erlang --spec verify_nonce(Address :: binary(), NodeMsgID :: binary(), - Msg :: map(), NodeOpts :: map()) -> {ok, true} | {error, nonce_mismatch}. -``` - -```erlang -verify_nonce(Address, NodeMsgID, Msg, NodeOpts) -> - Nonce = hb_util:decode(hb_ao:get(<<"nonce">>, Msg, NodeOpts)), - ?event({snp_nonce, Nonce}), - NonceMatches = report_data_matches(Address, NodeMsgID, Nonce), - ?event({nonce_matches, NonceMatches}), - case NonceMatches of - true -> {ok, true}; - false -> {error, nonce_mismatch} - end. -``` - -### verify_signature_and_address - -Verify that the message signature and signing address are valid. 
- -```erlang --spec verify_signature_and_address(MsgWithJSONReport :: map(), - Address :: binary(), NodeOpts :: map()) -> - {ok, true} | {error, signature_or_address_invalid}. -``` - -```erlang -verify_signature_and_address(MsgWithJSONReport, Address, NodeOpts) -> - Signers = hb_message:signers(MsgWithJSONReport, NodeOpts), - ?event({snp_signers, {explicit, Signers}}), - SigIsValid = hb_message:verify(MsgWithJSONReport, Signers), - ?event({snp_sig_is_valid, SigIsValid}), - AddressIsValid = lists:member(Address, Signers), - ?event({address_is_valid, AddressIsValid, {signer, Signers}, {address, Address}}), - case SigIsValid andalso AddressIsValid of - true -> {ok, true}; - false -> {error, signature_or_address_invalid} - end. -``` - -### verify_trusted_software - -Verify that the software configuration is trusted. - -```erlang --spec verify_trusted_software(M1 :: term(), Msg :: map(), NodeOpts :: map()) -> - {ok, true} | {error, untrusted_software}. -``` - -```erlang -verify_trusted_software(M1, Msg, NodeOpts) -> - {ok, IsTrustedSoftware} = execute_is_trusted(M1, Msg, NodeOpts), - ?event({trusted_software, IsTrustedSoftware}), - case IsTrustedSoftware of - true -> {ok, true}; - false -> {error, untrusted_software} - end. -``` - -### verify_measurement - -Verify that the measurement in the SNP report is valid. - -```erlang --spec verify_measurement(Msg :: map(), ReportJSON :: binary(), - NodeOpts :: map()) -> {ok, true} | {error, measurement_invalid}. 
-``` - -```erlang -verify_measurement(Msg, ReportJSON, NodeOpts) -> - Args = extract_measurement_args(Msg, NodeOpts), - ?event({args, { explicit, Args}}), - {ok, Expected} = dev_snp_nif:compute_launch_digest(Args), - ExpectedBin = list_to_binary(Expected), - ?event({expected_measurement, {explicit, Expected}}), - Measurement = hb_ao:get(<<"measurement">>, Msg, NodeOpts), - ?event({measurement, {explicit,Measurement}}), - {Status, MeasurementIsValid} = - dev_snp_nif:verify_measurement( - ReportJSON, - ExpectedBin - ), - ?event({status, Status}), - ?event({measurement_is_valid, MeasurementIsValid}), - case MeasurementIsValid of - true -> {ok, true}; - false -> {error, measurement_invalid} - end. -``` - -### verify_report_integrity - -Verify the integrity of the SNP report's digital signature. - -```erlang --spec verify_report_integrity(ReportJSON :: binary()) -> - {ok, true} | {error, report_signature_invalid}. -``` - -```erlang -verify_report_integrity(ReportJSON) -> - {ok, ReportIsValid} = dev_snp_nif:verify_signature(ReportJSON), - ?event({report_is_valid, ReportIsValid}), - case ReportIsValid of - true -> {ok, true}; - false -> {error, report_signature_invalid} - end. -``` - -### execute_is_trusted - -Validate that all software hashes match trusted configurations. - -```erlang --spec execute_is_trusted(M1 :: term(), Msg :: map(), NodeOpts :: map()) -> - {ok, boolean()}. -``` - -```erlang -execute_is_trusted(_M1, Msg, NodeOpts) -> - FilteredLocalHashes = get_filtered_local_hashes(Msg, NodeOpts), - TrustedSoftware = hb_opts:get(snp_trusted, [#{}], NodeOpts), - ?event({trusted_software, {explicit, TrustedSoftware}}), - IsTrusted = - is_software_trusted( - FilteredLocalHashes, - TrustedSoftware, - NodeOpts - ), - ?event({is_all_software_trusted, IsTrusted}), - {ok, IsTrusted}. -``` - -### report_data_matches - -Validate that the report data matches the expected nonce. 
- -```erlang --spec report_data_matches(Address :: binary(), NodeMsgID :: binary(), - ReportData :: binary()) -> boolean(). -``` - -```erlang -report_data_matches(Address, NodeMsgID, ReportData) -> - ?event({generated_nonce, {explicit, generate_nonce(Address, NodeMsgID)}}), - ?event({expected_nonce, {explicit, ReportData}}), - generate_nonce(Address, NodeMsgID) == ReportData. -``` - -### get_test_hashes - -```erlang --spec generate_nonce(RawAddress :: binary(), RawNodeMsgID :: binary()) -> binary(). -generate_nonce(RawAddress, RawNodeMsgID) -> - Address = hb_util:native_id(RawAddress), - NodeMsgID = hb_util:native_id(RawNodeMsgID), - << Address/binary, NodeMsgID/binary >>. -``` - -```erlang -get_test_hashes() -> - #{ - <<"vcpus">> => ?TEST_VCPUS_COUNT, - <<"vcpu_type">> => ?TEST_VCPU_TYPE, - <<"vmm_type">> => ?TEST_VMM_TYPE, - <<"guest_features">> => ?TEST_GUEST_FEATURES, - <<"firmware">> => ?TEST_FIRMWARE_HASH, - <<"kernel">> => ?TEST_KERNEL_HASH, - <<"initrd">> => ?TEST_INITRD_HASH, - <<"append">> => ?TEST_APPEND_HASH - }. -``` - -### setup_test_nodes - -```erlang -setup_test_nodes() -> - ProxyWallet = hb:wallet(<<"test/admissible-report-wallet.json">>), - ProxyOpts = #{ - store => hb_opts:get(store), - priv_wallet => ProxyWallet - }, - _ReportNode = hb_http_server:start_node(ProxyOpts), - VerifyingNode = hb_http_server:start_node(#{ - priv_wallet => ar_wallet:new(), - store => hb_opts:get(store), - snp_trusted => [ - #{ - <<"vcpus">> => ?TEST_VCPUS_COUNT, - <<"vcpu_type">> => ?TEST_VCPU_TYPE, - <<"vmm_type">> => ?TEST_VMM_TYPE, - <<"guest_features">> => ?TEST_GUEST_FEATURES, - <<"firmware">> => ?TEST_FIRMWARE_HASH, - <<"kernel">> => ?TEST_KERNEL_HASH, - <<"initrd">> => ?TEST_INITRD_HASH, - <<"append">> => ?TEST_APPEND_HASH - } - ], - snp_enforced_keys => [ - vcpu_type, vmm_type, guest_features, - firmware, kernel, initrd, append - ] - }), - {ProxyOpts, VerifyingNode}. 
-``` - -### execute_is_trusted_exact_match_should_fail_test - -```erlang --spec load_test_report_data() -> binary(). -load_test_report_data() -> - TestFile = <<"test/admissible-report.json">>, - case file:read_file(TestFile) of - {ok, Data} -> - Data; - {error, enoent} -> - throw({error, {file_not_found, TestFile}}); - {error, Reason} -> - throw({error, {file_read_error, TestFile, Reason}}) - end. -``` - -```erlang -execute_is_trusted_exact_match_should_fail_test() -> - % Test case: Exact match with trusted software should fail when vcpus differ - Msg = #{ - <<"local-hashes">> => (get_test_hashes())#{ - <<"vcpus">> => 16 - } - }, - NodeOpts = #{ - snp_trusted => [get_test_hashes()], - snp_enforced_keys => [ - vcpus, vcpu_type, vmm_type, guest_features, - firmware, kernel, initrd, append - ] - }, - {ok, Result} = execute_is_trusted(#{}, Msg, NodeOpts), - ?assertEqual(false, Result). -``` - -### execute_is_trusted_subset_match_should_pass_test - -```erlang -execute_is_trusted_subset_match_should_pass_test() -> - % Test case: Match with subset of keys in trusted software should pass - Msg = #{ - <<"local-hashes">> => (get_test_hashes())#{ - <<"vcpus">> => 16 - } - }, - NodeOpts = #{ - snp_trusted => [get_test_hashes()], - snp_enforced_keys => [ - vcpu_type, vmm_type, guest_features, - firmware, kernel, initrd, append - ] - }, - {ok, Result} = execute_is_trusted(#{}, Msg, NodeOpts), - ?assertEqual(true, Result). -``` - -### verify_test - -```erlang -verify_test() -> - % Note: If this test fails, it may be because the unsigned ID of the node - % message in `test/admissible-report.eterm` has changed. If the format ever - % changes, this value will need to be updated. Recalculate the unsigned ID - % of the `Request/node-message' field, decode `Request/address', concatenate - % the two, and encode. The result will be the new `Request/nonce' value. -``` - -### generate_success_test - -Test successful report generation with valid configuration. 
- -```erlang -generate_success_test() -> - % Set up test configuration - TestWallet = ar_wallet:new(), - TestOpts = #{ - priv_wallet => TestWallet, - snp_trusted => [#{ - <<"vcpus">> => ?TEST_VCPUS_COUNT, - <<"vcpu_type">> => ?TEST_VCPU_TYPE, - <<"firmware">> => ?TEST_FIRMWARE_HASH, - <<"kernel">> => ?TEST_KERNEL_HASH - }] - }, - % Load test report data from file - TestReportJSON = load_test_report_data(), - % Mock the NIF function to return test data - ok = mock_snp_nif(TestReportJSON), - try - % Call generate function - {ok, Result} = generate(#{}, #{}, TestOpts), - % Verify the result structure - ?assert(is_map(Result)), - ?assert(maps:is_key(<<"local-hashes">>, Result)), - ?assert(maps:is_key(<<"nonce">>, Result)), - ?assert(maps:is_key(<<"address">>, Result)), - ?assert(maps:is_key(<<"node-message">>, Result)), - ?assert(maps:is_key(<<"report">>, Result)), - % Verify the report content - ?assertEqual(TestReportJSON, maps:get(<<"report">>, Result)), - % Verify local hashes match the first trusted config - ExpectedHashes = maps:get(<<"local-hashes">>, Result), - ?assertEqual(?TEST_VCPUS_COUNT, maps:get(<<"vcpus">>, ExpectedHashes)), - ?assertEqual(?TEST_VCPU_TYPE, maps:get(<<"vcpu_type">>, ExpectedHashes)), - % Verify nonce is properly encoded - Nonce = maps:get(<<"nonce">>, Result), - ?assert(is_binary(Nonce)), - ?assert(byte_size(Nonce) > 0), - % Verify address is present and properly formatted - Address = maps:get(<<"address">>, Result), - ?assert(is_binary(Address)), - ?assert(byte_size(Address) > 0) - after - % Clean up mock - unmock_snp_nif() - end. -``` - -### generate_missing_wallet_test - -Test error handling when wallet is missing. 
- -```erlang -generate_missing_wallet_test() -> - TestOpts = #{ - % No priv_wallet provided - snp_trusted => [#{ <<"firmware">> => ?TEST_FIRMWARE_HASH }] - }, - % Mock the NIF function (shouldn't be called) - ok = mock_snp_nif(<<"dummy_report">>), - try - % Call generate function - should fail - Result = generate(#{}, #{}, TestOpts), - ?assertMatch({error, no_wallet_available}, Result) - after - unmock_snp_nif() - end. -``` - -### generate_missing_trusted_configs_test - -Test error handling when trusted configurations are missing. - -```erlang -generate_missing_trusted_configs_test() -> - TestWallet = ar_wallet:new(), - TestOpts = #{ - priv_wallet => TestWallet, - snp_trusted => [] % Empty trusted configs - }, - % Mock the NIF function (shouldn't be called) - ok = mock_snp_nif(<<"dummy_report">>), - try - % Call generate function - should fail - Result = generate(#{}, #{}, TestOpts), - ?assertMatch({error, no_trusted_configs}, Result) - after - unmock_snp_nif() - end. -``` - -### verify_mock_generate_success_test_ - -Test successful round-trip: generate then verify with same configuration. - -```erlang -verify_mock_generate_success_test_() -> - { timeout, 30, fun verify_mock_generate_success/0 }. 
-``` - -### verify_mock_generate_success - -```erlang -verify_mock_generate_success() -> - % Set up test configuration - TestWallet = ar_wallet:new(), - TestTrustedConfig = #{ - <<"vcpus">> => 32, - <<"vcpu_type">> => ?TEST_VCPU_TYPE, - <<"vmm_type">> => ?TEST_VMM_TYPE, - <<"guest_features">> => ?TEST_GUEST_FEATURES, - <<"firmware">> => ?TEST_FIRMWARE_HASH, - <<"kernel">> => ?TEST_KERNEL_HASH, - <<"initrd">> => ?TEST_INITRD_HASH, - <<"append">> => ?TEST_APPEND_HASH - }, - GenerateOpts = #{ - priv_wallet => TestWallet, - snp_trusted => [TestTrustedConfig] - }, - % Load test report data and set up mock - TestReportJSON = load_test_report_data(), - ok = mock_snp_nif(TestReportJSON), - try - % Step 1: Generate a test report using mocked SNP - {ok, GeneratedMsg} = generate(#{}, #{}, GenerateOpts), - % Verify the generated message structure - ?assert(is_map(GeneratedMsg)), - ?assert(maps:is_key(<<"report">>, GeneratedMsg)), - ?assert(maps:is_key(<<"address">>, GeneratedMsg)), - ?assert(maps:is_key(<<"nonce">>, GeneratedMsg)), - % Step 2: Set up verification options with the same trusted config - VerifyOpts = #{ - snp_trusted => [TestTrustedConfig], - snp_enforced_keys => [vcpu_type, vmm_type, guest_features, - firmware, kernel, initrd, append] - }, - % Step 3: Verify the generated report - {ok, VerifyResult} = - verify( - #{}, - hb_message:commit(GeneratedMsg, GenerateOpts), - VerifyOpts - ), - % Step 4: Assert that verification succeeds - ?assertEqual(<<"true">>, VerifyResult), - % Additional validation: verify specific fields - ReportData = maps:get(<<"report">>, GeneratedMsg), - ?assertEqual(TestReportJSON, ReportData), - LocalHashes = maps:get(<<"local-hashes">>, GeneratedMsg), - ?assertEqual(TestTrustedConfig, LocalHashes) - after - % Clean up mock - unmock_snp_nif() - end. -``` - -### verify_mock_generate_wrong_config_test_ - -Test verification failure when using wrong trusted configuration. 
- -```erlang -verify_mock_generate_wrong_config_test_() -> - { timeout, 30, fun verify_mock_generate_wrong_config/0 }. -``` - -### verify_mock_generate_wrong_config - -```erlang -verify_mock_generate_wrong_config() -> - % Set up test configuration for generation - TestWallet = ar_wallet:new(), - GenerateTrustedConfig = #{ - <<"vcpus">> => ?TEST_VCPUS_COUNT, - <<"vcpu_type">> => ?TEST_VCPU_TYPE, - <<"vmm_type">> => ?TEST_VMM_TYPE, - <<"guest_features">> => ?TEST_GUEST_FEATURES, - <<"firmware">> => ?TEST_FIRMWARE_HASH, - <<"kernel">> => ?TEST_KERNEL_HASH, - <<"initrd">> => ?TEST_INITRD_HASH, - <<"append">> => ?TEST_APPEND_HASH - }, - GenerateOpts = #{ - priv_wallet => TestWallet, - snp_trusted => [GenerateTrustedConfig] - }, - % Load test report data and set up mock - TestReportJSON = load_test_report_data(), - ok = mock_snp_nif(TestReportJSON), - try - % Step 1: Generate a test report - {ok, GeneratedMsg} = generate(#{}, #{}, GenerateOpts), - % Step 2: Set up verification with DIFFERENT trusted config - WrongTrustedConfig = #{ - <<"vcpus">> => 32, % Different from generation config - <<"vcpu_type">> => 3, % Different from generation config - <<"firmware">> => <<"different_firmware_hash">>, - <<"kernel">> => <<"different_kernel_hash">> - }, - VerifyOpts = #{ - snp_trusted => [WrongTrustedConfig], - snp_enforced_keys => [vcpus, vcpu_type, firmware, kernel] - }, - % Step 3: Verify the generated report with wrong config - VerifyResult = - verify( - #{}, - hb_message:commit(GeneratedMsg, GenerateOpts), - VerifyOpts - ), - ?event({verify_result, {explicit, VerifyResult}}), - % Step 4: Assert that verification fails (either as error or false result) - case VerifyResult of - {ok, <<"false">>} -> - % Verification completed but returned false (all validations ran) - ok; - {error, _Reason} -> - % Verification failed early (expected for wrong config) - ok; - Other -> - % Unexpected result - should fail the test - ?assertEqual({ok, <<"false">>}, Other) - end - after - % Clean up 
mock - unmock_snp_nif() - end. -``` - ---- - -*Generated from [dev_snp.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_snp.erl)* diff --git a/docs/book/src/dev_snp_nif.erl.md b/docs/book/src/dev_snp_nif.erl.md deleted file mode 100644 index 99cb197b8..000000000 --- a/docs/book/src/dev_snp_nif.erl.md +++ /dev/null @@ -1,134 +0,0 @@ -# dev_snp_nif - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_snp_nif.erl) - -## Exported Functions - -- `check_snp_support/0` -- `compute_launch_digest/1` -- `generate_attestation_report/2` -- `verify_measurement/2` -- `verify_signature/1` - ---- - -### check_snp_support - -```erlang -check_snp_support() -> - ?NOT_LOADED. -``` - -### generate_attestation_report - -```erlang -generate_attestation_report(_UniqueData, _VMPL) -> - ?NOT_LOADED. -``` - -### compute_launch_digest - -```erlang -compute_launch_digest(_Args) -> - ?NOT_LOADED. -``` - -### verify_measurement - -```erlang -verify_measurement(_Report, _Expected) -> - ?NOT_LOADED. -``` - -### verify_signature - -```erlang -verify_signature(_Report) -> - ?NOT_LOADED. -``` - -### init - -```erlang -init() -> - ?load_nif_from_crate(dev_snp_nif, 0). -``` - -### not_loaded - -```erlang -not_loaded(Line) -> - erlang:nif_error({not_loaded, [{module, ?MODULE}, {line, Line}]}). -``` - -### generate_attestation_report_test - -```erlang -generate_attestation_report_test() -> - %% Call check_support() to determine if SNP is supported - case dev_snp_nif:check_snp_support() of - {ok, true} -> - %% SNP is supported, generate unique data and test commitment report - UniqueData = crypto:strong_rand_bytes(64), - VMPL = 1, - ?assertEqual( - {ok, UniqueData}, - dev_snp_nif:generate_attestation_report(UniqueData, VMPL) - ); - {ok, false} -> - %% SNP is not supported, log event and assert NIF not loaded - ?event("SNP not supported on machine, skipping test..."), - ?assertEqual(ok, ok) - end. 
-``` - -### compute_launch_digest_test - -```erlang -compute_launch_digest_test() -> - %% Define the data structure - ArgsMap = #{ - vcpus => 32, - vcpu_type => 5, - vmm_type => 1, - guest_features => 16#1, - firmware => "b8c5d4082d5738db6b0fb0294174992738645df70c44cdecf7fad3a62244b788e7e408c582ee48a74b289f3acec78510", - kernel => "69d0cd7d13858e4fcef6bc7797aebd258730f215bc5642c4ad8e4b893cc67576", - initrd => "02e28b6c718bf0a5260d6f34d3c8fe0d71bf5f02af13e1bc695c6bc162120da1", - append => "56e1e5190622c8c6b9daa4fe3ad83f3831c305bb736735bf795b284cb462c9e7" - }, - ?event(ArgsMap), - %% Call the NIF - {ok, Result} = dev_snp_nif:compute_launch_digest(ArgsMap), - %% Expected result - EncTestVector = - <<"wmSDSQYuzE2M3rQcourJnDJHgalADM8TBev3gyjM5ObRNOn8oglvVznFbaWhajU_">>, - ?assertMatch(EncTestVector, hb_util:encode(Result)). -``` - -### verify_measurement_test - -```erlang -verify_measurement_test() -> - %% Define a mock report (JSON string) as binary - {ok, MockReport} = file:read_file("test/snp-measurement.json"), - %% Define the expected measurement (binary) - ExpectedMeasurement = <<94,87,4,197,20,11,255,129,179,197,146,104,8,212,152,248,110,11,60,246,82,254,24,55,201,47,157,229,163,82,108,66,191,138,241,229,40,144,133,170,116,109,17,62,20,241,144,119>>, - %% Call the NIF - Result = dev_snp_nif:verify_measurement(MockReport, ExpectedMeasurement), - ?assertMatch({ok, true}, Result). -``` - -### verify_signature_test - -```erlang -verify_signature_test() -> - %% Define a mock report (JSON string) as binary - {ok, MockAttestation} = file:read_file("test/snp-attestation.json"), - Result = dev_snp_nif:verify_signature(MockAttestation), - ?assertMatch({ok, true}, Result). 
-``` - ---- - -*Generated from [dev_snp_nif.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_snp_nif.erl)* diff --git a/docs/book/src/dev_stack.erl.md b/docs/book/src/dev_stack.erl.md deleted file mode 100644 index 9424529e2..000000000 --- a/docs/book/src/dev_stack.erl.md +++ /dev/null @@ -1,774 +0,0 @@ -# dev_stack - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_stack.erl) - -A device that contains a stack of other devices, and manages their -execution. It can run in two modes: fold (the default), and map. -In fold mode, it runs upon input messages in the order of their keys. A -stack maintains and passes forward a state (expressed as a message) as it -progresses through devices. -For example, a stack of devices as follows: -
-Device -> Stack
-Device-Stack/1/Name -> Add-One-Device
-Device-Stack/2/Name -> Add-Two-Device
-
-When called with the message: -
-#{ Path = "FuncName", binary => `<<"0">>` }
-
-Will produce the output: -
-#{ Path = "FuncName", binary => `<<"3">>` }
-{ok, #{ bin => `<<"3">>` }}
-
-In map mode, the stack will run over all the devices in the stack, and -combine their results into a single message. Each of the devices' -output values have a key that is the device's name in the `Device-Stack` -(its number if the stack is a list). -You can switch between fold and map modes by setting the `Mode` key in the -`Msg2` to either `Fold` or `Map`, or set it globally for the stack by -setting the `Mode` key in the `Msg1` message. The key in `Msg2` takes -precedence over the key in `Msg1`. -The key that is called upon the device stack is the same key that is used -upon the devices that are contained within it. For example, in the above -scenario we resolve FuncName on the stack, leading FuncName to be called on -Add-One-Device and Add-Two-Device. -A device stack responds to special statuses upon responses as follows: - `skip`: Skips the rest of the device stack for the current pass. - `pass`: Causes the stack to increment its pass number and re-execute - the stack from the first device, maintaining the state - accumulated so far. Only available in fold mode. -In all cases, the device stack will return the accumulated state to the -caller as the result of the call to the stack. -The dev_stack adds additional metadata to the message in order to track -the state of its execution as it progresses through devices. These keys -are as follows: - `Stack-Pass`: The number of times the stack has reset and re-executed - from the first device for the current message. - `Input-Prefix`: The prefix that the device should use for its outputs - and inputs. - `Output-Prefix`: The device that was previously executed. -All counters used by the stack are initialized to 1. -Additionally, as implemented in HyperBEAM, the device stack will honor a -number of options that are passed to it as keys in the message. Each of -these options is also passed through to the devices contained within the -stack during execution. 
These options include: - `Error-Strategy`: Determines how the stack handles errors from devices. - See `maybe_error/5` for more information. - `Allow-Multipass`: Determines whether the stack is allowed to automatically - re-execute from the first device when the `pass` tag is returned. See - `maybe_pass/3` for more information. -Under-the-hood, dev_stack uses a `default` handler to resolve all calls to -devices, aside `set/2` which it calls itself to mutate the message's `device` -key in order to change which device is currently being executed. This method -allows dev_stack to ensure that the message's HashPath is always correct, -even as it delegates calls to other devices. An example flow for a `dev_stack` -execution is as follows: -
-	/Msg1/AlicesExcitingKey ->
-		dev_stack:execute ->
-			/Msg1/Set?device=/Device-Stack/1 ->
-			/Msg2/AlicesExcitingKey ->
-			/Msg3/Set?device=/Device-Stack/2 ->
-			/Msg4/AlicesExcitingKey
-			... ->
-			/MsgN/Set?device=[This-Device] ->
-		returns {ok, /MsgN+1} ->
-	/MsgN+1
-
-In this example, the `device` key is mutated a number of times, but the -resulting HashPath remains correct and verifiable. - ---- - -## Exported Functions - -- `generate_append_device/1` -- `info/2` -- `input_prefix/3` -- `output_prefix/3` -- `prefix/3` -- `router/4` - ---- - -### info - -A device that contains a stack of other devices, and manages their - -```erlang -info(Msg, Opts) -> - hb_maps:merge( - #{ - handler => fun router/4, - excludes => [<<"set">>, <<"keys">>] - }, - case hb_maps:get(<<"stack-keys">>, Msg, not_found, Opts) of - not_found -> #{}; - StackKeys -> #{ exports => StackKeys } - end - ). -``` - -### prefix - -Return the default prefix for the stack. -Return the input prefix for the stack. - -```erlang -prefix(Msg1, _Msg2, Opts) -> - hb_ao:get(<<"output-prefix">>, {as, dev_message, Msg1}, <<"">>, Opts). -``` - -### input_prefix - -Return the default prefix for the stack. -Return the input prefix for the stack. -Return the output prefix for the stack. - -```erlang -input_prefix(Msg1, _Msg2, Opts) -> - hb_ao:get(<<"input-prefix">>, {as, dev_message, Msg1}, <<"">>, Opts). -``` - -### output_prefix - -Return the default prefix for the stack. -Return the input prefix for the stack. -Return the output prefix for the stack. -The device stack key router. Sends the request to `resolve_stack`, - -```erlang -output_prefix(Msg1, _Msg2, Opts) -> - hb_ao:get(<<"output-prefix">>, {as, dev_message, Msg1}, <<"">>, Opts). -``` - -### router - -Return the default prefix for the stack. -Return the input prefix for the stack. -Return the output prefix for the stack. -The device stack key router. Sends the request to `resolve_stack`, - -```erlang -router(<<"keys">>, Message1, Message2, Opts) -> - ?event({keys_called, {msg1, Message1}, {msg2, Message2}}), - dev_message:keys(Message1, Opts); -``` - -### router - -Return the default prefix for the stack. -Return the input prefix for the stack. -Return the output prefix for the stack. -The device stack key router. 
Sends the request to `resolve_stack`, - -```erlang -router(Key, Message1, Message2, Opts) -> - case hb_path:matches(Key, <<"transform">>) of - true -> transformer_message(Message1, Opts); - false -> router(Message1, Message2, Opts) - end. -``` - -### router - -```erlang -router(Message1, Message2, Opts) -> - ?event({router_called, {msg1, Message1}, {msg2, Message2}}), - Mode = - case hb_ao:get(<<"mode">>, Message2, not_found, Opts) of - not_found -> - hb_ao:get( - <<"mode">>, - {as, dev_message, Message1}, - <<"Fold">>, - Opts - ); - Msg2Mode -> Msg2Mode - end, - case Mode of - <<"Fold">> -> resolve_fold(Message1, Message2, Opts); - <<"Map">> -> resolve_map(Message1, Message2, Opts) - end. -``` - -### transformer_message - -Return a message which, when given a key, will transform the message - -```erlang -transformer_message(Msg1, Opts) -> - ?event({creating_transformer, {for, Msg1}}), - BaseInfo = info(Msg1, Opts), - {ok, - Msg1#{ - <<"device">> => #{ - info => - fun() -> - hb_maps:merge( - BaseInfo, - #{ - handler => - fun(Key, MsgX1) -> - transform(MsgX1, Key, Opts) - end - }, - Opts - ) - end, - <<"type">> => <<"stack-transformer">> - } - } - }. -``` - -### transform - -Return Message1, transformed such that the device named `Key` from the - -```erlang -transform(Msg1, Key, Opts) -> - % Get the device stack message from Msg1. -``` - -### resolve_fold - -The main device stack execution engine. 
See the moduledoc for more - -```erlang -resolve_fold(Message1, Message2, Opts) -> - {ok, InitDevMsg} = dev_message:get(<<"device">>, Message1, Opts), - StartingPassValue = - hb_ao:get(<<"pass">>, {as, dev_message, Message1}, unset, Opts), - PreparedMessage = hb_ao:set(Message1, <<"pass">>, 1, Opts), - case resolve_fold(PreparedMessage, Message2, 1, Opts) of - {ok, Raw} when not is_map(Raw) -> - {ok, Raw}; - {ok, Result} -> - dev_message:set( - Result, - #{ - <<"device">> => InitDevMsg, - <<"input-prefix">> => - hb_ao:get( - <<"previous-input-prefix">>, - {as, dev_message, Result}, - undefined, - Opts - ), - <<"output-prefix">> => - hb_ao:get( - <<"previous-output-prefix">>, - {as, dev_message, Result}, - undefined, - Opts - ), - <<"device-key">> => unset, - <<"device-stack-previous">> => unset, - <<"pass">> => StartingPassValue - }, - Opts - ); - Else -> - Else - end. -``` - -### resolve_fold - -```erlang -resolve_fold(Message1, Message2, DevNum, Opts) -> - case transform(Message1, DevNum, Opts) of - {ok, Message3} -> - ?event({stack_execute, DevNum, {msg1, Message3}, {msg2, Message2}}), - case hb_ao:resolve(Message3, Message2, Opts) of - {ok, Message4} when is_map(Message4) -> - ?event({result, ok, DevNum, Message4}), - resolve_fold(Message4, Message2, DevNum + 1, Opts); - {error, not_found} -> - ?event({skipping_device, not_found, DevNum, Message3}), - resolve_fold(Message3, Message2, DevNum + 1, Opts); - {ok, RawResult} -> - ?event({returning_raw_result, RawResult}), - {ok, RawResult}; - {skip, Message4} when is_map(Message4) -> - ?event({result, skip, DevNum, Message4}), - {ok, Message4}; - {pass, Message4} when is_map(Message4) -> - ?event({result, pass, {dev, DevNum}, Message4}), - resolve_fold( - increment_pass(Message4, Opts), - Message2, - 1, - Opts - ); - {error, Info} -> - ?event({result, error, {dev, DevNum}, Info}), - maybe_error(Message1, Message2, DevNum, Info, Opts); - Unexpected -> - ?event({result, unexpected, {dev, DevNum}, Unexpected}), - 
maybe_error( - Message1, - Message2, - DevNum, - {unexpected_result, Unexpected}, - Opts - ) - end; - not_found -> - ?event({execution_complete, DevNum, Message1}), - {ok, Message1} - end. -``` - -### resolve_map - -Map over the devices in the stack, accumulating the output in a single - -```erlang -resolve_map(Message1, Message2, Opts) -> - ?event({resolving_map, {msg1, Message1}, {msg2, Message2}}), - DevKeys = - hb_ao:get( - <<"device-stack">>, - {as, dev_message, Message1}, - Opts - ), - Res = {ok, - hb_maps:filtermap( - fun(Key, _Dev) -> - {ok, OrigWithDev} = transform(Message1, Key, Opts), - case hb_ao:resolve(OrigWithDev, Message2, Opts) of - {ok, Value} -> {true, Value}; - _ -> false - end - end, - hb_maps:without(?AO_CORE_KEYS, hb_ao:normalize_keys(DevKeys, Opts), Opts), - Opts - ) - }, - Res. -``` - -### increment_pass - -Helper to increment the pass number. - -```erlang -increment_pass(Message, Opts) -> - hb_ao:set( - Message, - #{ <<"pass">> => hb_ao:get(<<"pass">>, {as, dev_message, Message}, 1, Opts) + 1 }, - Opts - ). -``` - -### maybe_error - -```erlang -maybe_error(Message1, Message2, DevNum, Info, Opts) -> - case hb_opts:get(error_strategy, throw, Opts) of - stop -> - {error, {stack_call_failed, Message1, Message2, DevNum, Info}}; - throw -> - erlang:raise( - error, - {device_failed, - {dev_num, DevNum}, - {msg1, Message1}, - {msg2, Message2}, - {info, Info} - }, - [] - ) - end. -``` - -### generate_append_device - -```erlang -generate_append_device(Separator) -> - generate_append_device(Separator, ok). -``` - -### generate_append_device - -```erlang -generate_append_device(Separator, Status) -> - #{ - append => - fun(M1 = #{ <<"pass">> := 3 }, _) -> - % Stop after 3 passes. 
-``` - -### transform_internal_call_device_test - -Test that the transform function can be called correctly internally - -```erlang -transform_internal_call_device_test() -> - AppendDev = generate_append_device(<<"_">>), - Msg1 = - #{ - <<"device">> => <<"stack@1.0">>, - <<"device-stack">> => - #{ - <<"1">> => AppendDev, - <<"2">> => <<"message@1.0">> - } - }, - ?assertMatch( - <<"message@1.0">>, - hb_ao:get( - <<"device">>, - element(2, transform(Msg1, <<"2">>, #{})) - ) - ). -``` - -### transform_external_call_device_test - -Ensure we can generate a transformer message that can be called to - -```erlang -transform_external_call_device_test() -> - Msg1 = #{ - <<"device">> => <<"stack@1.0">>, - <<"device-stack">> => - #{ - <<"make-cool">> => - #{ - info => - fun() -> - #{ - handler => - fun(<<"keys">>, MsgX1) -> - ?event({test_dev_keys_called, MsgX1}), - {ok, hb_maps:keys(MsgX1, #{})}; - (Key, MsgX1) -> - {ok, Value} = - dev_message:get(Key, MsgX1, #{}), - dev_message:set( - MsgX1, - #{ Key => - << Value/binary, "-Cool">> - }, - #{} - ) - end - } - end, - <<"suffix">> => <<"-Cool">> - } - }, - <<"value">> => <<"Super">> - }, - ?assertMatch( - {ok, #{ <<"value">> := <<"Super-Cool">> }}, - hb_ao:resolve(Msg1, #{ - <<"path">> => <<"/transform/make-cool/value">> - }, #{}) - ). -``` - -### example_device_for_stack_test - -```erlang -example_device_for_stack_test() -> - % Test the example device that we use for later stack tests, such that - % we know that an error later is actually from the stack, and not from - % the example device. 
-``` - -### simple_stack_execute_test - -```erlang -simple_stack_execute_test() -> - Msg = #{ - <<"device">> => <<"stack@1.0">>, - <<"device-stack">> => - #{ - <<"1">> => generate_append_device(<<"!D1!">>), - <<"2">> => generate_append_device(<<"_D2_">>) - }, - <<"result">> => <<"INIT">> - }, - ?event({stack_executing, test, {explicit, Msg}}), - ?assertMatch( - {ok, #{ <<"result">> := <<"INIT!D1!2_D2_2">> }}, - hb_ao:resolve(Msg, #{ <<"path">> => <<"append">>, <<"bin">> => <<"2">> }, #{}) - ). -``` - -### many_devices_test - -```erlang -many_devices_test() -> - Msg = #{ - <<"device">> => <<"stack@1.0">>, - <<"device-stack">> => - #{ - <<"1">> => generate_append_device(<<"+D1">>), - <<"2">> => generate_append_device(<<"+D2">>), - <<"3">> => generate_append_device(<<"+D3">>), - <<"4">> => generate_append_device(<<"+D4">>), - <<"5">> => generate_append_device(<<"+D5">>), - <<"6">> => generate_append_device(<<"+D6">>), - <<"7">> => generate_append_device(<<"+D7">>), - <<"8">> => generate_append_device(<<"+D8">>) - }, - <<"result">> => <<"INIT">> - }, - ?assertMatch( - {ok, - #{ - <<"result">> := - <<"INIT+D12+D22+D32+D42+D52+D62+D72+D82">> - } - }, - hb_ao:resolve(Msg, #{ <<"path">> => <<"append">>, <<"bin">> => <<"2">> }, #{}) - ). -``` - -### benchmark_test - -```erlang -benchmark_test() -> - BenchTime = 0.3, - Msg = #{ - <<"device">> => <<"stack@1.0">>, - <<"device-stack">> => - #{ - <<"1">> => generate_append_device(<<"+D1">>), - <<"2">> => generate_append_device(<<"+D2">>), - <<"3">> => generate_append_device(<<"+D3">>), - <<"4">> => generate_append_device(<<"+D4">>), - <<"5">> => generate_append_device(<<"+D5">>) - }, - <<"result">> => <<"INIT">> - }, - Iterations = - hb_test_utils:benchmark( - fun() -> - hb_ao:resolve(Msg, - #{ - <<"path">> => <<"append">>, - <<"bin">> => <<"2">> - }, - #{} - ), - {count, 5} - end, - BenchTime - ), - hb_test_utils:benchmark_print( - <<"Stack:">>, - <<"resolutions">>, - Iterations, - BenchTime - ), - ?assert(Iterations >= 10). 
-``` - -### test_prefix_msg - -```erlang -test_prefix_msg() -> - Dev = #{ - prefix_set => - fun(M1, M2, Opts) -> - In = input_prefix(M1, M2, Opts), - Out = output_prefix(M1, M2, Opts), - Key = hb_ao:get(<<"key">>, M2, Opts), - Value = hb_ao:get(<>, M2, Opts), - ?event({setting, {inp, In}, {outp, Out}, {key, Key}, {value, Value}}), - {ok, hb_ao:set( - M1, - <>, - Value, - Opts - )} - end - }, - #{ - <<"device">> => <<"stack@1.0">>, - <<"device-stack">> => #{ <<"1">> => Dev, <<"2">> => Dev } - }. -``` - -### no_prefix_test - -```erlang -no_prefix_test() -> - Msg2 = - #{ - <<"path">> => <<"prefix_set">>, - <<"key">> => <<"example">>, - <<"example">> => 1 - }, - {ok, Ex1Msg3} = hb_ao:resolve(test_prefix_msg(), Msg2, #{}), - ?event({ex1, Ex1Msg3}), - ?assertMatch(1, hb_ao:get(<<"example">>, Ex1Msg3, #{})). -``` - -### output_prefix_test - -```erlang -output_prefix_test() -> - Msg1 = - (test_prefix_msg())#{ - <<"output-prefixes">> => #{ <<"1">> => <<"out1/">>, <<"2">> => <<"out2/">> } - }, - Msg2 = - #{ - <<"path">> => <<"prefix_set">>, - <<"key">> => <<"example">>, - <<"example">> => 1 - }, - {ok, Ex2Msg3} = hb_ao:resolve(Msg1, Msg2, #{}), - ?assertMatch(1, - hb_ao:get(<<"out1/example">>, {as, dev_message, Ex2Msg3}, #{})), - ?assertMatch(1, - hb_ao:get(<<"out2/example">>, {as, dev_message, Ex2Msg3}, #{})). -``` - -### input_and_output_prefixes_test - -```erlang -input_and_output_prefixes_test() -> - Msg1 = - (test_prefix_msg())#{ - <<"input-prefixes">> => #{ 1 => <<"in1/">>, 2 => <<"in2/">> }, - <<"output-prefixes">> => #{ 1 => <<"out1/">>, 2 => <<"out2/">> } - }, - Msg2 = - #{ - <<"path">> => <<"prefix_set">>, - <<"key">> => <<"example">>, - <<"in1">> => #{ <<"example">> => 1 }, - <<"in2">> => #{ <<"example">> => 2 } - }, - {ok, Msg3} = hb_ao:resolve(Msg1, Msg2, #{}), - ?assertMatch(1, - hb_ao:get(<<"out1/example">>, {as, dev_message, Msg3}, #{})), - ?assertMatch(2, - hb_ao:get(<<"out2/example">>, {as, dev_message, Msg3}, #{})). 
-``` - -### input_output_prefixes_passthrough_test - -```erlang -input_output_prefixes_passthrough_test() -> - Msg1 = - (test_prefix_msg())#{ - <<"output-prefix">> => <<"combined-out/">>, - <<"input-prefix">> => <<"combined-in/">> - }, - Msg2 = - #{ - <<"path">> => <<"prefix_set">>, - <<"key">> => <<"example">>, - <<"combined-in">> => #{ <<"example">> => 1 } - }, - {ok, Ex2Msg3} = hb_ao:resolve(Msg1, Msg2, #{}), - ?assertMatch(1, - hb_ao:get( - <<"combined-out/example">>, - {as, dev_message, Ex2Msg3}, - #{} - ) - ). -``` - -### reinvocation_test - -```erlang -reinvocation_test() -> - Msg = #{ - <<"device">> => <<"stack@1.0">>, - <<"device-stack">> => - #{ - <<"1">> => generate_append_device(<<"+D1">>), - <<"2">> => generate_append_device(<<"+D2">>) - }, - <<"result">> => <<"INIT">> - }, - Res1 = hb_ao:resolve(Msg, #{ <<"path">> => <<"append">>, <<"bin">> => <<"2">> }, #{}), - ?assertMatch( - {ok, #{ <<"result">> := <<"INIT+D12+D22">> }}, - Res1 - ), - {ok, Msg2} = Res1, - Res2 = hb_ao:resolve(Msg2, #{ <<"path">> => <<"append">>, <<"bin">> => <<"3">> }, #{}), - ?assertMatch( - {ok, #{ <<"result">> := <<"INIT+D12+D22+D13+D23">> }}, - Res2 - ). -``` - -### skip_test - -```erlang -skip_test() -> - Msg1 = #{ - <<"device">> => <<"stack@1.0">>, - <<"device-stack">> => - #{ - <<"1">> => generate_append_device(<<"+D1">>, skip), - <<"2">> => generate_append_device(<<"+D2">>) - }, - <<"result">> => <<"INIT">> - }, - ?assertMatch( - {ok, #{ <<"result">> := <<"INIT+D12">> }}, - hb_ao:resolve( - Msg1, - #{ <<"path">> => <<"append">>, <<"bin">> => <<"2">> }, - #{} - ) - ). -``` - -### pass_test - -```erlang -pass_test() -> - % The append device will return `ok' after 2 passes, so this test - % recursively calls the device by forcing its response to be `pass' - % until that happens. -``` - -### not_found_test - -```erlang -not_found_test() -> - % Ensure that devices not exposing a key are safely skipped. 
-``` - -### simple_map_test - -```erlang -simple_map_test() -> - Msg = #{ - <<"device">> => <<"stack@1.0">>, - <<"device-stack">> => - #{ - <<"1">> => generate_append_device(<<"+D1">>), - <<"2">> => generate_append_device(<<"+D2">>) - }, - <<"result">> => <<"INIT">> - }, - {ok, Msg3} = - hb_ao:resolve( - Msg, - #{ <<"path">> => <<"append">>, <<"mode">> => <<"Map">>, <<"bin">> => <<"/">> }, - #{} - ), - ?assertMatch(<<"INIT+D1/">>, hb_ao:get(<<"1/result">>, Msg3, #{})), -``` - ---- - -*Generated from [dev_stack.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_stack.erl)* diff --git a/docs/book/src/dev_test.erl.md b/docs/book/src/dev_test.erl.md deleted file mode 100644 index 068f15399..000000000 --- a/docs/book/src/dev_test.erl.md +++ /dev/null @@ -1,333 +0,0 @@ -# dev_test - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_test.erl) - -A simple test device for AO-Core, so that we can test the functionality that -depends on using Erlang's module system. -NOTE: This device is labelled `test-device/1.0` to avoid conflicts with -other testing functionality -- care should equally be taken to avoid -using the `test` key in other settings. - ---- - -## Exported Functions - -- `compute/3` -- `delay/3` -- `increment_counter/3` -- `index/3` -- `info/1` -- `info/3` -- `init/3` -- `load/3` -- `mul/2` -- `postprocess/3` -- `restore/3` -- `snapshot/3` -- `test_func/1` -- `update_state/3` - ---- - -### info - -Exports a default_handler function that can be used to test the - -```erlang -info(_) -> - #{ - <<"default">> => dev_message, - handlers => #{ - <<"info">> => fun info/3, - <<"update_state">> => fun update_state/3, - <<"increment_counter">> => fun increment_counter/3 - } - }. -``` - -### info - -Exports a default_handler function that can be used to test the -Example index handler. 
- -```erlang -info(_Msg1, _Msg2, _Opts) -> - InfoBody = #{ - <<"description">> => <<"Test device for testing the AO-Core framework">>, - <<"version">> => <<"1.0">>, - <<"paths">> => #{ - <<"info">> => <<"Get device info">>, - <<"test_func">> => <<"Test function">>, - <<"compute">> => <<"Compute function">>, - <<"init">> => <<"Initialize function">>, - <<"restore">> => <<"Restore function">>, - <<"mul">> => <<"Multiply function">>, - <<"snapshot">> => <<"Snapshot function">>, - <<"response">> => <<"Response function">>, - <<"update_state">> => <<"Update state function">> - } - }, - {ok, #{<<"status">> => 200, <<"body">> => InfoBody}}. -``` - -### index - -Exports a default_handler function that can be used to test the -Example index handler. - -```erlang -index(Msg, _Req, Opts) -> - Name = hb_ao:get(<<"name">>, Msg, <<"turtles">>, Opts), - {ok, - #{ - <<"content-type">> => <<"text/html">>, - <<"body">> => <<"i like ", Name/binary, "!">> - } - }. -``` - -### load - -Return a message with the device set to this module. - -```erlang -load(Base, _, _Opts) -> - {ok, Base#{ <<"device">> => <<"test-device@1.0">> }}. -``` - -### test_func - -Return a message with the device set to this module. -Example implementation of a `compute` handler. Makes a running list of - -```erlang -test_func(_) -> - {ok, <<"GOOD_FUNCTION">>}. -``` - -### compute - -Return a message with the device set to this module. -Example implementation of a `compute` handler. Makes a running list of - -```erlang -compute(Msg1, Msg2, Opts) -> - AssignmentSlot = hb_ao:get(<<"slot">>, Msg2, Opts), - Seen = hb_ao:get(<<"already-seen">>, Msg1, Opts), - ?event({compute_called, {msg1, Msg1}, {msg2, Msg2}, {opts, Opts}}), - {ok, - hb_ao:set( - Msg1, - #{ - <<"random-key">> => <<"random-value">>, - <<"results">> => - #{ <<"assignment-slot">> => AssignmentSlot }, - <<"already-seen">> => [AssignmentSlot | Seen] - }, - Opts - ) - }. -``` - -### init - -Example `init/3` handler. 
Sets the `Already-Seen` key to an empty list. -Example `restore/3` handler. Sets the hidden key `Test/Started` to the - -```erlang -init(Msg, _Msg2, Opts) -> - ?event({init_called_on_dev_test, Msg}), - {ok, hb_ao:set(Msg, #{ <<"already-seen">> => [] }, Opts)}. -``` - -### restore - -Example `init/3` handler. Sets the `Already-Seen` key to an empty list. -Example `restore/3` handler. Sets the hidden key `Test/Started` to the - -```erlang -restore(Msg, _Msg2, Opts) -> - ?event({restore_called_on_dev_test, Msg}), - case hb_ao:get(<<"already-seen">>, Msg, Opts) of - not_found -> - ?event({restore_not_found, Msg}), - {error, <<"No viable state to restore.">>}; - AlreadySeen -> - ?event({restore_found, AlreadySeen}), - {ok, - hb_private:set( - Msg, - #{ <<"test-key/started-state">> => AlreadySeen }, - Opts - ) - } - end. -``` - -### mul - -Example implementation of an `imported` function for a WASM -Do nothing when asked to snapshot. - -```erlang -mul(Msg1, Msg2) -> - ?event(mul_called), - State = hb_ao:get(<<"state">>, Msg1, #{ hashpath => ignore }), - [Arg1, Arg2] = hb_ao:get(<<"args">>, Msg2, #{ hashpath => ignore }), - ?event({mul_called, {state, State}, {args, [Arg1, Arg2]}}), - {ok, #{ <<"state">> => State, <<"results">> => [Arg1 * Arg2] }}. -``` - -### snapshot - -Example implementation of an `imported` function for a WASM -Do nothing when asked to snapshot. - -```erlang -snapshot(Msg1, Msg2, _Opts) -> - ?event({snapshot_called, {msg1, Msg1}, {msg2, Msg2}}), - {ok, #{}}. -``` - -### postprocess - -Set the `postprocessor-called` key to true in the HTTP server. - -```erlang -postprocess(_Msg, #{ <<"body">> := Msgs }, Opts) -> - ?event({postprocess_called, Opts}), - hb_http_server:set_opts(Opts#{ <<"postprocessor-called">> => true }), - {ok, Msgs}. -``` - -### update_state - -Find a test worker's PID and send it an update message. 
- -```erlang -update_state(_Msg, Msg2, _Opts) -> - case hb_ao:get(<<"test-id">>, Msg2) of - not_found -> - {error, <<"No test ID found in message.">>}; - ID -> - LookupResult = hb_name:lookup({<<"test">>, ID}), - case LookupResult of - undefined -> - {error, <<"No test worker found.">>}; - Pid -> - Pid ! {update, Msg2}, - {ok, Pid} - end - end. -``` - -### increment_counter - -Find a test worker's PID and send it an increment message. - -```erlang -increment_counter(_Msg1, Msg2, _Opts) -> - case hb_ao:get(<<"test-id">>, Msg2) of - not_found -> - {error, <<"No test ID found in message.">>}; - ID -> - LookupResult = hb_name:lookup({<<"test">>, ID}), - case LookupResult of - undefined -> - {error, <<"No test worker found for increment.">>}; - Pid when is_pid(Pid) -> - Pid ! {increment}, - {ok, Pid}; - _ -> % Handle case where registered value isn't a PID - {error, <<"Invalid registration found for test worker.">>} - end - end. -``` - -### delay - -Does nothing, just sleeps `Req/duration or 750` ms and returns the - -```erlang -delay(Msg1, Req, Opts) -> - Duration = - hb_ao:get_first( - [ - {Msg1, <<"duration">>}, - {Req, <<"duration">>} - ], - 750, - Opts - ), - ?event(delay, {delay, {sleeping, Duration}}), - timer:sleep(Duration), - ?event({delay, waking}), - Return = - case hb_ao:get(<<"return">>, Msg1, Opts) of - not_found -> - hb_ao:get(<<"body">>, Req, #{ <<"result">> => <<"slept">> }, Opts); - ReturnMsgs -> - ReturnMsgs - end, - ?event(delay, {returning, Return}), - {ok, Return}. -``` - -### device_with_function_key_module_test - -Tests the resolution of a default function. - -```erlang -device_with_function_key_module_test() -> - Msg = - #{ - <<"device">> => <<"test-device@1.0">> - }, - ?assertEqual( - {ok, <<"GOOD_FUNCTION">>}, - hb_ao:resolve(Msg, test_func, #{}) - ). 
-``` - -### compute_test - -```erlang -compute_test() -> - Msg0 = #{ <<"device">> => <<"test-device@1.0">> }, - {ok, Msg1} = hb_ao:resolve(Msg0, init, #{}), - Msg2 = - hb_ao:set( - #{ <<"path">> => <<"compute">> }, - #{ - <<"slot">> => 1, - <<"body/number">> => 1337 - }, - #{} - ), - {ok, Msg3} = hb_ao:resolve(Msg1, Msg2, #{}), - ?assertEqual(1, hb_ao:get(<<"results/assignment-slot">>, Msg3, #{})), - Msg4 = - hb_ao:set( - #{ <<"path">> => <<"compute">> }, - #{ - <<"slot">> => 2, - <<"body/number">> => 9001 - }, - #{} - ), - {ok, Msg5} = hb_ao:resolve(Msg3, Msg4, #{}), - ?assertEqual(2, hb_ao:get(<<"results/assignment-slot">>, Msg5, #{})), - ?assertEqual([2, 1], hb_ao:get(<<"already-seen">>, Msg5, #{})). -``` - -### restore_test - -```erlang -restore_test() -> - Msg1 = #{ <<"device">> => <<"test-device@1.0">>, <<"already-seen">> => [1] }, - {ok, Msg3} = hb_ao:resolve(Msg1, <<"restore">>, #{}), -``` - ---- - -*Generated from [dev_test.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_test.erl)* diff --git a/docs/book/src/dev_volume.erl.md b/docs/book/src/dev_volume.erl.md deleted file mode 100644 index 778c8e528..000000000 --- a/docs/book/src/dev_volume.erl.md +++ /dev/null @@ -1,576 +0,0 @@ -# dev_volume - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_volume.erl) - -Secure Volume Management for HyperBEAM Nodes -This module handles encrypted storage operations for HyperBEAM, -providing a robust and secure approach to data persistence. It manages -the complete lifecycle of encrypted volumes from detection to creation, -formatting, and mounting. -Key responsibilities: -- Volume detection and initialization -- Encrypted partition creation and formatting -- Secure mounting using cryptographic keys -- Store path reconfiguration to use mounted volumes -- Automatic handling of various system states - (new device, existing partition, etc.) 
-The primary entry point is the `mount/3` function, which orchestrates -the entire process based on the provided configuration parameters. This -module works alongside `hb_volume` which provides the low-level -operations for device manipulation. -Security considerations: -- Ensures data at rest is protected through LUKS encryption -- Provides proper volume sanitization and secure mounting -- IMPORTANT: This module only applies configuration set in node options - and does NOT accept disk operations via HTTP requests. It cannot - format arbitrary disks as all operations are safeguarded by host - operating system permissions enforced upon the HyperBEAM environment. - ---- - -## Exported Functions - -- `info/1` -- `info/3` -- `mount/3` -- `public_key/3` - ---- - -### info - -Secure Volume Management for HyperBEAM Nodes -Exported function for getting device info, controls which functions - -```erlang -info(_) -> - ?event(debug_volume, {info, entry, device_info_requested}), - #{ exports => [info, mount, public_key] }. -``` - -### info - -HTTP info response providing information about this device -Handles the complete process of secure encrypted volume mounting. - -```erlang --spec mount(term(), term(), map()) -> - {ok, binary()} | {error, binary()}. 
-``` - -```erlang -info(_Msg1, _Msg2, _Opts) -> - ?event(debug_volume, {info, http_request, starting}), - InfoBody = #{ - <<"description">> => - <<"Secure Volume Management for HyperBEAM Nodes">>, - <<"version">> => <<"1.0">>, - <<"api">> => #{ - <<"info">> => #{ - <<"description">> => <<"Get device info">> - }, - <<"mount">> => #{ - <<"description">> => <<"Mount an encrypted volume">>, - <<"required_node_opts">> => #{ - <<"priv_volume_key">> => <<"The encryption key">>, - <<"volume_device">> => <<"The base device path">>, - <<"volume_partition">> => <<"The partition path">>, - <<"volume_partition_type">> => <<"The partition type">>, - <<"volume_name">> => - <<"The name for the encrypted volume">>, - <<"volume_mount_point">> => - <<"Where to mount the volume">>, - <<"volume_store_path">> => - <<"The store path on the volume">> - } - }, - <<"public_key">> => #{ - <<"description">> => - <<"Get the node's public key for encrypted key exchange">> - } - } - }, - ?event(debug_volume, {info, http_response, success}), - {ok, #{<<"status">> => 200, <<"body">> => InfoBody}}. -%% -%% -%% -``` - -### mount - -HTTP info response providing information about this device -Handles the complete process of secure encrypted volume mounting. - -```erlang --spec mount(term(), term(), map()) -> - {ok, binary()} | {error, binary()}. 
-``` - -```erlang -mount(_M1, _M2, Opts) -> - ?event(debug_volume, {mount, entry, starting}), - % Check if an encrypted key was sent in the request - EncryptedKey = hb_opts:get(priv_volume_key, not_found, Opts), - % Determine if we need to decrypt a key or use one from config - SkipDecryption = hb_opts:get(volume_skip_decryption, - <<"false">>, Opts), - Key = case SkipDecryption of - <<"true">> -> - ?event(debug_mount, {mount, skip_decryption, true}), - EncryptedKey; - _ -> - ?event(debug_volume, {decrypt_volume_key}), - case decrypt_volume_key(EncryptedKey, Opts) of - {ok, DecryptedKey} -> DecryptedKey; - {error, DecryptError} -> - ?event(debug_mount, - {mount, key_decrypt_error, DecryptError} - ), - not_found - end - end, - Device = hb_opts:get(volume_device, not_found, Opts), - Partition = hb_opts:get(volume_partition, not_found, Opts), - PartitionType = hb_opts:get(volume_partition_type, not_found, Opts), - VolumeName = hb_opts:get(volume_name, not_found, Opts), - MountPoint = hb_opts:get(volume_mount_point, not_found, Opts), - StorePath = hb_opts:get(volume_store_path, not_found, Opts), - ?event(debug_volume, - {mount, options_extracted, - { - device, Device, partition, Partition, - partition_type, PartitionType, volume_name, VolumeName, - mount_point, MountPoint, store_path, StorePath - } - } - ), - % Check for missing required node options - case hb_opts:check_required_opts([ - {<<"priv_volume_key">>, Key}, - {<<"volume_device">>, Device}, - {<<"volume_partition">>, Partition}, - {<<"volume_partition_type">>, PartitionType}, - {<<"volume_name">>, VolumeName}, - {<<"volume_mount_point">>, MountPoint}, - {<<"volume_store_path">>, StorePath} - ], Opts) of - {ok, _} -> - check_base_device( - Device, Partition, PartitionType, VolumeName, - MountPoint, StorePath, Key, Opts - ); - {error, ErrorMsg} -> - ?event(debug_volume, {mount, required_opts_error, ErrorMsg}), - {error, ErrorMsg} - end. 
-``` - -### public_key - -Returns the node's public key for secure key exchange. - -```erlang --spec public_key(term(), term(), map()) -> - {ok, map()} | {error, binary()}. -``` - -```erlang -public_key(_M1, _M2, Opts) -> - % Retrieve the node's wallet - case hb_opts:get(priv_wallet, undefined, Opts) of - undefined -> - % Node doesn't have a wallet yet - ?event(debug_volume, - {public_key, wallet_error, no_wallet_found} - ), - {error, <<"Node wallet not available">>}; - {{_KeyType, _Priv, Pub}, _PubKey} -> - ?event(debug_volume, - {public_key, wallet_found, key_conversion_starting} - ), - % Convert to a standard RSA format (PKCS#1 or X.509) - RsaPubKey = #'RSAPublicKey'{ - publicExponent = 65537, % Common RSA exponent - modulus = crypto:bytes_to_integer(Pub) - }, - % Convert to DER format - DerEncoded = public_key:der_encode('RSAPublicKey', RsaPubKey), - % Base64 encode for transmission - Base64Key = base64:encode(DerEncoded), - ?event(debug_volume, {public_key, success, key_encoded}), - {ok, #{ - <<"status">> => 200, - <<"public_key">> => Base64Key, - <<"message">> => - <<"Use this public key to encrypt your volume key">> - }} - end. -``` - -### decrypt_volume_key - -Decrypts an encrypted volume key using the node's private key. - -```erlang --spec decrypt_volume_key(binary(), map()) -> - {ok, binary()} | {error, binary()}. 
-``` - -```erlang -decrypt_volume_key(EncryptedKeyBase64, Opts) -> - % Decode the encrypted key - try - EncryptedKey = base64:decode(EncryptedKeyBase64), - ?event(debug_volume, - {decrypt_volume_key, base64_decoded, success} - ), - % Retrieve the node's wallet with private key - case hb_opts:get(priv_wallet, undefined, Opts) of - undefined -> - ?event(debug_volume, - {decrypt_volume_key, wallet_error, no_wallet} - ), - {error, <<"Node wallet not available for decryption">>}; - {{_KeyType = {rsa, E}, Priv, Pub}, _PubKey} -> - ?event(debug_volume, - {decrypt_volume_key, wallet_found, creating_private_key} - ), - % Create RSA private key record for decryption - RsaPrivKey = #'RSAPrivateKey'{ - publicExponent = E, - modulus = crypto:bytes_to_integer(Pub), - privateExponent = crypto:bytes_to_integer(Priv) - }, - % Decrypt the key - DecryptedKey = - public_key:decrypt_private( - EncryptedKey, - RsaPrivKey - ), - ?event(debug_volume, - {decrypt_volume_key, decryption_success, key_decrypted} - ), - {ok, DecryptedKey} - end - catch - _:Error -> - ?event(debug_volume, - {decrypt_volume_key, decryption_error, Error} - ), - {error, <<"Failed to decrypt volume key">>} - end. -``` - -### check_base_device - -Check if the base device exists and if it does, check if the - -```erlang --spec check_base_device( - term(), term(), term(), term(), term(), term(), term(), map() -) -> {ok, binary()} | {error, binary()}. 
-``` - -```erlang -check_base_device( - Device, Partition, PartitionType, VolumeName, MountPoint, StorePath, - Key, Opts -) -> - ?event(debug_volume, - {check_base_device, entry, {checking_device, Device}} - ), - case hb_volume:check_for_device(Device) of - false -> - % Base device doesn't exist - ?event(debug_volume, - {check_base_device, device_not_found, Device} - ), - {error, <<"Base device not found">>}; - true -> - ?event(debug_volume, - {check_base_device, device_found, - {proceeding_to_partition_check, Device} - } - ), - check_partition( - Device, Partition, PartitionType, VolumeName, - MountPoint, StorePath, Key, Opts - ) - end. -``` - -### check_partition - -Check if the partition exists. If it does, attempt to mount it. - -```erlang --spec check_partition( - term(), term(), term(), term(), term(), term(), term(), map() -) -> {ok, binary()} | {error, binary()}. -``` - -```erlang -check_partition( - Device, Partition, PartitionType, VolumeName, MountPoint, StorePath, - Key, Opts -) -> - ?event(debug_volume, - {check_partition, entry, {checking_partition, Partition}} - ), - case hb_volume:check_for_device(Partition) of - true -> - ?event(debug_volume, - {check_partition, partition_exists, - {mounting_existing, Partition} - } - ), - % Partition exists, try mounting it - mount_existing_partition( - Partition, Key, MountPoint, VolumeName, StorePath, Opts - ); - false -> - ?event(debug_volume, - {check_partition, partition_not_exists, - {creating_new, Partition} - } - ), - % Partition doesn't exist, create it - create_and_mount_partition( - Device, Partition, PartitionType, Key, - MountPoint, VolumeName, StorePath, Opts - ) - end. -``` - -### mount_existing_partition - -Mount an existing partition. - -```erlang --spec mount_existing_partition( - term(), term(), term(), term(), term(), map() -) -> {ok, binary()} | {error, binary()}. 
-``` - -```erlang -mount_existing_partition( - Partition, Key, MountPoint, VolumeName, StorePath, Opts -) -> - ?event(debug_volume, - {mount_existing_partition, entry, - {attempting_mount, Partition, MountPoint} - } - ), - case hb_volume:mount_disk(Partition, Key, MountPoint, VolumeName) of - {ok, MountResult} -> - ?event(debug_volume, - {mount_existing_partition, mount_success, MountResult} - ), - update_store_path(StorePath, Opts); - {error, MountError} -> - ?event(debug_volume, - {mount_existing_partition, mount_error, - {error, MountError} - } - ), - {error, <<"Failed to mount volume">>} - end. -``` - -### create_and_mount_partition - -Create, format and mount a new partition. - -```erlang --spec create_and_mount_partition( - term(), term(), term(), term(), term(), term(), term(), map() -) -> {ok, binary()} | {error, binary()}. -``` - -```erlang -create_and_mount_partition( - Device, Partition, PartitionType, Key, - MountPoint, VolumeName, StorePath, Opts -) -> - ?event(debug_volume, - {create_and_mount_partition, entry, - {creating_partition, Device, PartitionType} - } - ), - case hb_volume:create_partition(Device, PartitionType) of - {ok, PartitionResult} -> - ?event(debug_volume, - {create_and_mount_partition, partition_created, - PartitionResult - } - ), - format_and_mount( - Partition, Key, MountPoint, VolumeName, StorePath, Opts - ); - {error, PartitionError} -> - ?event(debug_volume, - {create_and_mount_partition, partition_error, - {error, PartitionError} - } - ), - {error, <<"Failed to create partition">>} - end. -``` - -### format_and_mount - -Format and mount a newly created partition. - -```erlang --spec format_and_mount( - term(), term(), term(), term(), term(), map() -) -> {ok, binary()} | {error, binary()}. 
-``` - -```erlang -format_and_mount( - Partition, Key, MountPoint, VolumeName, StorePath, Opts -) -> - ?event(debug_volume, - {format_and_mount, entry, {formatting_partition, Partition}} - ), - case hb_volume:format_disk(Partition, Key) of - {ok, FormatResult} -> - ?event(debug_volume, - {format_and_mount, format_success, - {result, FormatResult} - } - ), - mount_formatted_partition( - Partition, Key, MountPoint, VolumeName, StorePath, Opts - ); - {error, FormatError} -> - ?event(debug_volume, - {format_and_mount, format_error, - {error, FormatError} - } - ), - {error, <<"Failed to format disk">>} - end. -``` - -### mount_formatted_partition - -Mount a newly formatted partition. - -```erlang --spec mount_formatted_partition( - term(), term(), term(), term(), term(), map() -) -> {ok, binary()} | {error, binary()}. -``` - -```erlang -mount_formatted_partition( - Partition, Key, MountPoint, VolumeName, StorePath, Opts -) -> - ?event(debug_volume, - {mount_formatted_partition, entry, - {mounting_formatted, Partition, MountPoint} - } - ), - case hb_volume:mount_disk(Partition, Key, MountPoint, VolumeName) of - {ok, RetryMountResult} -> - ?event(debug_volume, - {mount_formatted_partition, mount_success, - {result, RetryMountResult} - } - ), - update_store_path(StorePath, Opts); - {error, RetryMountError} -> - ?event(debug_volume, - {mount_formatted_partition, mount_error, - {error, RetryMountError} - } - ), - {error, <<"Failed to mount newly formatted volume">>} - end. -``` - -### update_store_path - -Update the store path to use the mounted volume. - -```erlang --spec update_store_path(term(), map()) -> - {ok, binary()} | {error, binary()}. 
-``` - -```erlang -update_store_path(StorePath, Opts) -> - ?event(debug_volume, - {update_store_path, entry, {updating_store, StorePath}} - ), - CurrentStore = hb_opts:get(store, [], Opts), - ?event(debug_volume, - {update_store_path, current_store, CurrentStore} - ), - case hb_volume:change_node_store(StorePath, CurrentStore) of - {ok, #{<<"store">> := NewStore} = StoreResult} -> - ?event(debug_volume, - {update_store_path, store_change_success, - {result, StoreResult} - } - ), - update_node_config(StorePath, NewStore, Opts); - {error, StoreError} -> - ?event(debug_volume, - {update_store_path, store_change_error, - {error, StoreError} - } - ), - {error, <<"Failed to update store">>} - end. -``` - -### update_node_config - -Update the node's configuration with the new store. - -```erlang --spec update_node_config(term(), term(), map()) -> - {ok, binary()} | {error, binary()}. -``` - -```erlang -update_node_config(StorePath, NewStore, Opts) -> - ?event(debug_volume, - {update_node_config, entry, - {updating_config, StorePath, NewStore} - } - ), - GenesisWasmDBDir = - hb_opts:get( - genesis_wasm_db_dir, - "cache-mainnet/genesis-wasm", - Opts - ), - ?event(debug_volume, - {update_node_config, genesis_dir, GenesisWasmDBDir} - ), - BinaryGenesisWasmDBDir = list_to_binary(GenesisWasmDBDir), - FullGenesisPath = - <>, - ?event(debug_volume, - {update_node_config, full_path_created, FullGenesisPath} - ), - ok = - hb_http_server:set_opts( - Opts#{ - store => NewStore, - genesis_wasm_db_dir => FullGenesisPath - } - ), - ?event(debug_volume, - {update_node_config, config_updated, success} - ), -``` - ---- - -*Generated from [dev_volume.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_volume.erl)* diff --git a/docs/book/src/dev_wasi.erl.md b/docs/book/src/dev_wasi.erl.md deleted file mode 100644 index 2281d3110..000000000 --- a/docs/book/src/dev_wasi.erl.md +++ /dev/null @@ -1,364 +0,0 @@ -# dev_wasi - -[View source on 
GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_wasi.erl) - -A virtual filesystem device. -Implements a file-system-as-map structure, which is traversible externally. -Each file is a binary and each directory is an AO-Core message. -Additionally, this module adds a series of WASI-preview-1 compatible -functions for accessing the filesystem as imported functions by WASM -modules. - ---- - -## Exported Functions - -- `clock_time_get/3` -- `compute/1` -- `fd_read/3` -- `fd_write/3` -- `init/3` -- `path_open/3` -- `stdout/1` - ---- - -### init - -A virtual filesystem device. -On-boot, initialize the virtual file system with: - -```erlang -init(M1, _M2, Opts) -> - ?event(running_init), - MsgWithLib = - hb_ao:set( - M1, - #{ - <<"wasm/stdlib/wasi_snapshot_preview1">> => - #{ <<"device">> => <<"wasi@1.0">>} - }, - Opts - ), - MsgWithFDs = - hb_ao:set( - MsgWithLib, - <<"file-descriptors">>, - ?INIT_FDS, - Opts - ), - CompleteMsg = - hb_ao:set( - MsgWithFDs, - <<"vfs">>, - ?INIT_VFS, - Opts - ), - {ok, CompleteMsg}. -``` - -### compute - -```erlang -compute(Msg1) -> - {ok, Msg1}. -``` - -### stdout - -Return the stdout buffer from a state message. -Adds a file descriptor to the state message. - -```erlang -stdout(M) -> - hb_ao:get(<<"vfs/dev/stdout">>, M). -%path_open(M, Instance, [FDPtr, LookupFlag, PathPtr|_]) -> -``` - -### path_open - -Return the stdout buffer from a state message. -Adds a file descriptor to the state message. 
- -```erlang -path_open(Msg1, Msg2, Opts) -> - FDs = hb_ao:get(<<"file-descriptors">>, Msg1, Opts), - Instance = hb_private:get(<<"instance">>, Msg1, Opts), - [FDPtr, LookupFlag, PathPtr|_] = hb_ao:get(<<"args">>, Msg2, Opts), - ?event({path_open, FDPtr, LookupFlag, PathPtr}), - Path = hb_beamr_io:read_string(Instance, PathPtr), - ?event({path_open, Path}), - FD = #{ - <<"index">> := Index - } = - case hb_ao:get(<<"vfs/", Path/binary>>, Msg1, Opts) of - not_found -> - #{ - <<"index">> => length(hb_ao:keys(FDs)) + 1, - <<"filename">> => Path, - <<"offset">> => 0 - }; - F -> F - end, - { - ok, - #{ - <<"state">> => - hb_ao:set( - Msg1, - <<"vfs/", Path/binary>>, - FD - ), - <<"results">> => [0, Index] - } - }. -``` - -### fd_write - -WASM stdlib implementation of `fd_write`, using the WASI-p1 standard - -```erlang -fd_write(Msg1, Msg2, Opts) -> - State = hb_ao:get(<<"state">>, Msg1, Opts), - Instance = hb_private:get(<<"wasm/instance">>, State, Opts), - [FD, Ptr, Vecs, RetPtr|_] = hb_ao:get(<<"args">>, Msg2, Opts), - ?event({fd_write, {fd, FD}, {ptr, Ptr}, {vecs, Vecs}, {retptr, RetPtr}}), - Signature = hb_ao:get(<<"func-sig">>, Msg2, Opts), - ?event({signature, Signature}), - fd_write(State, Instance, [FD, Ptr, Vecs, RetPtr], 0, Opts). 
-``` - -### fd_write - -```erlang -fd_write(S, Instance, [_, _Ptr, 0, RetPtr], BytesWritten, _Opts) -> - hb_beamr_io:write( - Instance, - RetPtr, - <> - ), - {ok, #{ <<"state">> => S, <<"results">> => [0] }}; -``` - -### fd_write - -```erlang -fd_write(S, Instance, [FDnum, Ptr, Vecs, RetPtr], BytesWritten, Opts) -> - FDNumStr = integer_to_binary(FDnum), - FD = hb_ao:get(<<"file-descriptors/", FDNumStr/binary>>, S, Opts), - Filename = hb_ao:get(<<"filename">>, FD, Opts), - StartOffset = hb_ao:get(<<"offset">>, FD, Opts), - {VecPtr, Len} = parse_iovec(Instance, Ptr), - {ok, Data} = hb_beamr_io:read(Instance, VecPtr, Len), - Before = - binary:part( - OrigData = hb_ao:get(<<"data">>, FD, Opts), - 0, - StartOffset - ), - After = - binary:part(OrigData, StartOffset, byte_size(OrigData) - StartOffset), - S1 = - hb_ao:set( - S, - <<"file-descriptors/", FDNumStr/binary, "/offset">>, - StartOffset + byte_size(Data), - Opts - ), - S2 = - hb_ao:set( - S1, - <<"vfs/", Filename/binary>>, - <>, - Opts - ), - fd_write( - S2, - Instance, - [FD, Ptr + 16, Vecs - 1, RetPtr], - BytesWritten + byte_size(Data), - Opts - ). -``` - -### fd_read - -Read from a file using the WASI-p1 standard interface. - -```erlang -fd_read(Msg1, Msg2, Opts) -> - State = hb_ao:get(<<"state">>, Msg1, Opts), - Instance = hb_private:get(<<"wasm/instance">>, State, Opts), - [FD, VecsPtr, NumVecs, RetPtr|_] = hb_ao:get(<<"args">>, Msg2, Opts), - Signature = hb_ao:get(<<"func-sig">>, Msg2, Opts), - ?event({signature, Signature}), - fd_read(State, Instance, [FD, VecsPtr, NumVecs, RetPtr], 0, Opts). 
-``` - -### fd_read - -```erlang -fd_read(S, Instance, [FD, _VecsPtr, 0, RetPtr], BytesRead, _Opts) -> - ?event({{completed_read, FD, BytesRead}}), - hb_beamr_io:write(Instance, RetPtr, - <>), - {ok, #{ <<"state">> => S, <<"results">> => [0] }}; -``` - -### fd_read - -```erlang -fd_read(S, Instance, [FDNum, VecsPtr, NumVecs, RetPtr], BytesRead, Opts) -> - ?event({fd_read, FDNum, VecsPtr, NumVecs, RetPtr}), - % Parse the request - FDNumStr = integer_to_binary(FDNum), - Filename = - hb_ao:get( - <<"file-descriptors/", FDNumStr/binary, "/filename">>, S, Opts), - {VecPtr, Len} = parse_iovec(Instance, VecsPtr), - % Read the bytes from the file - Data = hb_ao:get(<<"vfs/", Filename/binary>>, S, Opts), - Offset = - hb_ao:get( - <<"file-descriptors/", FDNumStr/binary, "/offset">>, S, Opts), - ReadSize = min(Len, byte_size(Data) - Offset), - Bin = binary:part(Data, Offset, ReadSize), - % Write the bytes to the WASM Instance - ok = hb_beamr_io:write(Instance, VecPtr, Bin), - fd_read( - hb_ao:set( - S, - <<"file-descriptors/", FDNumStr/binary, "/offset">>, - Offset + ReadSize, - Opts - ), - Instance, - [FDNum, VecsPtr + 16, NumVecs - 1, RetPtr], - BytesRead + ReadSize, - Opts - ). -``` - -### parse_iovec - -Parse an iovec in WASI-preview-1 format. - -```erlang -parse_iovec(Instance, Ptr) -> - {ok, VecStruct} = hb_beamr_io:read(Instance, Ptr, 16), - << - BinPtr:64/little-unsigned-integer, - Len:64/little-unsigned-integer - >> = VecStruct, - {BinPtr, Len}. -``` - -### clock_time_get - -```erlang -clock_time_get(Msg1, _Msg2, Opts) -> - ?event({clock_time_get, {returning, 1}}), - State = hb_ao:get(<<"state">>, Msg1, Opts), - {ok, #{ <<"state">> => State, <<"results">> => [1] }}. -%%% Tests -``` - -### init - -```erlang -init() -> - application:ensure_all_started(hb). 
-``` - -### generate_wasi_stack - -```erlang -generate_wasi_stack(File, Func, Params) -> - init(), - Msg0 = dev_wasm:cache_wasm_image(File), - Msg1 = Msg0#{ - <<"device">> => <<"stack@1.0">>, - <<"device-stack">> => [<<"wasi@1.0">>, <<"wasm-64@1.0">>], - <<"output-prefixes">> => [<<"wasm">>, <<"wasm">>], - <<"stack-keys">> => [<<"init">>, <<"compute">>], - <<"function">> => Func, - <<"params">> => Params - }, - {ok, Msg2} = hb_ao:resolve(Msg1, <<"init">>, #{}), - Msg2. -``` - -### vfs_is_serializable_test - -```erlang -vfs_is_serializable_test() -> - StackMsg = generate_wasi_stack("test/test-print.wasm", <<"hello">>, []), - VFSMsg = hb_ao:get(<<"vfs">>, StackMsg), - VFSMsg2 = - hb_message:minimize( - hb_message:convert( - hb_message:convert(VFSMsg, <<"httpsig@1.0">>, #{}), - <<"structured@1.0">>, - <<"httpsig@1.0">>, - #{}) - ), - ?assert(hb_message:match(VFSMsg, VFSMsg2)). -``` - -### wasi_stack_is_serializable_test - -```erlang -wasi_stack_is_serializable_test() -> - Msg = generate_wasi_stack("test/test-print.wasm", <<"hello">>, []), - HTTPSigMsg = hb_message:convert(Msg, <<"httpsig@1.0">>, #{}), - Msg2 = hb_message:convert(HTTPSigMsg, <<"structured@1.0">>, <<"httpsig@1.0">>, #{}), - ?assert(hb_message:match(Msg, Msg2)). 
-``` - -### basic_aos_exec_test - -```erlang -basic_aos_exec_test() -> - Init = generate_wasi_stack("test/aos-2-pure-xs.wasm", <<"handle">>, []), - Msg = gen_test_aos_msg("return 1 + 1"), - Env = gen_test_env(), - Instance = hb_private:get(<<"wasm/instance">>, Init, #{}), - {ok, Ptr1} = hb_beamr_io:malloc(Instance, byte_size(Msg)), - ?assertNotEqual(0, Ptr1), - hb_beamr_io:write(Instance, Ptr1, Msg), - {ok, Ptr2} = hb_beamr_io:malloc(Instance, byte_size(Env)), - ?assertNotEqual(0, Ptr2), - hb_beamr_io:write(Instance, Ptr2, Env), - % Read the strings to validate they are correctly passed - {ok, MsgBin} = hb_beamr_io:read(Instance, Ptr1, byte_size(Msg)), - {ok, EnvBin} = hb_beamr_io:read(Instance, Ptr2, byte_size(Env)), - ?assertEqual(Env, EnvBin), - ?assertEqual(Msg, MsgBin), - Ready = Init#{ <<"parameters">> => [Ptr1, Ptr2] }, - {ok, StateRes} = hb_ao:resolve(Ready, <<"compute">>, #{}), - [Ptr] = hb_ao:get(<<"results/wasm/output">>, StateRes), - {ok, Output} = hb_beamr_io:read_string(Instance, Ptr), - ?event({got_output, Output}), - #{ <<"response">> := #{ <<"Output">> := #{ <<"data">> := Data }} } - = hb_json:decode(Output), - ?assertEqual(<<"2">>, Data). -%%% Test Helpers -``` - -### gen_test_env - -```erlang -gen_test_env() -> - <<"{\"Process\":{\"Id\":\"AOS\",\"Owner\":\"FOOBAR\",\"Tags\":[{\"name\":\"Name\",\"value\":\"Thomas\"}, {\"name\":\"Authority\",\"value\":\"FOOBAR\"}]}}\0">>. -``` - -### gen_test_aos_msg - -```erlang -gen_test_aos_msg(Command) -> -``` - ---- - -*Generated from [dev_wasi.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_wasi.erl)* diff --git a/docs/book/src/dev_wasm.erl.md b/docs/book/src/dev_wasm.erl.md deleted file mode 100644 index 130fc5ef8..000000000 --- a/docs/book/src/dev_wasm.erl.md +++ /dev/null @@ -1,458 +0,0 @@ -# dev_wasm - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_wasm.erl) - -A device that executes a WASM image on messages using the Memory-64 -preview standard. 
In the backend, this device uses `beamr`: An Erlang wrapper -for WAMR, the WebAssembly Micro Runtime. -The device has the following requirements and interface: -
-    M1/Init ->
-        Assumes:
-            M1/process
-            M1/[Prefix]/image
-        Generates:
-            /priv/[Prefix]/instance
-            /priv/[Prefix]/import-resolver
-        Side-effects:
-            Creates a WASM executor loaded in memory of the HyperBEAM node.
-    M1/Compute ->
-        Assumes:
-            M1/priv/[Prefix]/instance
-            M1/priv/[Prefix]/import-resolver
-            M1/process
-            M2/message
-            M2/message/function OR M1/function
-            M2/message/parameters OR M1/parameters
-        Generates:
-            /results/[Prefix]/type
-            /results/[Prefix]/output
-        Side-effects:
-            Calls the WASM executor with the message and process.
-    M1/[Prefix]/state ->
-        Assumes:
-            M1/priv/[Prefix]/instance
-        Generates:
-            Raw binary WASM state
-
- ---- - -## Exported Functions - -- `cache_wasm_image/1` -- `cache_wasm_image/2` -- `compute/3` -- `import/3` -- `info/2` -- `init/3` -- `instance/3` -- `normalize/3` -- `snapshot/3` -- `terminate/3` - ---- - -### info - -A device that executes a WASM image on messages using the Memory-64 -Export all functions aside the `instance/3` function. - -```erlang -info(_Msg1, _Opts) -> - #{ - excludes => [instance] - }. -``` - -### init - -Boot a WASM image on the image stated in the `process/image` field of - -```erlang -init(M1, M2, Opts) -> - ?event(running_init), - % Where we should read initial parameters from. -``` - -### default_import_resolver - -Take a BEAMR import call and resolve it using `hb_ao`. - -```erlang -default_import_resolver(Msg1, Msg2, Opts) -> - #{ - instance := WASM, - module := Module, - func := Func, - args := Args, - func_sig := Signature - } = Msg2, - Prefix = dev_stack:prefix(Msg1, Msg2, Opts), - {ok, Msg3} = - hb_ao:resolve( - hb_private:set( - Msg1, - #{ <> => WASM }, - Opts - ), - #{ - <<"path">> => <<"import">>, - <<"module">> => list_to_binary(Module), - <<"func">> => list_to_binary(Func), - <<"args">> => Args, - <<"func-sig">> => list_to_binary(Signature) - }, - Opts - ), - NextState = hb_ao:get(state, Msg3, Opts), - Response = hb_ao:get(results, Msg3, Opts), - {ok, Response, NextState}. -``` - -### compute - -Call the WASM executor with a message that has been prepared by a prior - -```erlang -compute(RawM1, M2, Opts) -> - % Normalize the message to have an open WASM instance, but no literal `State'. -``` - -### normalize - -Normalize the message to have an open WASM instance, but no literal -Serialize the WASM state to a binary. 
- -```erlang -normalize(RawM1, M2, Opts) -> - ?event({normalize_raw_m1, RawM1}), - M3 = - case instance(RawM1, M2, Opts) of - not_found -> - DeviceKey = - case hb_ao:get(<<"device-key">>, RawM1, Opts) of - not_found -> []; - Key -> [Key] - end, - ?event( - {no_instance_attempting_to_get_snapshot, - {msg1, RawM1}, {device_key, DeviceKey} - } - ), - Memory = - hb_ao:get( - [<<"snapshot">>] ++ DeviceKey ++ [<<"body">>], - {as, dev_message, RawM1}, - Opts - ), - case Memory of - not_found -> throw({error, no_wasm_instance_or_snapshot}); - State -> - {ok, M1} = init(RawM1, State, Opts), - Res = hb_beamr:deserialize(instance(M1, M2, Opts), State), - ?event(snapshot, {wasm_deserialized, {result, Res}}), - M1 - end; - _ -> - ?event(wasm_instance_found_not_deserializing), - RawM1 - end, - dev_message:set(M3, #{ <<"snapshot">> => unset }, Opts). -``` - -### snapshot - -Normalize the message to have an open WASM instance, but no literal -Serialize the WASM state to a binary. - -```erlang -snapshot(M1, M2, Opts) -> - ?event(snapshot, generating_snapshot), - Instance = instance(M1, M2, Opts), - {ok, Serialized} = hb_beamr:serialize(Instance), - {ok, - #{ - <<"body">> => Serialized - } - }. -``` - -### terminate - -Tear down the WASM executor. - -```erlang -terminate(M1, M2, Opts) -> - ?event(terminate_called_on_dev_wasm), - Prefix = dev_stack:prefix(M1, M2, Opts), - Instance = instance(M1, M2, Opts), - hb_beamr:stop(Instance), - {ok, hb_private:set(M1, - #{ - <> => unset - }, - Opts - )}. -``` - -### instance - -Get the WASM instance from the message. Note that this function is exported - -```erlang -instance(M1, M2, Opts) -> - Prefix = dev_stack:prefix(M1, M2, Opts), - Path = <>, - ?event({searching_for_instance, Path, M1}), - hb_private:get(Path, M1, Opts#{ hashpath => ignore }). -``` - -### import - -Handle standard library calls by: - -```erlang -import(Msg1, Msg2, Opts) -> - % 1. Adjust the path to the stdlib. 
-``` - -### undefined_import_stub - -Log the call to the standard library as an event, and write the - -```erlang -undefined_import_stub(Msg1, Msg2, Opts) -> - ?event({unimplemented_dev_wasm_call, {msg1, Msg1}, {msg2, Msg2}}), - Prefix = dev_stack:prefix(Msg1, Msg2, Opts), - UndefinedCallsPath = - <<"state/results/", Prefix/binary, "/undefined-calls">>, - Msg3 = hb_ao:set( - Msg1, - #{ - UndefinedCallsPath => - [ - Msg2 - | - case hb_ao:get(UndefinedCallsPath, Msg1, Opts) of - not_found -> []; - X -> X - end - ] - }, - Opts - ), - {ok, #{ state => Msg3, results => [0] }}. -``` - -### init - -```erlang -init() -> - application:ensure_all_started(hb), - hb:init(). -``` - -### input_prefix_test - -```erlang -input_prefix_test() -> - init(), - #{ <<"image">> := ImageID } = cache_wasm_image("test/test.wasm"), - Msg1 = - #{ - <<"device">> => <<"wasm-64@1.0">>, - <<"input-prefix">> => <<"test-in">>, - <<"test-in">> => #{ <<"image">> => ImageID } - }, - {ok, Msg2} = hb_ao:resolve(Msg1, <<"init">>, #{}), - ?event({after_init, Msg2}), - Priv = hb_private:from_message(Msg2), - ?assertMatch( - {ok, Instance} when is_pid(Instance), - hb_ao:resolve(Priv, <<"instance">>, #{}) - ), - ?assertMatch( - {ok, Fun} when is_function(Fun), - hb_ao:resolve(Priv, <<"import-resolver">>, #{}) - ). -``` - -### process_prefixes_test - -Test that realistic prefixing for a `dev_process` works -- - -```erlang -process_prefixes_test() -> - init(), - Msg1 = - #{ - <<"device">> => <<"wasm-64@1.0">>, - <<"output-prefix">> => <<"wasm">>, - <<"input-prefix">> => <<"process">>, - <<"process">> => cache_wasm_image("test/test.wasm") - }, - {ok, Msg3} = hb_ao:resolve(Msg1, <<"init">>, #{}), - ?event({after_init, Msg3}), - Priv = hb_private:from_message(Msg3), - ?assertMatch( - {ok, Instance} when is_pid(Instance), - hb_ao:resolve(Priv, <<"wasm/instance">>, #{}) - ), - ?assertMatch( - {ok, Fun} when is_function(Fun), - hb_ao:resolve(Priv, <<"wasm/import-resolver">>, #{}) - ). 
-``` - -### init_test - -```erlang -init_test() -> - init(), - Msg = cache_wasm_image("test/test.wasm"), - {ok, Msg1} = hb_ao:resolve(Msg, <<"init">>, #{}), - ?event({after_init, Msg1}), - Priv = hb_private:from_message(Msg1), - ?assertMatch( - {ok, Instance} when is_pid(Instance), - hb_ao:resolve(Priv, <<"instance">>, #{}) - ), - ?assertMatch( - {ok, Fun} when is_function(Fun), - hb_ao:resolve(Priv, <<"import-resolver">>, #{}) - ). -``` - -### basic_execution_test - -```erlang -basic_execution_test() -> - ?assertEqual( - {ok, [120.0]}, - test_run_wasm("test/test.wasm", <<"fac">>, [5.0], #{}) - ). -``` - -### basic_execution_64_test - -```erlang -basic_execution_64_test() -> - ?assertEqual( - {ok, [120.0]}, - test_run_wasm("test/test-64.wasm", <<"fac">>, [5.0], #{}) - ). -``` - -### imported_function_test - -```erlang -imported_function_test() -> - ?assertEqual( - {ok, [32]}, - test_run_wasm( - "test/pow_calculator.wasm", - <<"pow">>, - [2, 5], - #{ - <<"stdlib/my_lib">> => - #{ <<"device">> => <<"test-device@1.0">> } - } - ) - ). -``` - -### benchmark_test - -```erlang -benchmark_test() -> - BenchTime = 0.5, - init(), - Msg0 = cache_wasm_image("test/test-64.wasm"), - {ok, Msg1} = hb_ao:resolve(Msg0, <<"init">>, #{}), - Msg2 = - hb_maps:merge( - Msg1, - #{ - <<"function">> => <<"fac">>, - <<"parameters">> => [5.0] - }, - #{} - ), - Iterations = - hb_test_utils:benchmark( - fun() -> - hb_ao:resolve(Msg2, <<"compute">>, #{}) - end, - BenchTime - ), - ?event(benchmark, {scheduled, Iterations}), - hb_test_utils:benchmark_print( - <<"Through AO-Core:">>, - <<"resolutions">>, - Iterations, - BenchTime - ), - ?assert(Iterations > 5), - ok. -``` - -### state_export_and_restore_test - -```erlang -state_export_and_restore_test() -> - init(), - % Generate a WASM message. We use the pow_calculator because it has a - % reasonable amount of memory to work with. -``` - -### cache_wasm_image - -```erlang -cache_wasm_image(Image) -> - cache_wasm_image(Image, #{}). 
-``` - -### cache_wasm_image - -```erlang -cache_wasm_image(Image, Opts) -> - {ok, Bin} = file:read_file(Image), - Msg = #{ <<"body">> => Bin }, - {ok, ID} = hb_cache:write(Msg, Opts), - #{ - <<"device">> => <<"wasm-64@1.0">>, - <<"image">> => ID - }. -``` - -### test_run_wasm - -```erlang -test_run_wasm(File, Func, Params, AdditionalMsg) -> - init(), - Msg0 = cache_wasm_image(File), - {ok, Msg1} = hb_ao:resolve(Msg0, <<"init">>, #{}), - ?event({after_init, Msg1}), - Msg2 = - hb_maps:merge( - Msg1, - hb_ao:set( - #{ - <<"function">> => Func, - <<"parameters">> => Params - }, - AdditionalMsg, - #{ hashpath => ignore } - ), - #{} - ), - ?event({after_setup, Msg2}), - {ok, StateRes} = hb_ao:resolve(Msg2, <<"compute">>, #{}), - ?event({after_resolve, StateRes}), - hb_ao:resolve(StateRes, <<"results/output">>, #{}). -``` - ---- - -*Generated from [dev_wasm.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_wasm.erl)* diff --git a/docs/book/src/dev_whois.erl.md b/docs/book/src/dev_whois.erl.md deleted file mode 100644 index 86026ae48..000000000 --- a/docs/book/src/dev_whois.erl.md +++ /dev/null @@ -1,94 +0,0 @@ -# dev_whois - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_whois.erl) - -A device for returning the IP/host information of a requester or -itself. - ---- - -## Exported Functions - -- `echo/3` -- `ensure_host/1` -- `node/3` - ---- - -### echo - -A device for returning the IP/host information of a requester or -Return the calculated host information for the requester. -Return the host information for the node. Sets the `host` key in the - -```erlang -echo(_, Req, Opts) -> - {ok, hb_maps:get(<<"ao-peer">>, Req, <<"unknown">>, Opts)}. -``` - -### node - -A device for returning the IP/host information of a requester or -Return the calculated host information for the requester. -Return the host information for the node. 
Sets the `host` key in the - -```erlang -node(_, _, Opts) -> - case ensure_host(Opts) of - {ok, NewOpts} -> - {ok, hb_opts:get(host, <<"unknown">>, NewOpts)}; - Error -> - Error - end. -``` - -### ensure_host - -Return the node message ensuring that the host is set. If it is not, we - -```erlang -ensure_host(Opts) -> - case hb_opts:get(host, <<"unknown">>, Opts) of - <<"unknown">> -> - case bootstrap_node_echo(Opts) of - {ok, Host} -> - % Set the host information in the persisted node message. -``` - -### bootstrap_node_echo - -Find the local host information from the specified bootstrap node. - -```erlang -bootstrap_node_echo(Opts) -> - case hb_opts:get(host_bootstrap_node, false, Opts) of - false -> - {error, <<"No bootstrap node configured.">>}; - BootstrapNode -> - hb_http:get(BootstrapNode, <<"/~whois@1.0/echo">>, Opts) - end. -``` - -### find_self_test - -```erlang -find_self_test() -> - BoostrapNode = - hb_http_server:start_node(#{ - priv_wallet => ar_wallet:new() - }), - PeerNode = - hb_http_server:start_node(#{ - port => Port = rand:uniform(40000) + 10000, - priv_wallet => ar_wallet:new(), - host_bootstrap_node => BoostrapNode, - http_client => httpc - }), - ?event({nodes, {peer, PeerNode}, {bootstrap, BoostrapNode}}), - {ok, ReceivedPeerHost} = hb_http:get(PeerNode, <<"/~whois@1.0/node">>, #{}), - ?event({find_self_test, ReceivedPeerHost}), -``` - ---- - -*Generated from [dev_whois.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/dev_whois.erl)* diff --git a/docs/book/src/hb.erl.md b/docs/book/src/hb.erl.md deleted file mode 100644 index 6c5b84d88..000000000 --- a/docs/book/src/hb.erl.md +++ /dev/null @@ -1,423 +0,0 @@ -# hb - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb.erl) - -Hyperbeam is a decentralized node implementing the AO-Core protocol -on top of Arweave. -This protocol offers a computation layer for executing arbitrary logic on -top of the network's data. 
-Arweave is built to offer a robust, permanent storage layer for static data -over time. It can be seen as a globally distributed key-value store that -allows users to lookup IDs to retrieve data at any point in time: - `Arweave(ID) => Message` -Hyperbeam adds another layer of functionality on top of Arweave's protocol: -Allowing users to store and retrieve not only arbitrary bytes, but also to -perform execution of computation upon that data: - `Hyperbeam(Message1, Message2) => Message3` -When Hyperbeam executes a message, it will return a new message containing -the result of that execution, as well as signed commitments of its -correctness. If the computation that is executed is deterministic, recipients -of the new message are able to verify that the computation was performed -correctly. The new message may be stored back to Arweave if desired, -forming a permanent, verifiable, and decentralized log of computation. -The mechanisms described above form the basis of a decentralized and -verifiable compute engine without any relevant protocol-enforced -scalability limits. It is an implementation of a global, shared -supercomputer. -Hyperbeam can be used for an extremely large variety of applications, from -serving static Arweave data with signed commitments of correctness, to -executing smart contracts that have _built-in_ HTTP APIs. The Hyperbeam -node implementation implements AO, an Actor-Oriented process-based -environment for orchestrating computation over Arweave messages in order to -facilitate the execution of more traditional, consensus-based smart -contracts. -The core abstractions of the Hyperbeam node are broadly as follows: -1. The `hb` and `hb_opts` modules manage the node's configuration, - environment variables, and debugging tools. -2. The `hb_http` and `hb_http_server` modules manage all HTTP-related - functionality. `hb_http_server` handles turning received HTTP requests - into messages and applying those messages with the appropriate devices. 
- `hb_http` handles making requests and responding with messages. `cowboy` - is used to implement the underlying HTTP server. -3. `hb_ao` implements the computation logic of the node: A mechanism - for resolving messages to other messages, via the application of logic - implemented in `devices`. `hb_ao` also manages the loading of Erlang - modules for each device into the node's environment. There are many - different default devices implemented in the hyperbeam node, using the - namespace `dev_*`. Some of the critical components are: - - `dev_message`: The default handler for all messages that do not - specify their own device. The message device is also used to resolve - keys that are not implemented by the device specified in a message, - unless otherwise signalled. - - `dev_stack`: The device responsible for creating and executing stacks - of other devices on messages that request it. There are many uses for - this device, one of which is the resolution of AO processes. - - `dev_p4`: The device responsible for managing payments for the services - provided by the node. -4. `hb_store`, `hb_cache` and the store implementations forms a layered - system for managing the node's access to persistent storage. `hb_cache` - is used as a resolution mechanism for reading and writing messages, while - `hb_store` provides an abstraction over the underlying persistent key-value - byte storage mechanisms. Example `hb_store` mechanisms can be found in - `hb_store_fs` and `hb_store_remote_node`. -5. `ar_*` modules implement functionality related to the base-layer Arweave - protocol and are largely unchanged from their counterparts in the Arweave - node codebase presently maintained by the Digital History Association - (@dha-team/Arweave). -You can find documentation of a similar form to this note in each of the core -modules of the hyperbeam node. 
- ---- - -## Exported Functions - -- `address/0` -- `build/0` -- `debug_wait/4` -- `deploy_scripts/0` -- `init/0` -- `no_prod/3` -- `now/0` -- `read/1` -- `read/2` -- `start_mainnet/0` -- `start_mainnet/1` -- `start_simple_pay/0` -- `start_simple_pay/1` -- `start_simple_pay/2` -- `topup/3` -- `topup/4` -- `wallet/0` -- `wallet/1` - ---- - -### init - -Hyperbeam is a decentralized node implementing the AO-Core protocol -Initialize system-wide settings for the hyperbeam node. - -```erlang -init() -> - hb_name:start(), - ?event({setting_debug_stack_depth, hb_opts:get(debug_stack_depth)}), - Old = erlang:system_flag(backtrace_depth, hb_opts:get(debug_stack_depth)), - ?event({old_system_stack_depth, Old}), - ok. -``` - -### start_mainnet - -Start a mainnet server without payments. - -```erlang -start_mainnet() -> - start_mainnet(hb_opts:get(port)). -``` - -### start_mainnet - -```erlang -start_mainnet(Port) when is_integer(Port) -> - start_mainnet(#{ port => Port }); -``` - -### start_mainnet - -Start a server with a `simple-pay@1.0` pre-processor. - -```erlang -start_mainnet(Opts) -> - application:ensure_all_started([ - kernel, - stdlib, - inets, - ssl, - ranch, - cowboy, - gun, - os_mon - ]), - Wallet = hb:wallet(hb_opts:get(priv_key_location, no_viable_wallet_path, Opts)), - BaseOpts = hb_http_server:set_default_opts(Opts), - hb_http_server:start_node( - FinalOpts = - BaseOpts#{ - store => #{ <<"store-module">> => hb_store_fs, <<"name">> => <<"cache-mainnet">> }, - priv_wallet => Wallet - } - ), - Address = - case hb_opts:get(address, no_address, FinalOpts) of - no_address -> <<"[ !!! no-address !!! ]">>; - Addr -> Addr - end, - io:format( - "Started mainnet node at http://localhost:~p~n" - "Operator: ~s~n", - [hb_maps:get(port, Opts, undefined, Opts), Address] - ), - <<"http://localhost:", (integer_to_binary(hb_maps:get(port, Opts, undefined, Opts)))/binary>>. -``` - -### start_simple_pay - -Start a server with a `simple-pay@1.0` pre-processor. 
- -```erlang -start_simple_pay() -> - start_simple_pay(address()). -``` - -### start_simple_pay - -```erlang -start_simple_pay(Addr) -> - rand:seed(default), - start_simple_pay(Addr, 10000 + rand:uniform(50000)). -``` - -### start_simple_pay - -```erlang -start_simple_pay(Addr, Port) -> - do_start_simple_pay(#{ port => Port, operator => Addr }). -``` - -### do_start_simple_pay - -Upload all scripts from the `scripts` directory to the node to Arweave, - -```erlang -do_start_simple_pay(Opts) -> - application:ensure_all_started([ - kernel, - stdlib, - inets, - ssl, - ranch, - cowboy, - gun, - os_mon - ]), - Port = hb_maps:get(port, Opts, undefined, Opts), - Processor = - #{ - <<"device">> => <<"p4@1.0">>, - <<"ledger-device">> => <<"simple-pay@1.0">>, - <<"pricing-device">> => <<"simple-pay@1.0">> - }, - hb_http_server:start_node( - Opts#{ - on => #{ - <<"request">> => Processor, - <<"response">> => Processor - } - } - ), - io:format( - "Started simple-pay node at http://localhost:~p~n" - "Operator: ~s~n", - [Port, address()] - ), - <<"http://localhost:", (integer_to_binary(Port))/binary>>. -``` - -### deploy_scripts - -Upload all scripts from the `scripts` directory to the node to Arweave, - -```erlang -deploy_scripts() -> - deploy_scripts("scripts/"). -``` - -### deploy_scripts - -Upload all scripts from the `scripts` directory to the node to Arweave, - -```erlang -deploy_scripts(Dir) -> - Files = filelib:wildcard(Dir ++ "*.lua"), - lists:foreach(fun(File) -> - {ok, Script} = file:read_file(File), - Msg = - hb_message:commit( - #{ - <<"data-protocol">> => <<"ao">>, - <<"variant">> => <<"ao.N.1">>, - <<"type">> => <<"module">>, - <<"content-type">> => <<"application/lua">>, - <<"name">> => hb_util:bin(File), - <<"body">> => Script - }, - wallet(), - <<"ans104@1.0">> - ), - {Status, _} = hb_client:upload(Msg, #{}, <<"ans104@1.0">>), - io:format( - "~s: ~s (upload status: ~p)~n", - [File, hb_util:id(Msg), Status] - ) - end, Files), - ok. 
-``` - -### topup - -Helper for topping up a user's balance on a simple-pay node. - -```erlang -topup(Node, Amount, Recipient) -> - topup(Node, Amount, Recipient, wallet()). -``` - -### topup - -```erlang -topup(Node, Amount, Recipient, Wallet) -> - Message = hb_message:commit( - #{ - <<"path">> => <<"/~simple-pay@1.0/topup">>, - <<"amount">> => Amount, - <<"recipient">> => Recipient - }, - Wallet - ), - hb_http:get(Node, Message, #{}). -``` - -### wallet - -```erlang -wallet() -> - wallet(hb_opts:get(priv_key_location)). -``` - -### wallet - -```erlang -wallet(Location) -> - wallet(Location, #{}). -``` - -### wallet - -```erlang -wallet(Location, Opts) -> - case file:read_file_info(Location) of - {ok, _} -> - ar_wallet:load_keyfile(Location, Opts); - {error, _} -> - Res = ar_wallet:new_keyfile(?DEFAULT_KEY_TYPE, Location), - ?event({created_new_keyfile, Location, address(Res)}), - Res - end. -``` - -### address - -Get the address of a wallet. Defaults to the address of the wallet - -```erlang -address() -> address(wallet()). -``` - -### address - -Get the address of a wallet. Defaults to the address of the wallet - -```erlang -address(Wallet) when is_tuple(Wallet) -> - hb_util:encode(ar_wallet:to_address(Wallet)); -``` - -### address - -Get the address of a wallet. Defaults to the address of the wallet -Debugging function to read a message from the cache. - -```erlang -address(Location) -> address(wallet(Location)). -``` - -### read - -Get the address of a wallet. Defaults to the address of the wallet -Debugging function to read a message from the cache. - -```erlang -read(ID) -> read(ID, local). -``` - -### read - -Get the address of a wallet. Defaults to the address of the wallet -Debugging function to read a message from the cache. - -```erlang -read(ID, ScopeAtom) when is_atom(ScopeAtom) -> - read(ID, hb_store:scope(hb_opts:get(store), ScopeAtom)); -``` - -### read - -Get the address of a wallet. 
Defaults to the address of the wallet -Debugging function to read a message from the cache. - -```erlang -read(ID, Store) -> - hb_cache:read(Store, hb_util:id(ID)). -``` - -### no_prod - -Utility function to throw an error if the current mode is prod and - -```erlang -no_prod(X, Mod, Line) -> - case hb_opts:get(mode) of - prod -> - io:format(standard_error, - "=== DANGER: NON-PROD READY CODE INVOKED IN PROD ===~n", []), - io:format(standard_error, "~w:~w: ~p~n", [Mod, Line, X]), - case hb_opts:get(exit_on_no_prod) of - true -> init:stop(); - false -> throw(X) - end; - _ -> X - end. -``` - -### now - -Utility function to get the current time in milliseconds. - -```erlang -now() -> - erlang:system_time(millisecond). -``` - -### build - -Utility function to hot-recompile and load the hyperbeam environment. -Utility function to wait for a given amount of time, printing a debug - -```erlang -build() -> - r3:do(compile, [{dir, "src"}]). -``` - -### debug_wait - -Utility function to hot-recompile and load the hyperbeam environment. -Utility function to wait for a given amount of time, printing a debug - -```erlang -debug_wait(T, Mod, Func, Line) -> - ?event(wait, {debug_wait, {T, Mod, Func, Line}}), -``` - ---- - -*Generated from [hb.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb.erl)* diff --git a/docs/book/src/hb_ao.erl.md b/docs/book/src/hb_ao.erl.md deleted file mode 100644 index 8aeb61f6e..000000000 --- a/docs/book/src/hb_ao.erl.md +++ /dev/null @@ -1,1509 +0,0 @@ -# hb_ao - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_ao.erl) - -This module is the root of the device call logic of the -AO-Core protocol in HyperBEAM. -At the implementation level, every message is simply a collection of keys, -dictated by its `Device`, that can be resolved in order to yield their -values. 
Each key may contain a link to another message or a raw value: - `ao(BaseMessage, RequestMessage) -> {Status, Result}` -Under-the-hood, `AO-Core(BaseMessage, RequestMessage)` leads to a lookup of -the `device` key of the base message, followed by the evaluation of -`DeviceMod:PathPart(BaseMessage, RequestMessage)`, which defines the user -compute to be performed. If `BaseMessage` does not specify a device, -`~message@1.0` is assumed. The key to resolve is specified by the `path` -field of the message. -After each output, the `HashPath` is updated to include the `RequestMessage` -that was executed upon it. -Because each message implies a device that can resolve its keys, as well -as generating a merkle tree of the computation that led to the result, -you can see the AO-Core protocol as a system for cryptographically chaining -the execution of `combinators`. See `docs/ao-core-protocol.md` for more -information about AO-Core. -The `key(BaseMessage, RequestMessage)` pattern is repeated throughout the -HyperBEAM codebase, sometimes with `BaseMessage` replaced with `Msg1`, `M1` -or similar, and `RequestMessage` replaced with `Msg2`, `M2`, etc. -The result of any computation can be either a new message or a raw literal -value (a binary, integer, float, atom, or list of such values). -Devices can be expressed as either modules or maps. They can also be -referenced by an Arweave ID, which can be used to load a device from -the network (depending on the value of the `load_remote_devices` and -`trusted_device_signers` environment settings). -HyperBEAM device implementations are defined as follows: -
-    DevMod:ExportedFunc : Key resolution functions. All are assumed to be
-                          device keys (thus, present in every message that
-                          uses it) unless specified by `DevMod:info()`.
-                          Each function takes a set of parameters
-                          of the form `DevMod:KeyHandler(Msg1, Msg2, Opts)`.
-                          Each of these arguments can be ommitted if not
-                          needed. Non-exported functions are not assumed
-                          to be device keys.
-    DevMod:info : Optional. Returns a map of options for the device. All 
-                  options are optional and assumed to be the defaults if 
-                  not specified. This function can accept a `Message1` as 
-                  an argument, allowing it to specify its functionality 
-                  based on a specific message if appropriate.
-    info/exports : Overrides the export list of the Erlang module, such that
-                  only the functions in this list are assumed to be device
-                  keys. Defaults to all of the functions that DevMod 
-                  exports in the Erlang environment.
-    info/excludes : A list of keys that should not be resolved by the device,
-                    despite being present in the Erlang module exports list.
-    info/handler : A function that should be used to handle _all_ keys for 
-                   messages using the device.
-    info/default : A function that should be used to handle all keys that
-                   are not explicitly implemented by the device. Defaults to
-                   the `dev_message` device, which contains general keys for 
-                   interacting with messages.
-    info/default_mod : A different device module that should be used to
-                   handle all keys that are not explicitly implemented
-                   by the device. Defaults to the `dev_message` device.
-    info/grouper : A function that returns the concurrency 'group' name for
-                   an execution. Executions with the same group name will
-                   be executed by sending a message to the associated process
-                   and waiting for a response. This allows you to control 
-                   concurrency of execution and to allow executions to share
-                   in-memory state as applicable. Default: A derivation of
-                   Msg1+Msg2. This means that concurrent calls for the same
-                   output will lead to only a single execution.
-    info/worker : A function that should be run as the 'server' loop of
-                  the executor for interactions using the device.
-The HyperBEAM resolver also takes a number of runtime options that change
-the way that the environment operates:
-`update_hashpath`:  Whether to add the `Msg2` to `HashPath` for the `Msg3`.
-					Default: true.
-`add_key`:          Whether to add the key to the start of the arguments.
-					Default: ``.
-
- ---- - -## Exported Functions - -- `deep_set/4` -- `find_exported_function/5` -- `force_message/2` -- `get_first/2` -- `get_first/3` -- `get/2` -- `get/3` -- `get/4` -- `info/2` -- `is_exported/4` -- `keys/1` -- `keys/2` -- `keys/3` -- `load_device/2` -- `message_to_device/2` -- `message_to_fun/3` -- `normalize_key/1` -- `normalize_key/2` -- `normalize_keys/1` -- `normalize_keys/2` -- `remove/2` -- `remove/3` -- `resolve_many/2` -- `resolve/2` -- `resolve/3` -- `set/3` -- `set/4` -- `truncate_args/2` - ---- - -### resolve - -This module is the root of the device call logic of the -Get the value of a message's key by running its associated device - -```erlang -resolve(Path, Opts) when is_binary(Path) -> - resolve(#{ <<"path">> => Path }, Opts); -``` - -### resolve - -This module is the root of the device call logic of the -Get the value of a message's key by running its associated device - -```erlang -resolve(SingletonMsg, _Opts) - when is_map(SingletonMsg), not is_map_key(<<"path">>, SingletonMsg) -> - {error, <<"Attempted to resolve a message without a path.">>}; -``` - -### resolve - -This module is the root of the device call logic of the -Get the value of a message's key by running its associated device - -```erlang -resolve(SingletonMsg, Opts) -> - resolve_many(hb_singleton:from(SingletonMsg, Opts), Opts). -``` - -### resolve - -```erlang -resolve(Msg1, Path, Opts) when not is_map(Path) -> - resolve(Msg1, #{ <<"path">> => Path }, Opts); -``` - -### resolve - -```erlang -resolve(Msg1, Msg2, Opts) -> - PathParts = hb_path:from_message(request, Msg2, Opts), - ?event(ao_core, {stage, 1, prepare_multimessage_resolution, {path_parts, PathParts}}), - MessagesToExec = [ Msg2#{ <<"path">> => Path } || Path <- PathParts ], - ?event(ao_core, {stage, 1, prepare_multimessage_resolution, {messages_to_exec, MessagesToExec}}), - resolve_many([Msg1 | MessagesToExec], Opts). -``` - -### resolve_many - -Resolve a list of messages in sequence. 
Take the output of the first - -```erlang -resolve_many([ID], Opts) when ?IS_ID(ID) -> - % Note: This case is necessary to place specifically here for two reasons: - % 1. It is not in `do_resolve_many' because we need to handle the case - % where a result from a prior invocation is an ID itself. We should not - % attempt to resolve such IDs further. -``` - -### resolve_many - -```erlang -resolve_many(ListMsg, Opts) when is_map(ListMsg) -> - % We have been given a message rather than a list of messages, so we should - % convert it to a list, assuming that the message is monotonically numbered. -``` - -### resolve_many - -```erlang -resolve_many({as, DevID, Msg}, Opts) -> - subresolve(#{}, DevID, Msg, Opts); -``` - -### resolve_many - -```erlang -resolve_many([{resolve, Subres}], Opts) -> - resolve_many(Subres, Opts); -``` - -### resolve_many - -```erlang -resolve_many(MsgList, Opts) -> - ?event(ao_core, {resolve_many, MsgList}, Opts), - Res = do_resolve_many(MsgList, Opts), - ?event(ao_core, {resolve_many_complete, {res, Res}, {req, MsgList}}, Opts), - Res. -``` - -### do_resolve_many - -```erlang -do_resolve_many([], _Opts) -> - {failure, <<"Attempted to resolve an empty message sequence.">>}; -``` - -### do_resolve_many - -```erlang -do_resolve_many([Msg3], Opts) -> - ?event(ao_core, {stage, 11, resolve_complete, Msg3}), - {ok, hb_cache:ensure_loaded(Msg3, Opts)}; -``` - -### do_resolve_many - -```erlang -do_resolve_many([Msg1, Msg2 | MsgList], Opts) -> - ?event(ao_core, {stage, 0, resolve_many, {msg1, Msg1}, {msg2, Msg2}}), - case resolve_stage(1, Msg1, Msg2, Opts) of - {ok, Msg3} -> - ?event(ao_core, - { - stage, - 13, - resolved_step, - {msg3, Msg3}, - {opts, Opts} - }, - Opts - ), - do_resolve_many([Msg3 | MsgList], Opts); - Res -> - % The result is not a resolvable message. Return it. 
-``` - -### resolve_stage - -```erlang -resolve_stage(1, Link, Msg2, Opts) when ?IS_LINK(Link) -> - % If the first message is a link, we should load the message and - % continue with the resolution. -``` - -### resolve_stage - -```erlang -resolve_stage(1, Msg1, Link, Opts) when ?IS_LINK(Link) -> - % If the second message is a link, we should load the message and - % continue with the resolution. -``` - -### resolve_stage - -```erlang -resolve_stage(1, {as, DevID, Ref}, Msg2, Opts) when ?IS_ID(Ref) orelse ?IS_LINK(Ref) -> - % Normalize `as' requests with a raw ID or link as the path. Links will be - % loaded in following stages. -``` - -### resolve_stage - -```erlang -resolve_stage(1, {as, DevID, Link}, Msg2, Opts) when ?IS_LINK(Link) -> - % If the first message is an `as' with a link, we should load the message and - % continue with the resolution. -``` - -### resolve_stage - -```erlang -resolve_stage(1, {as, DevID, Raw = #{ <<"path">> := ID }}, Msg2, Opts) when ?IS_ID(ID) -> - % If the first message is an `as' with an ID, we should load the message and - % apply the non-path elements of the sub-request to it. -``` - -### resolve_stage - -```erlang -resolve_stage(1, Raw = {as, DevID, SubReq}, Msg2, Opts) -> - % Set the device of the message to the specified one and resolve the sub-path. -``` - -### resolve_stage - -```erlang -resolve_stage(1, RawMsg1, Msg2Outer = #{ <<"path">> := {as, DevID, Msg2Inner} }, Opts) -> - % Set the device to the specified `DevID' and resolve the message. Merging - % the `Msg2Inner' into the `Msg2Outer' message first. We return the result - % of the sub-resolution directly. -``` - -### resolve_stage - -```erlang -resolve_stage(1, {resolve, Subres}, Msg2, Opts) -> - % If the first message is a `{resolve, Subres}' tuple, we should execute it - % directly, then apply the request to the result. 
-``` - -### resolve_stage - -```erlang -resolve_stage(1, Msg1, {resolve, Subres}, Opts) -> - % If the second message is a `{resolve, Subresolution}' tuple, we should - % execute the subresolution directly to gain the underlying `Msg2' for - % our execution. We assume that the subresolution is already in a normalized, - % executable form, so we pass it to `resolve_many' for execution. -``` - -### resolve_stage - -```erlang -resolve_stage(1, Msg1, Msg2, Opts) when is_list(Msg1) -> - % Normalize lists to numbered maps (base=1) if necessary. -``` - -### resolve_stage - -```erlang -resolve_stage(1, Msg1, NonMapMsg2, Opts) when not is_map(NonMapMsg2) -> - ?event(ao_core, {stage, 1, path_normalize}), - resolve_stage(1, Msg1, #{ <<"path">> => NonMapMsg2 }, Opts); -``` - -### resolve_stage - -```erlang -resolve_stage(1, RawMsg1, RawMsg2, Opts) -> - % Normalize the path to a private key containing the list of remaining - % keys to resolve. -``` - -### resolve_stage - -```erlang -resolve_stage(2, Msg1, Msg2, Opts) -> - ?event(ao_core, {stage, 2, cache_lookup}, Opts), - % Lookup request in the cache. If we find a result, return it. -``` - -### resolve_stage - -```erlang -resolve_stage(3, Msg1, Msg2, Opts) when not is_map(Msg1) or not is_map(Msg2) -> - % Validation check: If the messages are not maps, we cannot find a key - % in them, so return not_found. -``` - -### resolve_stage - -```erlang -resolve_stage(3, Msg1, Msg2, Opts) -> - ?event(ao_core, {stage, 3, validation_check}, Opts), - % Validation check: Check if the message is valid. -``` - -### resolve_stage - -```erlang -resolve_stage(4, Msg1, Msg2, Opts) -> - ?event(ao_core, {stage, 4, persistent_resolver_lookup}, Opts), - % Persistent-resolver lookup: Search for local (or Distributed - % Erlang cluster) processes that are already performing the execution. 
-``` - -### resolve_stage - -```erlang -resolve_stage(5, Msg1, Msg2, ExecName, Opts) -> - ?event(ao_core, {stage, 5, device_lookup}, Opts), - % Device lookup: Find the Erlang function that should be utilized to - % execute Msg2 on Msg1. -``` - -### resolve_stage - -```erlang -resolve_stage(6, Func, Msg1, Msg2, ExecName, Opts) -> - ?event(ao_core, {stage, 6, ExecName, execution}, Opts), - % Execution. -``` - -### resolve_stage - -```erlang -resolve_stage(7, Msg1, Msg2, {St, Res}, ExecName, Opts = #{ on := On = #{ <<"step">> := _ }}) -> - ?event(ao_core, {stage, 7, ExecName, executing_step_hook, {on, On}}, Opts), - % If the `step' hook is defined, we execute it. Note: This function clause - % matches directly on the `on' key of the `Opts' map. This is in order to - % remove the expensive lookup check that would otherwise be performed on every - % execution. -``` - -### resolve_stage - -```erlang -resolve_stage(7, Msg1, Msg2, Res, ExecName, Opts) -> - ?event(ao_core, {stage, 7, ExecName, no_step_hook}, Opts), - resolve_stage(8, Msg1, Msg2, Res, ExecName, Opts); -``` - -### resolve_stage - -```erlang -resolve_stage(8, Msg1, Msg2, {ok, {resolve, Sublist}}, ExecName, Opts) -> - ?event(ao_core, {stage, 8, ExecName, subresolve_result}, Opts), - % If the result is a `{resolve, Sublist}' tuple, we need to execute it - % as a sub-resolution. -``` - -### resolve_stage - -```erlang -resolve_stage(8, Msg1, Msg2, Res, ExecName, Opts) -> - ?event(ao_core, {stage, 8, ExecName, no_subresolution_necessary}, Opts), - resolve_stage(9, Msg1, Msg2, Res, ExecName, Opts); -``` - -### resolve_stage - -```erlang -resolve_stage(9, Msg1, Msg2, {ok, Msg3}, ExecName, Opts) when is_map(Msg3) -> - ?event(ao_core, {stage, 9, ExecName, generate_hashpath}, Opts), - % Cryptographic linking. Now that we have generated the result, we - % need to cryptographically link the output to its input via a hashpath. 
-``` - -### resolve_stage - -```erlang -resolve_stage(9, Msg1, Msg2, {Status, Msg3}, ExecName, Opts) when is_map(Msg3) -> - ?event(ao_core, {stage, 9, ExecName, abnormal_status_reset_hashpath}, Opts), - ?event(hashpath, {resetting_hashpath_msg3, {msg1, Msg1}, {msg2, Msg2}, {opts, Opts}}), - % Skip cryptographic linking and reset the hashpath if the result is abnormal. -``` - -### resolve_stage - -```erlang -resolve_stage(9, Msg1, Msg2, Res, ExecName, Opts) -> - ?event(ao_core, {stage, 9, ExecName, non_map_result_skipping_hash_path}, Opts), - % Skip cryptographic linking and continue if we don't have a map that can have - % a hashpath at all. -``` - -### resolve_stage - -```erlang -resolve_stage(10, Msg1, Msg2, {ok, Msg3}, ExecName, Opts) -> - ?event(ao_core, {stage, 10, ExecName, result_caching}, Opts), - % Result caching: Optionally, cache the result of the computation locally. -``` - -### resolve_stage - -```erlang -resolve_stage(10, Msg1, Msg2, Res, ExecName, Opts) -> - ?event(ao_core, {stage, 10, ExecName, abnormal_status_skip_caching}, Opts), - % Skip result caching if the result is abnormal. -``` - -### resolve_stage - -```erlang -resolve_stage(11, Msg1, Msg2, Res, ExecName, Opts) -> - ?event(ao_core, {stage, 11, ExecName}, Opts), - % Notify processes that requested the resolution while we were executing and - % unregister ourselves from the group. 
-``` - -### resolve_stage - -```erlang -resolve_stage(12, _Msg1, _Msg2, {ok, Msg3} = Res, ExecName, Opts) -> - ?event(ao_core, {stage, 12, ExecName, maybe_spawn_worker}, Opts), - % Check if we should spawn a worker for the current execution - case {is_map(Msg3), hb_opts:get(spawn_worker, false, Opts#{ prefer => local })} of - {A, B} when (A == false) or (B == false) -> - Res; - {_, _} -> - % Spawn a worker for the current execution - WorkerPID = hb_persistent:start_worker(ExecName, Msg3, Opts), - hb_persistent:forward_work(WorkerPID, Opts), - Res - end; -``` - -### resolve_stage - -```erlang -resolve_stage(12, _Msg1, _Msg2, OtherRes, ExecName, Opts) -> - ?event(ao_core, {stage, 12, ExecName, abnormal_status_skip_spawning}, Opts), - OtherRes. -``` - -### subresolve - -Execute a sub-resolution. - -```erlang -subresolve(RawMsg1, DevID, ReqPath, Opts) when is_binary(ReqPath) -> - % If the request is a binary, we assume that it is a path. -``` - -### subresolve - -```erlang -subresolve(RawMsg1, DevID, Req, Opts) -> - % First, ensure that the message is loaded from the cache. -``` - -### maybe_profiled_apply - -If the `AO_PROFILING` macro is defined (set by building/launching with - -```erlang -maybe_profiled_apply(Func, Args, _Msg1, _Msg2, _Opts) -> - apply(Func, Args). 
-``` - -### maybe_profiled_apply - -```erlang -maybe_profiled_apply(Func, Args, Msg1, Msg2, Opts) -> - CallStack = erlang:get(ao_stack), - ?event(ao_trace, - {profiling_apply, - {func, Func}, - {args, Args}, - {call_stack, CallStack} - } - ), - Key = - case hb_maps:get(<<"device">>, Msg1, undefined, Opts) of - undefined -> - hb_util:bin(erlang:fun_to_list(Func)); - Device -> - case hb_maps:get(<<"path">>, Msg2, undefined, Opts) of - undefined -> - hb_util:bin(erlang:fun_to_list(Func)); - Path -> - MethodStr = - case hb_maps:get(<<"method">>, Msg2, undefined, Opts) of - undefined -> <<"">>; - <<"GET">> -> <<"">>; - Method -> <<"<", Method/binary, ">">> - end, - << - (hb_util:bin(Device))/binary, - "/", - MethodStr/binary, - (hb_util:bin(Path))/binary - >> - end - end, - put( - ao_stack, - case CallStack of - undefined -> [Key]; - Stack -> [Key | Stack] - end - ), - {ExecMicroSecs, Res} = timer:tc(fun() -> apply(Func, Args) end), - put(ao_stack, CallStack), - hb_event:increment(<<"ao-call-counts">>, Key, Opts), - hb_event:increment(<<"ao-total-durations">>, Key, Opts, ExecMicroSecs), - case CallStack of - undefined -> ok; - [Caller|_] -> - hb_event:increment( - <<"ao-callers:", Key/binary>>, - hb_util:bin( - [ - <<"duration:">>, - Caller - ] - ), - Opts, - ExecMicroSecs - ), - hb_event:increment( - <<"ao-callers:", Key/binary>>, - hb_util:bin( - [ - <<"calls:">>, - Caller - ]), - Opts - ) - end, - Res. 
-``` - -### ensure_message_loaded - -Ensure that a message is loaded from the cache if it is an ID, or - -```erlang -ensure_message_loaded(MsgID, Opts) when ?IS_ID(MsgID) -> - case hb_cache:read(MsgID, Opts) of - {ok, LoadedMsg} -> - LoadedMsg; - not_found -> - throw({necessary_message_not_found, <<"/">>, MsgID}) - end; -``` - -### ensure_message_loaded - -Ensure that a message is loaded from the cache if it is an ID, or - -```erlang -ensure_message_loaded(MsgLink, Opts) when ?IS_LINK(MsgLink) -> - hb_cache:ensure_loaded(MsgLink, Opts); -``` - -### ensure_message_loaded - -Ensure that a message is loaded from the cache if it is an ID, or - -```erlang -ensure_message_loaded(Msg, _Opts) -> - Msg. -``` - -### error_invalid_message - -Catch all return if the message is invalid. - -```erlang -error_invalid_message(Msg1, Msg2, Opts) -> - ?event( - ao_core, - {error, {type, invalid_message}, - {msg1, Msg1}, - {msg2, Msg2}, - {opts, Opts} - }, - Opts - ), - { - error, - #{ - <<"status">> => 400, - <<"body">> => <<"Request contains non-verifiable message.">> - } - }. -``` - -### error_infinite - -Catch all return if we are in an infinite loop. - -```erlang -error_infinite(Msg1, Msg2, Opts) -> - ?event( - ao_core, - {error, {type, infinite_recursion}, - {msg1, Msg1}, - {msg2, Msg2}, - {opts, Opts} - }, - Opts - ), - ?trace(), - { - error, - #{ - <<"status">> => 508, - <<"body">> => <<"Request creates infinite recursion.">> - } - }. 
-``` - -### error_invalid_intermediate_status - -```erlang -error_invalid_intermediate_status(Msg1, Msg2, Msg3, RemainingPath, Opts) -> - ?event( - ao_core, - {error, {type, invalid_intermediate_status}, - {msg2, Msg2}, - {msg3, Msg3}, - {remaining_path, RemainingPath}, - {opts, Opts} - }, - Opts - ), - ?event(ao_result, - {intermediate_failure, {msg1, Msg1}, - {msg2, Msg2}, {msg3, Msg3}, - {remaining_path, RemainingPath}, {opts, Opts}}), - { - error, - #{ - <<"status">> => 422, - <<"body">> => Msg3, - <<"key">> => hb_maps:get(<<"path">>, Msg2, <<"Key unknown.">>, Opts), - <<"remaining-path">> => RemainingPath - } - }. -``` - -### error_execution - -Handle an error in a device call. - -```erlang -error_execution(ExecGroup, Msg2, Whence, {Class, Exception, Stacktrace}, Opts) -> - Error = {error, Whence, {Class, Exception, Stacktrace}}, - hb_persistent:unregister_notify(ExecGroup, Msg2, Error, Opts), - ?event(ao_core, {handle_error, Error, {opts, Opts}}, Opts), - case hb_opts:get(error_strategy, throw, Opts) of - throw -> erlang:raise(Class, Exception, Stacktrace); - _ -> Error - end. -``` - -### maybe_force_message - -Force the result of a device call into a message if the result is not - -```erlang -maybe_force_message({Status, Res}, Opts) -> - case hb_opts:get(force_message, false, Opts) of - true -> force_message({Status, Res}, Opts); - false -> {Status, Res} - end; -``` - -### maybe_force_message - -Force the result of a device call into a message if the result is not - -```erlang -maybe_force_message(Res, Opts) -> - maybe_force_message({ok, Res}, Opts). 
-``` - -### force_message - -```erlang -force_message({Status, Res}, Opts) when is_list(Res) -> - force_message({Status, normalize_keys(Res, Opts)}, Opts); -``` - -### force_message - -```erlang -force_message({Status, Subres = {resolve, _}}, _Opts) -> - {Status, Subres}; -``` - -### force_message - -```erlang -force_message({Status, Literal}, _Opts) when not is_map(Literal) -> - ?event({force_message_from_literal, Literal}), - {Status, #{ <<"ao-result">> => <<"body">>, <<"body">> => Literal }}; -``` - -### force_message - -```erlang -force_message({Status, M = #{ <<"status">> := Status, <<"body">> := Body }}, _Opts) - when map_size(M) == 2 -> - ?event({force_message_from_literal_with_status, M}), - {Status, #{ - <<"status">> => Status, - <<"ao-result">> => <<"body">>, - <<"body">> => Body - }}; -``` - -### force_message - -```erlang -force_message({Status, Map}, _Opts) -> - ?event({force_message_from_map, Map}), - {Status, Map}. -``` - -### get - -Shortcut for resolving a key in a message without its status if it is - -```erlang -get(Path, Msg) -> - get(Path, Msg, #{}). -``` - -### get - -```erlang -get(Path, Msg, Opts) -> - get(Path, Msg, not_found, Opts). -``` - -### get - -```erlang -get(Path, {as, Device, Msg}, Default, Opts) -> - get( - Path, - set( - Msg, - #{ <<"device">> => Device }, - internal_opts(Opts) - ), - Default, - Opts - ); -``` - -### get - -```erlang -get(Path, Msg, Default, Opts) -> - case resolve(Msg, #{ <<"path">> => Path }, Opts#{ spawn_worker => false }) of - {ok, Value} -> Value; - {error, _} -> Default - end. -``` - -### get_first - -take a sequence of base messages and paths, then return the value of the - -```erlang -get_first(Paths, Opts) -> get_first(Paths, not_found, Opts). 
-``` - -### get_first - -take a sequence of base messages and paths, then return the value of the - -```erlang -get_first([], Default, _Opts) -> Default; -``` - -### get_first - -take a sequence of base messages and paths, then return the value of the - -```erlang -get_first([{Base, Path}|Msgs], Default, Opts) -> - case get(Path, Base, Opts) of - not_found -> get_first(Msgs, Default, Opts); - Value -> Value - end. -``` - -### keys - -Shortcut to get the list of keys from a message. - -```erlang -keys(Msg) -> keys(Msg, #{}). -``` - -### keys - -Shortcut to get the list of keys from a message. - -```erlang -keys(Msg, Opts) -> keys(Msg, Opts, keep). -``` - -### keys - -Shortcut to get the list of keys from a message. - -```erlang -keys(Msg, Opts, keep) -> - % There is quite a lot of AO-Core-specific machinery here. We: - % 1. `get' the keys from the message, via AO-Core in order to trigger the - % `keys' function on its device. -``` - -### keys - -```erlang -keys(Msg, Opts, remove) -> - lists:filter( - fun(Key) -> not lists:member(Key, ?AO_CORE_KEYS) end, - keys(Msg, Opts, keep) - ). -``` - -### set - -Shortcut for setting a key in the message using its underlying device. - -```erlang -set(RawMsg1, RawMsg2, Opts) when is_map(RawMsg2) -> - Msg1 = normalize_keys(RawMsg1, Opts), - Msg2 = hb_maps:without([<<"hashpath">>, <<"priv">>], normalize_keys(RawMsg2, Opts), Opts), - ?event(ao_internal, {set_called, {msg1, Msg1}, {msg2, Msg2}}, Opts), - % Get the next key to set. -``` - -### set - -```erlang -set(Msg1, Key, Value, Opts) -> - % For an individual key, we run deep_set with the key as the path. 
-``` - -### deep_set - -Recursively search a map, resolving keys, and set the value of the key - -```erlang -deep_set(Msg, [], Value, Opts) when is_map(Msg) or is_list(Msg) -> - device_set(Msg, <<"/">>, Value, Opts); -``` - -### deep_set - -Recursively search a map, resolving keys, and set the value of the key - -```erlang -deep_set(_Msg, [], Value, _Opts) -> - Value; -``` - -### deep_set - -Recursively search a map, resolving keys, and set the value of the key - -```erlang -deep_set(Msg, [Key], Value, Opts) -> - device_set(Msg, Key, Value, Opts); -``` - -### deep_set - -Recursively search a map, resolving keys, and set the value of the key - -```erlang -deep_set(Msg, [Key|Rest], Value, Opts) -> - case resolve(Msg, Key, Opts) of - {ok, SubMsg} -> - ?event( - {traversing_deeper_to_set, - {current_key, Key}, - {current_value, SubMsg}, - {rest, Rest} - } - ), - Res = device_set(Msg, Key, deep_set(SubMsg, Rest, Value, Opts), <<"explicit">>, Opts), - ?event({deep_set_result, {msg, Msg}, {key, Key}, {res, Res}}), - Res; - _ -> - ?event( - {creating_new_map, - {current_key, Key}, - {rest, Rest} - } - ), - Msg#{ Key => deep_set(#{}, Rest, Value, Opts) } - end. -``` - -### device_set - -Call the device's `set` function. - -```erlang -device_set(Msg, Key, Value, Opts) -> - device_set(Msg, Key, Value, <<"deep">>, Opts). -``` - -### device_set - -Call the device's `set` function. - -```erlang -device_set(Msg, Key, Value, Mode, Opts) -> - ReqWithoutMode = - case Key of - <<"path">> -> - #{ <<"path">> => <<"set_path">>, <<"value">> => Value }; - <<"/">> when is_map(Value) -> - % The value is a map and it is to be `set' at the root of the - % message. Subsequently, we call the device's `set' function - % with all of the keys found in the message, leading it to be - % merged into the message. -``` - -### remove - -Remove a key from a message, using its underlying device. - -```erlang -remove(Msg, Key) -> remove(Msg, Key, #{}). 
-``` - -### remove - -Remove a key from a message, using its underlying device. - -```erlang -remove(Msg, Key, Opts) -> - hb_util:ok( - resolve( - Msg, - #{ <<"path">> => <<"remove">>, <<"item">> => Key }, - internal_opts(Opts) - ), - Opts - ). -``` - -### truncate_args - -Truncate the arguments of a function to the number of arguments it - -```erlang -truncate_args(Fun, Args) -> - {arity, Arity} = erlang:fun_info(Fun, arity), - lists:sublist(Args, Arity). -``` - -### message_to_fun - -Calculate the Erlang function that should be called to get a value for - -```erlang -message_to_fun(Msg, Key, Opts) -> - % Get the device module from the message. -``` - -### message_to_device - -Extract the device module from a message. - -```erlang -message_to_device(Msg, Opts) -> - case dev_message:get(<<"device">>, Msg, Opts) of - {error, not_found} -> - % The message does not specify a device, so we use the default device. -``` - -### info_handler_to_fun - -Parse a handler key given by a device's `info`. - -```erlang -info_handler_to_fun(Handler, _Msg, _Key, _Opts) when is_function(Handler) -> - {add_key, Handler}; -``` - -### info_handler_to_fun - -Parse a handler key given by a device's `info`. - -```erlang -info_handler_to_fun(HandlerMap, Msg, Key, Opts) -> - case hb_maps:find(excludes, HandlerMap, Opts) of - {ok, Exclude} -> - case lists:member(Key, Exclude) of - true -> - {ok, MsgWithoutDevice} = - dev_message:remove(Msg, #{ item => device }, Opts), - message_to_fun( - MsgWithoutDevice#{ <<"device">> => default_module() }, - Key, - Opts - ); - false -> {add_key, hb_maps:get(func, HandlerMap, undefined, Opts)} - end; - error -> {add_key, hb_maps:get(func, HandlerMap, undefined, Opts)} - end. 
-``` - -### find_exported_function - -Find the function with the highest arity that has the given name, if it - -```erlang -find_exported_function(Msg, Dev, Key, MaxArity, Opts) when is_map(Dev) -> - case hb_maps:get(normalize_key(Key), normalize_keys(Dev, Opts), not_found, Opts) of - not_found -> not_found; - Fun when is_function(Fun) -> - case erlang:fun_info(Fun, arity) of - {arity, Arity} when Arity =< MaxArity -> - case is_exported(Msg, Dev, Key, Opts) of - true -> {ok, Fun}; - false -> not_found - end; - _ -> not_found - end - end; -``` - -### find_exported_function - -Find the function with the highest arity that has the given name, if it - -```erlang -find_exported_function(_Msg, _Mod, _Key, Arity, _Opts) when Arity < 0 -> - not_found; -``` - -### find_exported_function - -Find the function with the highest arity that has the given name, if it - -```erlang -find_exported_function(Msg, Mod, Key, Arity, Opts) when not is_atom(Key) -> - try hb_util:key_to_atom(Key, false) of - KeyAtom -> find_exported_function(Msg, Mod, KeyAtom, Arity, Opts) - catch _:_ -> not_found - end; -``` - -### find_exported_function - -Find the function with the highest arity that has the given name, if it - -```erlang -find_exported_function(Msg, Mod, Key, Arity, Opts) -> - case erlang:function_exported(Mod, Key, Arity) of - true -> - case is_exported(Msg, Mod, Key, Opts) of - true -> {ok, fun Mod:Key/Arity}; - false -> not_found - end; - false -> - find_exported_function(Msg, Mod, Key, Arity - 1, Opts) - end. -``` - -### is_exported - -Check if a device is guarding a key via its `exports` list. Defaults to - -```erlang -is_exported(_Msg, _Dev, info, _Opts) -> true; -``` - -### is_exported - -Check if a device is guarding a key via its `exports` list. Defaults to - -```erlang -is_exported(Msg, Dev, Key, Opts) -> - is_exported(info(Dev, Msg, Opts), Key, Opts). 
-``` - -### is_exported - -```erlang -is_exported(_, info, _Opts) -> true; -``` - -### is_exported - -```erlang -is_exported(Info = #{ excludes := Excludes }, Key, Opts) -> - case lists:member(normalize_key(Key), lists:map(fun normalize_key/1, Excludes)) of - true -> false; - false -> is_exported(hb_maps:remove(excludes, Info, Opts), Key, Opts) - end; -``` - -### is_exported - -```erlang -is_exported(#{ exports := Exports }, Key, _Opts) -> - lists:member(normalize_key(Key), lists:map(fun normalize_key/1, Exports)); -``` - -### is_exported - -Convert a key to a binary in normalized form. - -```erlang -is_exported(_Info, _Key, _Opts) -> true. -``` - -### normalize_key - -Convert a key to a binary in normalized form. - -```erlang -normalize_key(Key) -> normalize_key(Key, #{}). -``` - -### normalize_key - -Convert a key to a binary in normalized form. - -```erlang -normalize_key(Key, _Opts) when is_binary(Key) -> Key; -``` - -### normalize_key - -Convert a key to a binary in normalized form. - -```erlang -normalize_key(Key, _Opts) when is_atom(Key) -> atom_to_binary(Key); -``` - -### normalize_key - -Convert a key to a binary in normalized form. - -```erlang -normalize_key(Key, _Opts) when is_integer(Key) -> integer_to_binary(Key); -``` - -### normalize_key - -Convert a key to a binary in normalized form. - -```erlang -normalize_key(Key, _Opts) when is_list(Key) -> - case hb_util:is_string_list(Key) of - true -> normalize_key(list_to_binary(Key)); - false -> - iolist_to_binary( - lists:join( - <<"/">>, - lists:map(fun normalize_key/1, Key) - ) - ) - end. -``` - -### normalize_keys - -Ensure that a message is processable by the AO-Core resolver: No lists. - -```erlang -normalize_keys(Msg) -> normalize_keys(Msg, #{}). -``` - -### normalize_keys - -Ensure that a message is processable by the AO-Core resolver: No lists. 
- -```erlang -normalize_keys(Msg1, Opts) when is_list(Msg1) -> - normalize_keys( - hb_maps:from_list( - lists:zip( - lists:seq(1, length(Msg1)), - Msg1 - ) - ), - Opts - ); -``` - -### normalize_keys - -Ensure that a message is processable by the AO-Core resolver: No lists. - -```erlang -normalize_keys(Map, Opts) when is_map(Map) -> - hb_maps:from_list( - lists:map( - fun({Key, Value}) when is_map(Value) -> - {hb_ao:normalize_key(Key), Value}; - ({Key, Value}) -> - {hb_ao:normalize_key(Key), Value} - end, - hb_maps:to_list(Map, Opts) - ) - ); -``` - -### normalize_keys - -Ensure that a message is processable by the AO-Core resolver: No lists. -Load a device module from its name or a message ID. - -```erlang -normalize_keys(Other, _Opts) -> Other. -``` - -### load_device - -Ensure that a message is processable by the AO-Core resolver: No lists. -Load a device module from its name or a message ID. - -```erlang -load_device(Map, _Opts) when is_map(Map) -> {ok, Map}; -``` - -### load_device - -Ensure that a message is processable by the AO-Core resolver: No lists. -Load a device module from its name or a message ID. - -```erlang -load_device(ID, _Opts) when is_atom(ID) -> - try ID:module_info(), {ok, ID} - catch _:_ -> {error, not_loadable} - end; -``` - -### load_device - -Ensure that a message is processable by the AO-Core resolver: No lists. -Load a device module from its name or a message ID. 
- -```erlang -load_device(ID, Opts) when ?IS_ID(ID) -> - ?event(device_load, {requested_load, {id, ID}}, Opts), - case hb_opts:get(load_remote_devices, false, Opts) of - false -> - {error, remote_devices_disabled}; - true -> - ?event(device_load, {loading_from_cache, {id, ID}}, Opts), - {ok, Msg} = hb_cache:read(ID, Opts), - ?event(device_load, {received_device, {id, ID}, {msg, Msg}}, Opts), - TrustedSigners = hb_opts:get(trusted_device_signers, [], Opts), - Trusted = - lists:any( - fun(Signer) -> - lists:member(Signer, TrustedSigners) - end, - hb_message:signers(Msg, Opts) - ), - ?event(device_load, - {verifying_device_trust, - {id, ID}, - {trusted, Trusted}, - {signers, hb_message:signers(Msg, Opts)} - }, - Opts - ), - case Trusted of - false -> {error, device_signer_not_trusted}; - true -> - ?event(device_load, {loading_device, {id, ID}}, Opts), - case hb_maps:get(<<"content-type">>, Msg, undefined, Opts) of - <<"application/beam">> -> - case verify_device_compatibility(Msg, Opts) of - ok -> - ModName = - hb_util:key_to_atom( - hb_maps:get( - <<"module-name">>, - Msg, - undefined, - Opts - ), - new_atoms - ), - LoadRes = - erlang:load_module( - ModName, - hb_maps:get( - <<"body">>, - Msg, - undefined, - Opts - ) - ), - case LoadRes of - {module, _} -> - {ok, ModName}; - {error, Reason} -> - {error, {device_load_failed, Reason}} - end; - {error, Reason} -> - {error, {device_load_failed, Reason}} - end; - Other -> - {error, - {device_load_failed, - {incompatible_content_type, Other}, - {expected, <<"application/beam">>}, - {found, Other} - } - } - end - end - end; -``` - -### load_device - -Ensure that a message is processable by the AO-Core resolver: No lists. -Load a device module from its name or a message ID. 
- -```erlang -load_device(ID, Opts) -> - NormKey = - case is_atom(ID) of - true -> ID; - false -> normalize_key(ID) - end, - case lists:search( - fun (#{ <<"name">> := Name }) -> Name =:= NormKey end, - Preloaded = hb_opts:get(preloaded_devices, [], Opts) - ) of - false -> {error, {module_not_admissable, NormKey, Preloaded}}; - {value, #{ <<"module">> := Mod }} -> load_device(Mod, Opts) - end. -``` - -### verify_device_compatibility - -Verify that a device is compatible with the current machine. - -```erlang -verify_device_compatibility(Msg, Opts) -> - ?event(device_load, {verifying_device_compatibility, {msg, Msg}}, Opts), - Required = - lists:filtermap( - fun({<<"requires-", Key/binary>>, Value}) -> - {true, - { - hb_util:key_to_atom( - hb_ao:normalize_key(Key), - new_atoms - ), - hb_cache:ensure_loaded(Value, Opts) - } - }; - (_) -> false - end, - hb_maps:to_list(Msg, Opts) - ), - ?event(device_load, - {discerned_requirements, - {required, Required}, - {msg, Msg} - }, - Opts - ), - FailedToMatch = - lists:filtermap( - fun({Property, Value}) -> - % The values of these properties are _not_ 'keys', but we normalize - % them as such in order to make them comparable. -``` - -### info - -Get the info map for a device, optionally giving it a message if the - -```erlang -info(Msg, Opts) -> - info(message_to_device(Msg, Opts), Msg, Opts). -``` - -### info - -```erlang -info(DevMod, Msg, Opts) -> - %?event({calculating_info, {dev, DevMod}, {msg, Msg}}), - case find_exported_function(Msg, DevMod, info, 2, Opts) of - {ok, Fun} -> - Res = apply(Fun, truncate_args(Fun, [Msg, Opts])), - % ?event({ - % info_result, - % {dev, DevMod}, - % {args, truncate_args(Fun, [Msg])}, - % {result, Res} - % }), - Res; - not_found -> #{} - end. -``` - -### default_module - -The default device is the identity device, which simply returns the -The execution options that are used internally by this module - -```erlang -default_module() -> dev_message. 
-``` - -### internal_opts - -The default device is the identity device, which simply returns the -The execution options that are used internally by this module - -```erlang -internal_opts(Opts) -> - hb_maps:merge(Opts, #{ - topic => hb_opts:get(topic, ao_internal, Opts), - hashpath => ignore, - cache_control => [<<"no-cache">>, <<"no-store">>], - spawn_worker => false, - await_inprogress => false -``` - ---- - -*Generated from [hb_ao.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_ao.erl)* diff --git a/docs/book/src/hb_ao_test_vectors.erl.md b/docs/book/src/hb_ao_test_vectors.erl.md deleted file mode 100644 index 9a27b1619..000000000 --- a/docs/book/src/hb_ao_test_vectors.erl.md +++ /dev/null @@ -1,892 +0,0 @@ -# hb_ao_test_vectors - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_ao_test_vectors.erl) - -Uses a series of different `Opts` values to test the resolution engine's -execution under different circumstances. - ---- - -### run_test - -Uses a series of different `Opts` values to test the resolution engine's -Easy hook to make a test executable via the command line: - -```erlang -run_test() -> - multiple_as_subresolutions_test(#{}). -``` - -### suite_test_ - -Run each test in the file with each set of options. Start and reset - -```erlang -suite_test_() -> - hb_test_utils:suite_with_opts(test_suite(), test_opts()). -``` - -### benchmark_test_ - -```erlang -benchmark_test_() -> - hb_test_utils:suite_with_opts(benchmark_suite(), test_opts()). 
-``` - -### test_suite - -```erlang -test_suite() -> - [ - {resolve_simple, "resolve simple", - fun resolve_simple_test/1}, - {resolve_id, "resolve id", - fun resolve_id_test/1}, - {start_as, "start as", - fun start_as_test/1}, - {start_as_with_parameters, "start as with parameters", - fun start_as_with_parameters_test/1}, - {load_as, "load as", - fun load_as_test/1}, - {as_path, "as path", - fun as_path_test/1}, - {continue_as, "continue as", - fun continue_as_test/1}, - {multiple_as_subresolutions, "multiple as subresolutions", - fun multiple_as_subresolutions_test/1}, - {resolve_key_twice, "resolve key twice", - fun resolve_key_twice_test/1}, - {resolve_from_multiple_keys, "resolve from multiple keys", - fun resolve_from_multiple_keys_test/1}, - {resolve_path_element, "resolve path element", - fun resolve_path_element_test/1}, - {resolve_binary_key, "resolve binary key", - fun resolve_binary_key_test/1}, - {key_to_binary, "key to binary", - fun key_to_binary_test/1}, - {key_from_id_device_with_args, "key from id device with args", - fun key_from_id_device_with_args_test/1}, - {device_with_handler_function, "device with handler function", - fun device_with_handler_function_test/1}, - {device_with_default_handler_function, - "device with default handler function", - fun device_with_default_handler_function_test/1}, - {basic_get, "basic get", - fun basic_get_test/1}, - {recursive_get, "recursive get", - fun recursive_get_test/1}, - {deep_recursive_get, "deep recursive get", - fun deep_recursive_get_test/1}, - {basic_set, "basic set", - fun basic_set_test/1}, - {get_with_device, "get with device", - fun get_with_device_test/1}, - {get_as_with_device, "get as with device", - fun get_as_with_device_test/1}, - {set_with_device, "set with device", - fun set_with_device_test/1}, - {deep_set, "deep set", - fun deep_set_test/1}, - {deep_set_with_device, "deep set with device", - fun deep_set_with_device_test/1}, - {device_exports, "device exports", - fun 
device_exports_test/1}, - {device_excludes, "device excludes", - fun device_excludes_test/1}, - {denormalized_device_key, "denormalized device key", - fun denormalized_device_key_test/1}, - {list_transform, "list transform", - fun list_transform_test/1}, - {step_hook, "step hook", - fun step_hook_test/1} - ]. -``` - -### benchmark_suite - -```erlang -benchmark_suite() -> - [ - {benchmark_simple, "simple resolution benchmark", - fun benchmark_simple_test/1}, - {benchmark_multistep, "multistep resolution benchmark", - fun benchmark_multistep_test/1}, - {benchmark_get, "get benchmark", - fun benchmark_get_test/1}, - {benchmark_set, "single value set benchmark", - fun benchmark_set_test/1}, - {benchmark_set_multiple, "set two keys benchmark", - fun benchmark_set_multiple_test/1}, - {benchmark_set_multiple_deep, "set two keys deep benchmark", - fun benchmark_set_multiple_deep_test/1} - ]. -``` - -### test_opts - -```erlang -test_opts() -> - [ - #{ - name => normal, - desc => "Default opts", - opts => #{}, - skip => [] - }, - #{ - name => without_hashpath, - desc => "Default without hashpath", - opts => #{ - hashpath => ignore - }, - skip => [] - }, - #{ - name => no_cache, - desc => "No cache read or write", - opts => #{ - hashpath => ignore, - cache_control => [<<"no-cache">>, <<"no-store">>], - spawn_worker => false, - store => #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST/fs">> - } - }, - skip => [load_as] - }, - #{ - name => only_store, - desc => "Store, don't read", - opts => #{ - hashpath => update, - cache_control => [<<"no-cache">>], - spawn_worker => false, - store => #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST/fs">> - } - }, - skip => [ - denormalized_device_key, - deep_set_with_device, - load_as - ], - reset => false - }, - #{ - name => only_if_cached, - desc => "Only read, don't exec", - opts => #{ - hashpath => ignore, - cache_control => [<<"only-if-cached">>], - spawn_worker => false, - store => #{ - 
<<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST/fs">> - } - }, - skip => [ - % Exclude tests that return a list on its own for now, as raw - % lists cannot be cached yet. -``` - -### exec_dummy_device - -Ensure that we can read a device from the cache then execute it. By - -```erlang -exec_dummy_device(SigningWallet, Opts) -> - % Compile the test device and store it in an accessible cache to the execution - % environment. -``` - -### load_device_test - -```erlang -load_device_test() -> - % Establish an execution environment which trusts the device author. -``` - -### untrusted_load_device_test - -```erlang -untrusted_load_device_test() -> - % Establish an execution environment which does not trust the device author. -``` - -### resolve_simple_test - -```erlang -resolve_simple_test(Opts) -> - Res = hb_ao:resolve(#{ <<"a">> => <<"RESULT">> }, <<"a">>, Opts), - ?assertEqual({ok, <<"RESULT">>}, Res). -``` - -### resolve_id_test - -```erlang -resolve_id_test(Opts) -> - ?assertMatch( - ID when byte_size(ID) == 43, - hb_ao:get(id, #{ test_key => <<"1">> }, Opts) - ). -``` - -### resolve_key_twice_test - -```erlang -resolve_key_twice_test(Opts) -> - % Ensure that the same message can be resolved again. -``` - -### resolve_from_multiple_keys_test - -```erlang -resolve_from_multiple_keys_test(Opts) -> - ?assertEqual( - {ok, [<<"a">>]}, - hb_ao:resolve(#{ <<"a">> => <<"1">>, <<"priv_a">> => <<"2">> }, <<"keys">>, Opts) - ). -``` - -### resolve_path_element_test - -```erlang -resolve_path_element_test(Opts) -> - ?assertEqual( - {ok, [<<"test_path">>]}, - hb_ao:resolve(#{ <<"path">> => [<<"test_path">>] }, <<"path">>, Opts) - ), - ?assertEqual( - {ok, [<<"a">>]}, - hb_ao:resolve(#{ <<"Path">> => [<<"a">>] }, <<"Path">>, Opts) - ). 
-``` - -### key_to_binary_test - -```erlang -key_to_binary_test(Opts) -> - ?assertEqual(<<"a">>, hb_ao:normalize_key(a, Opts)), - ?assertEqual(<<"a">>, hb_ao:normalize_key(<<"a">>, Opts)), - ?assertEqual(<<"a">>, hb_ao:normalize_key("a", Opts)). -``` - -### resolve_binary_key_test - -```erlang -resolve_binary_key_test(Opts) -> - ?assertEqual( - {ok, <<"RESULT">>}, - hb_ao:resolve(#{ a => <<"RESULT">> }, <<"a">>, Opts) - ), - ?assertEqual( - {ok, <<"1">>}, - hb_ao:resolve( - #{ - <<"Test-Header">> => <<"1">> - }, - <<"Test-Header">>, - Opts - ) - ). -``` - -### generate_device_with_keys_using_args - -Generates a test device with three keys, each of which uses - -```erlang -generate_device_with_keys_using_args() -> - #{ - key_using_only_state => - fun(State) -> - {ok, - <<(hb_maps:get(<<"state_key">>, State))/binary>> - } - end, - key_using_state_and_msg => - fun(State, Msg) -> - {ok, - << - (hb_maps:get(<<"state_key">>, State))/binary, - (hb_maps:get(<<"msg_key">>, Msg))/binary - >> - } - end, - key_using_all => - fun(State, Msg, Opts) -> - {ok, - << - (hb_maps:get(<<"state_key">>, State, undefined, Opts))/binary, - (hb_maps:get(<<"msg_key">>, Msg, undefined, Opts))/binary, - (hb_maps:get(<<"opts_key">>, Opts, undefined, Opts))/binary - >> - } - end - }. -``` - -### gen_default_device - -Create a simple test device that implements the default handler. - -```erlang -gen_default_device() -> - #{ - info => - fun() -> - #{ - default => - fun(_, _State) -> - {ok, <<"DEFAULT">>} - end - } - end, - <<"state_key">> => - fun(_) -> - {ok, <<"STATE">>} - end - }. -``` - -### gen_handler_device - -Create a simple test device that implements the handler key. - -```erlang -gen_handler_device() -> - #{ - info => - fun() -> - #{ - handler => - fun(<<"set">>, M1, M2, Opts) -> - dev_message:set(M1, M2, Opts); - (_, _, _, _) -> - {ok, <<"HANDLER VALUE">>} - end - } - end - }. -``` - -### key_from_id_device_with_args_test - -Test that arguments are passed to a device key as expected. 
- -```erlang -key_from_id_device_with_args_test(Opts) -> - Msg = - #{ - device => generate_device_with_keys_using_args(), - state_key => <<"1">> - }, - ?assertEqual( - {ok, <<"1">>}, - hb_ao:resolve( - Msg, - #{ - <<"path">> => <<"key_using_only_state">>, - <<"msg_key">> => <<"2">> % Param message, which is ignored - }, - Opts - ) - ), - ?assertEqual( - {ok, <<"13">>}, - hb_ao:resolve( - Msg, - #{ - <<"path">> => <<"key_using_state_and_msg">>, - <<"msg_key">> => <<"3">> % Param message, with value to add - }, - Opts - ) - ), - ?assertEqual( - {ok, <<"1337">>}, - hb_ao:resolve( - Msg, - #{ - <<"path">> => <<"key_using_all">>, - <<"msg_key">> => <<"3">> % Param message - }, - Opts#{ - <<"opts_key">> => <<"37">>, - <<"cache_control">> => [<<"no-cache">>, <<"no-store">>] - } - ) - ). -``` - -### device_with_handler_function_test - -```erlang -device_with_handler_function_test(Opts) -> - Msg = - #{ - device => gen_handler_device(), - test_key => <<"BAD">> - }, - ?assertEqual( - {ok, <<"HANDLER VALUE">>}, - hb_ao:resolve(Msg, <<"test_key">>, Opts) - ). -``` - -### device_with_default_handler_function_test - -```erlang -device_with_default_handler_function_test(Opts) -> - Msg = - #{ - device => gen_default_device() - }, - ?assertEqual( - {ok, <<"STATE">>}, - hb_ao:resolve(Msg, <<"state_key">>, Opts) - ), - ?assertEqual( - {ok, <<"DEFAULT">>}, - hb_ao:resolve(Msg, <<"any_random_key">>, Opts) - ). -``` - -### basic_get_test - -```erlang -basic_get_test(Opts) -> - Msg = #{ <<"key1">> => <<"value1">>, <<"key2">> => <<"value2">> }, - ?assertEqual(<<"value1">>, hb_ao:get(<<"key1">>, Msg, Opts)), - ?assertEqual(<<"value2">>, hb_ao:get(<<"key2">>, Msg, Opts)), - ?assertEqual(<<"value2">>, hb_ao:get(<<"key2">>, Msg, Opts)), - ?assertEqual(<<"value2">>, hb_ao:get([<<"key2">>], Msg, Opts)). 
-``` - -### recursive_get_test - -```erlang -recursive_get_test(Opts) -> - Msg = #{ - <<"key1">> => <<"value1">>, - <<"key2">> => #{ - <<"key3">> => <<"value3">>, - <<"key4">> => #{ - <<"key5">> => <<"value5">>, - <<"key6">> => #{ - <<"key7">> => <<"value7">> - } - } - } - }, - ?assertEqual( - {ok, <<"value1">>}, - hb_ao:resolve(Msg, #{ <<"path">> => <<"key1">> }, Opts) - ), - ?assertEqual(<<"value1">>, hb_ao:get(<<"key1">>, Msg, Opts)), - ?assertEqual( - {ok, <<"value3">>}, - hb_ao:resolve(Msg, #{ <<"path">> => [<<"key2">>, <<"key3">>] }, Opts) - ), - ?assertEqual(<<"value3">>, hb_ao:get([<<"key2">>, <<"key3">>], Msg, Opts)), - ?assertEqual(<<"value3">>, hb_ao:get(<<"key2/key3">>, Msg, Opts)). -``` - -### deep_recursive_get_test - -```erlang -deep_recursive_get_test(Opts) -> - Msg = #{ - <<"key1">> => <<"value1">>, - <<"key2">> => #{ - <<"key3">> => <<"value3">>, - <<"key4">> => #{ - <<"key5">> => <<"value5">>, - <<"key6">> => #{ - <<"key7">> => <<"value7">> - } - } - } - }, - ?assertEqual(<<"value7">>, hb_ao:get(<<"key2/key4/key6/key7">>, Msg, Opts)). -``` - -### basic_set_test - -```erlang -basic_set_test(Opts) -> - Msg = #{ <<"key1">> => <<"value1">>, <<"key2">> => <<"value2">> }, - UpdatedMsg = hb_ao:set(Msg, #{ <<"key1">> => <<"new_value1">> }, Opts), - ?event({set_key_complete, {key, <<"key1">>}, {value, <<"new_value1">>}}), - ?assertEqual(<<"new_value1">>, hb_ao:get(<<"key1">>, UpdatedMsg, Opts)), - ?assertEqual(<<"value2">>, hb_ao:get(<<"key2">>, UpdatedMsg, Opts)). -``` - -### get_with_device_test - -```erlang -get_with_device_test(Opts) -> - Msg = - #{ - <<"device">> => generate_device_with_keys_using_args(), - <<"state_key">> => <<"STATE">> - }, - ?assertEqual(<<"STATE">>, hb_ao:get(<<"state_key">>, Msg, Opts)), - ?assertEqual(<<"STATE">>, hb_ao:get(<<"key_using_only_state">>, Msg, Opts)). 
-``` - -### get_as_with_device_test - -```erlang -get_as_with_device_test(Opts) -> - Msg = - #{ - <<"device">> => gen_handler_device(), - <<"test_key">> => <<"ACTUAL VALUE">> - }, - ?assertEqual( - <<"HANDLER VALUE">>, - hb_ao:get(test_key, Msg, Opts) - ), - ?assertEqual( - <<"ACTUAL VALUE">>, - hb_ao:get(test_key, {as, dev_message, Msg}, Opts) - ). -``` - -### set_with_device_test - -```erlang -set_with_device_test(Opts) -> - Msg = - #{ - <<"device">> => - #{ - <<"set">> => - fun(State, _Msg) -> - Acc = hb_maps:get(<<"set_count">>, State, <<"">>, Opts), - {ok, - State#{ - <<"set_count">> => << Acc/binary, "." >> - } - } - end - }, - <<"state_key">> => <<"STATE">> - }, - ?assertEqual(<<"STATE">>, hb_ao:get(<<"state_key">>, Msg, Opts)), - SetOnce = hb_ao:set(Msg, #{ <<"state_key">> => <<"SET_ONCE">> }, Opts), - ?assertEqual(<<".">>, hb_ao:get(<<"set_count">>, SetOnce, Opts)), - SetTwice = hb_ao:set(SetOnce, #{ <<"state_key">> => <<"SET_TWICE">> }, Opts), - ?assertEqual(<<"..">>, hb_ao:get(<<"set_count">>, SetTwice, Opts)), - ?assertEqual(<<"STATE">>, hb_ao:get(<<"state_key">>, SetTwice, Opts)). -``` - -### deep_set_test - -```erlang -deep_set_test(Opts) -> - % First validate second layer changes are handled correctly. -``` - -### deep_set_new_messages_test - -```erlang -deep_set_new_messages_test() -> - Opts = hb_maps:get(opts, hd(test_opts())), - % Test that new messages are created when the path does not exist. -``` - -### deep_set_with_device_test - -```erlang -deep_set_with_device_test(Opts) -> - Device = #{ - set => - fun(Msg1, Msg2) -> - % A device where the set function modifies the key - % and adds a modified flag. 
-``` - -### device_exports_test - -```erlang -device_exports_test(Opts) -> - Msg = #{ <<"device">> => dev_message }, - ?assert(hb_ao:is_exported(Msg, dev_message, info, Opts)), - ?assert(hb_ao:is_exported(Msg, dev_message, set, Opts)), - ?assert( - hb_ao:is_exported( - Msg, - dev_message, - not_explicitly_exported, - Opts - ) - ), - Dev = #{ - info => fun() -> #{ exports => [set] } end, - set => fun(_, _) -> {ok, <<"SET">>} end - }, - Msg2 = #{ <<"device">> => Dev }, - ?assert(hb_ao:is_exported(Msg2, Dev, info, Opts)), - ?assert(hb_ao:is_exported(Msg2, Dev, set, Opts)), - ?assert(not hb_ao:is_exported(Msg2, Dev, not_exported, Opts)), - Dev2 = #{ - info => - fun() -> - #{ - exports => [test1, <<"test2">>], - handler => - fun() -> - {ok, <<"Handler-Value">>} - end - } - end - }, - Msg3 = #{ <<"device">> => Dev2, <<"test1">> => <<"BAD1">>, <<"test3">> => <<"GOOD3">> }, - ?assertEqual(<<"Handler-Value">>, hb_ao:get(<<"test1">>, Msg3, Opts)), - ?assertEqual(<<"Handler-Value">>, hb_ao:get(<<"test2">>, Msg3, Opts)), - ?assertEqual(<<"GOOD3">>, hb_ao:get(<<"test3">>, Msg3, Opts)), - ?assertEqual(<<"GOOD4">>, - hb_ao:get( - <<"test4">>, - hb_ao:set(Msg3, <<"test4">>, <<"GOOD4">>, Opts) - ) - ), - ?assertEqual(not_found, hb_ao:get(<<"test5">>, Msg3, Opts)). -``` - -### device_excludes_test - -```erlang -device_excludes_test(Opts) -> - % Create a device that returns an identifiable message for any key, but also - % sets excludes to [set], such that the message can be modified using the - % default handler. -``` - -### denormalized_device_key_test - -```erlang -denormalized_device_key_test(Opts) -> - Msg = #{ <<"device">> => dev_test }, - ?assertEqual(dev_test, hb_ao:get(device, Msg, Opts)), - ?assertEqual(dev_test, hb_ao:get(<<"device">>, Msg, Opts)), - ?assertEqual({module, dev_test}, - erlang:fun_info( - element(3, hb_ao:message_to_fun(Msg, test_func, Opts)), - module - ) - ). 
-``` - -### list_transform_test - -```erlang -list_transform_test(Opts) -> - Msg = [<<"A">>, <<"B">>, <<"C">>, <<"D">>, <<"E">>], - ?assertEqual(<<"A">>, hb_ao:get(1, Msg, Opts)), - ?assertEqual(<<"B">>, hb_ao:get(2, Msg, Opts)), - ?assertEqual(<<"C">>, hb_ao:get(3, Msg, Opts)), - ?assertEqual(<<"D">>, hb_ao:get(4, Msg, Opts)), - ?assertEqual(<<"E">>, hb_ao:get(5, Msg, Opts)). -``` - -### start_as_test - -```erlang -start_as_test(Opts) -> - ?assertEqual( - {ok, <<"GOOD_FUNCTION">>}, - hb_ao:resolve_many( - [ - {as, <<"test-device@1.0">>, #{ <<"path">> => <<>> }}, - #{ <<"path">> => <<"test_func">> } - ], - Opts - ) - ). -``` - -### start_as_with_parameters_test - -```erlang -start_as_with_parameters_test(Opts) -> - % Resolve a key on a message that has its device set with `as'. -``` - -### load_as_test - -```erlang -load_as_test(Opts) -> - % Load a message as a device with the `as' keyword. -``` - -### as_path_test - -```erlang -as_path_test(Opts) -> - % Create a message with the test device, which implements the test_func - % function. It normally returns `GOOD_FUNCTION'. -``` - -### continue_as_test - -```erlang -continue_as_test(Opts) -> - % Resolve a list of messages in sequence, swapping the device in the middle. -``` - -### multiple_as_subresolutions_test - -```erlang -multiple_as_subresolutions_test(Opts) -> - % Test that multiple as subresolutions in a sequence are handled correctly. -``` - -### step_hook_test - -```erlang -step_hook_test(InitOpts) -> - % Test that the step hook is called correctly. We do this by sending ourselves - % a message each time the hook is called. We also send a `reference', such - % that this test is uniquely identified and further/prior tests do not affect - % it. 
-``` - -### benchmark_simple_test - -```erlang -benchmark_simple_test(Opts) -> - Time = - hb_test_utils:benchmark_iterations( - fun(I) -> hb_ao:resolve(#{ <<"a">> => I }, <<"a">>, Opts) end, - ?BENCHMARK_ITERATIONS - ), - hb_test_utils:benchmark_print( - <<"Single-step resolutions:">>, - ?BENCHMARK_ITERATIONS, - Time - ). -``` - -### benchmark_multistep_test - -```erlang -benchmark_multistep_test(Opts) -> - Time = - hb_test_utils:benchmark_iterations( - fun(I) -> - hb_ao:resolve( - #{ - <<"iteration">> => I, - <<"a">> => #{ - <<"b">> => #{ <<"return">> => I } - } - }, - <<"a/b/return">>, - Opts - ) - end, - ?BENCHMARK_ITERATIONS - ), - hb_test_utils:benchmark_print( - <<"Multistep resolutions:">>, - ?BENCHMARK_ITERATIONS, - Time - ). -``` - -### benchmark_get_test - -```erlang -benchmark_get_test(Opts) -> - Time = - hb_test_utils:benchmark_iterations( - fun(I) -> - hb_ao:get( - <<"a">>, - #{ <<"a">> => <<"1">>, <<"iteration">> => I }, - Opts - ) - end, - ?BENCHMARK_ITERATIONS - ), - hb_test_utils:benchmark_print( - <<"Get operations:">>, - ?BENCHMARK_ITERATIONS, - Time - ). -``` - -### benchmark_set_test - -```erlang -benchmark_set_test(Opts) -> - Time = - hb_test_utils:benchmark_iterations( - fun(I) -> - hb_ao:set( - #{ <<"a">> => <<"1">>, <<"iteration">> => I }, - <<"a">>, - <<"2">>, - Opts - ) - end, - ?BENCHMARK_ITERATIONS - ), - hb_test_utils:benchmark_print( - <<"Single value set operations:">>, - ?BENCHMARK_ITERATIONS, - Time - ). -``` - -### benchmark_set_multiple_test - -```erlang -benchmark_set_multiple_test(Opts) -> - Time = - hb_test_utils:benchmark_iterations( - fun(I) -> - hb_ao:set( - #{ <<"a">> => <<"1">>, <<"iteration">> => I }, - #{ <<"a">> => <<"1a">>, <<"b">> => <<"2">> }, - Opts - ) - end, - ?BENCHMARK_ITERATIONS - ), - hb_test_utils:benchmark_print( - <<"Set two keys operations:">>, - ?BENCHMARK_ITERATIONS, - Time - ). 
-``` - -### benchmark_set_multiple_deep_test - -```erlang -benchmark_set_multiple_deep_test(Opts) -> - Time = - hb_test_utils:benchmark_iterations( - fun(I) -> - hb_ao:set( - #{ <<"a">> => #{ <<"b">> => <<"1">> } }, - #{ <<"a">> => #{ <<"b">> => <<"2">>, <<"c">> => I } }, - Opts - ) - end, - ?BENCHMARK_ITERATIONS - ), - hb_test_utils:benchmark_print( - <<"Set two keys operations:">>, - ?BENCHMARK_ITERATIONS, - Time -``` - ---- - -*Generated from [hb_ao_test_vectors.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_ao_test_vectors.erl)* diff --git a/docs/book/src/hb_app.erl.md b/docs/book/src/hb_app.erl.md deleted file mode 100644 index 5ad52cd50..000000000 --- a/docs/book/src/hb_app.erl.md +++ /dev/null @@ -1,37 +0,0 @@ -# hb_app - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_app.erl) - -The main HyperBEAM application module. - ---- - -## Exported Functions - -- `start/2` -- `stop/1` - ---- - -### start - -The main HyperBEAM application module. - -```erlang -start(_StartType, _StartArgs) -> - hb:init(), - hb_sup:start_link(), - ok = dev_scheduler_registry:start(), - _TimestampServer = ar_timestamp:start(), - {ok, _} = hb_http_server:start(). -``` - -### stop - -```erlang -stop(_State) -> -``` - ---- - -*Generated from [hb_app.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_app.erl)* diff --git a/docs/book/src/hb_beamr.erl.md b/docs/book/src/hb_beamr.erl.md deleted file mode 100644 index bc2f18a17..000000000 --- a/docs/book/src/hb_beamr.erl.md +++ /dev/null @@ -1,443 +0,0 @@ -# hb_beamr - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_beamr.erl) - -BEAMR: A WAMR wrapper for BEAM. -Beamr is a library that allows you to run WASM modules in BEAM, using the -Webassembly Micro Runtime (WAMR) as its engine. Each WASM module is -executed using a Linked-In Driver (LID) that is loaded into BEAM. 
It is -designed with a focus on supporting long-running WASM executions that -interact with Erlang functions and processes easily. -Because each WASM module runs as an independent async worker, if you plan -to run many instances in parallel, you should be sure to configure the -BEAM to have enough async worker threads enabled (see `erl +A N` in the -Erlang manuals). -The core API is simple: -
-    start(WasmBinary) -> {ok, Port, Imports, Exports}
-        Where:
-            WasmBinary is the WASM binary to load.
-            Port is the port to the LID.
-            Imports is a list of tuples of the form {Module, Function,
-                Args, Signature}.
-            Exports is a list of tuples of the form {Function, Args,
-                Signature}.
-    stop(Port) -> ok
-    call(Port, FunctionName, Args) -> {ok, Result}
-        Where:
-            FunctionName is the name of the function to call.
-            Args is a list of Erlang terms (converted to WASM values by
-                BEAMR) that match the signature of the function.
-            Result is a list of Erlang terms (converted from WASM values).
-    call(Port, FunName, Args[, Import, State, Opts]) -> {ok, Res, NewState}
-        Where:
-            ImportFun is a function that will be called upon each import.
-            ImportFun must have an arity of 2: Taking an arbitrary `state`
-            term, and a map containing the `port`, `module`, `func`, `args`,
-            `signature`, and the `options` map of the import.
-            It must return a tuple of the form {ok, Response, NewState}.
-    serialize(Port) -> {ok, Mem}
-        Where:
-            Port is the port to the LID.
-            Mem is a binary representing the full WASM state.
-    deserialize(Port, Mem) -> ok
-        Where:
-            Port is the port to the LID.
-            Mem is a binary output of a previous `serialize/1` call.
-
-BEAMR was designed for use in the HyperBEAM project, but is suitable for -deployment in other Erlang applications that need to run WASM modules. PRs -are welcome. - ---- - -## Exported Functions - -- `call/3` -- `call/4` -- `call/5` -- `call/6` -- `deserialize/2` -- `serialize/1` -- `start/1` -- `start/2` -- `stop/1` -- `stub/3` -- `wasm_send/2` - ---- - -### load_driver - -BEAMR: A WAMR wrapper for BEAM. -Load the driver for the WASM executor. - -```erlang -load_driver() -> - case erl_ddll:load(code:priv_dir(hb), ?MODULE) of - ok -> ok; - {error, already_loaded} -> ok; - {error, Error} -> {error, Error} - end. -``` - -### start - -Start a WASM executor context. Yields a port to the LID, and the - -```erlang -start(WasmBinary) when is_binary(WasmBinary) -> - start(WasmBinary, wasm). -``` - -### start - -```erlang -start(WasmBinary, Mode) when is_binary(WasmBinary) -> - ?event({loading_module, {bytes, byte_size(WasmBinary)}, Mode}), - Self = self(), - WASM = spawn( - fun() -> - ok = load_driver(), - Port = open_port({spawn, "hb_beamr"}, []), - Port ! {self(), {command, term_to_binary({init, WasmBinary, Mode})}}, - ?event({waiting_for_init_from, Port}), - worker(Port, Self) - end - ), - receive - {execution_result, Imports, Exports} -> - ?event( - {wasm_init_success, - {imports, Imports}, - {exports, Exports}}), - {ok, WASM, Imports, Exports}; - {error, Error} -> - ?event({wasm_init_error, Error}), - stop(WASM), - {error, Error} - end. -``` - -### worker - -A worker process that is responsible for handling a WASM instance. - -```erlang -worker(Port, Listener) -> - receive - stop -> - ?event({stop_invoked_for_beamr, self()}), - case erlang:port_info(Port, id) of - undefined -> - ok; - _ -> - port_close(Port), - ok - end, - ok; - {wasm_send, NewListener, Message} -> - ?event({wasm_send, {listener, NewListener}, {message, Message}}), - Port ! 
{self(), Message}, - worker(Port, NewListener); - WASMResult -> - ?event({wasm_result, {listener, Listener}, {result, WASMResult}}), - Listener ! WASMResult, - worker(Port, Listener) - end. -``` - -### wasm_send - -```erlang -wasm_send(WASM, Message) when is_pid(WASM) -> - WASM ! {wasm_send, self(), Message}, - ok. -``` - -### stop - -Stop a WASM executor context. - -```erlang -stop(WASM) when is_pid(WASM) -> - WASM ! stop, - ok. -``` - -### call - -Call a function in the WASM executor (see moduledoc for more details). - -```erlang -call(PID, FuncRef, Args) -> - {ok, Res, _} = call(PID, FuncRef, Args, fun stub/3), - {ok, Res}. -``` - -### call - -```erlang -call(PID, FuncRef, Args, ImportFun) -> - call(PID, FuncRef, Args, ImportFun, #{}). -``` - -### call - -```erlang -call(PID, FuncRef, Args, ImportFun, StateMsg) -> - call(PID, FuncRef, Args, ImportFun, StateMsg, #{}). -``` - -### call - -```erlang -call(PID, FuncRef, Args, ImportFun, StateMsg, Opts) - when is_binary(FuncRef) -> - call(PID, binary_to_list(FuncRef), Args, ImportFun, StateMsg, Opts); -``` - -### call - -```erlang -call(WASM, FuncRef, Args, ImportFun, StateMsg, Opts) - when is_pid(WASM) - andalso (is_list(FuncRef) or is_integer(FuncRef)) - andalso is_list(Args) - andalso is_function(ImportFun) - andalso is_map(Opts) -> - case is_valid_arg_list(Args) of - true -> - ?event( - {call_started, - WASM, - FuncRef, - Args, - ImportFun, - StateMsg, - Opts}), - wasm_send(WASM, - {command, - term_to_binary( - case is_integer(FuncRef) of - true -> {indirect_call, FuncRef, Args}; - false -> {call, FuncRef, Args} - end - ) - } - ), - ?event({waiting_for_call_result, self(), WASM}), - monitor_call(WASM, ImportFun, StateMsg, Opts); - false -> - {error, {invalid_args, Args}} - end. -``` - -### stub - -Stub import function for the WASM executor. - -```erlang -stub(Msg1, _Msg2, _Opts) -> - ?event(stub_stdlib_called), - {ok, [0], Msg1}. 
-``` - -### monitor_call - -Synchonously monitor the WASM executor for a call result and any - -```erlang -monitor_call(WASM, ImportFun, StateMsg, Opts) -> - receive - {execution_result, Result} -> - ?event({call_result, Result}), - {ok, Result, StateMsg}; - {import, Module, Func, Args, Signature} -> - ?event({import_called, Module, Func, Args, Signature}), - try - {ok, Res, StateMsg2} = - ImportFun(StateMsg, - #{ - instance => WASM, - module => Module, - func => Func, - args => Args, - func_sig => Signature - }, - Opts - ), - ?event({import_ret, Module, Func, {args, Args}, {res, Res}}), - dispatch_response(WASM, Res), - monitor_call(WASM, ImportFun, StateMsg2, Opts) - catch - Err:Reason:Stack -> - % Signal the WASM executor to stop. -``` - -### dispatch_response - -Check the type of an import response and dispatch it to a Beamr port. - -```erlang -dispatch_response(WASM, Term) when is_pid(WASM) -> - case is_valid_arg_list(Term) of - true -> - wasm_send(WASM, - {command, term_to_binary({import_response, Term})}); - false -> - throw({error, {invalid_response, Term}}) - end; -``` - -### dispatch_response - -Check the type of an import response and dispatch it to a Beamr port. - -```erlang -dispatch_response(_WASM, Term) -> - throw({error, {invalid_response, Term}}). -``` - -### is_valid_arg_list - -Check that a list of arguments is valid for a WASM function call. - -```erlang -is_valid_arg_list(Args) when is_list(Args) -> - lists:all(fun(Arg) -> is_integer(Arg) or is_float(Arg) end, Args); -``` - -### is_valid_arg_list - -Check that a list of arguments is valid for a WASM function call. - -```erlang -is_valid_arg_list(_) -> - false. -``` - -### serialize - -Serialize the WASM state to a binary. - -```erlang -serialize(WASM) when is_pid(WASM) -> - ?event(starting_serialize), - {ok, Size} = hb_beamr_io:size(WASM), - ?event({image_size, Size}), - {ok, Mem} = hb_beamr_io:read(WASM, 0, Size), - ?event({finished_serialize, byte_size(Mem)}), - {ok, Mem}. 
-``` - -### deserialize - -Deserialize a WASM state from a binary. - -```erlang -deserialize(WASM, Bin) when is_pid(WASM) andalso is_binary(Bin) -> - ?event(starting_deserialize), - Res = hb_beamr_io:write(WASM, 0, Bin), - ?event({finished_deserialize, Res}), - ok. -``` - -### driver_loads_test - -```erlang -driver_loads_test() -> - ?assertEqual(ok, load_driver()). -``` - -### simple_wasm_test - -Test standalone `hb_beamr` correctly after loading a WASM module. - -```erlang -simple_wasm_test() -> - {ok, File} = file:read_file("test/test.wasm"), - {ok, WASM, _Imports, _Exports} = start(File), - {ok, [Result]} = call(WASM, "fac", [5.0]), - ?assertEqual(120.0, Result). -``` - -### imported_function_test - -Test that imported functions can be called from the WASM module. - -```erlang -imported_function_test() -> - {ok, File} = file:read_file("test/pow_calculator.wasm"), - {ok, WASM, _Imports, _Exports} = start(File), - {ok, [Result], _} = - call(WASM, <<"pow">>, [2, 5], - fun(Msg1, #{ args := [Arg1, Arg2] }, _Opts) -> - {ok, [Arg1 * Arg2], Msg1} - end), - ?assertEqual(32, Result). -``` - -### wasm64_test - -Test that WASM Memory64 modules load and execute correctly. - -```erlang -wasm64_test() -> - {ok, File} = file:read_file("test/test-64.wasm"), - {ok, WASM, _ImportMap, _Exports} = start(File), - {ok, [Result]} = call(WASM, "fac", [5.0]), - ?assertEqual(120.0, Result). -``` - -### multiclient_test - -Ensure that processes outside of the initial one can interact with - -```erlang -multiclient_test() -> - Self = self(), - ExecPID = spawn(fun() -> - receive {wasm, WASM} -> - {ok, [Result]} = call(WASM, "fac", [5.0]), - Self ! {result, Result} - end - end), - _StartPID = spawn(fun() -> - {ok, File} = file:read_file("test/test.wasm"), - {ok, WASM, _ImportMap, _Exports} = start(File), - ExecPID ! {wasm, WASM} - end), - receive - {result, Result} -> - ?assertEqual(120.0, Result) - end. 
-``` - -### benchmark_test - -```erlang -benchmark_test() -> - BenchTime = 1, - {ok, File} = file:read_file("test/test-64.wasm"), - {ok, WASM, _ImportMap, _Exports} = start(File), - Iterations = hb_test_utils:benchmark( - fun() -> - {ok, [Result]} = call(WASM, "fac", [5.0]), - ?assertEqual(120.0, Result) - end, - BenchTime - ), - ?event(benchmark, {scheduled, Iterations}), - ?assert(Iterations > 1000), - hb_test_utils:benchmark_print( - <<"Direct beamr: Executed">>, - <<"calls">>, - Iterations, - BenchTime - ), -``` - ---- - -*Generated from [hb_beamr.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_beamr.erl)* diff --git a/docs/book/src/hb_beamr_io.erl.md b/docs/book/src/hb_beamr_io.erl.md deleted file mode 100644 index 181ac9e38..000000000 --- a/docs/book/src/hb_beamr_io.erl.md +++ /dev/null @@ -1,238 +0,0 @@ -# hb_beamr_io - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_beamr_io.erl) - -Simple interface for memory management for Beamr instances. -It allows for reading and writing to memory, as well as allocating and -freeing memory by calling the WASM module's exported malloc and free -functions. -Unlike the majority of HyperBEAM modules, this module takes a defensive -approach to type checking, breaking from the conventional Erlang style, -such that failures are caught in the Erlang-side of functions rather than -in the C/WASM-side. - ---- - -## Exported Functions - -- `free/2` -- `malloc/2` -- `read_string/2` -- `read/3` -- `size/1` -- `write_string/2` -- `write/3` - ---- - -### size - -Simple interface for memory management for Beamr instances. -Get the size (in bytes) of the native memory allocated in the Beamr - -```erlang -size(WASM) when is_pid(WASM) -> - hb_beamr:wasm_send(WASM, {command, term_to_binary({size})}), - receive - {execution_result, Size} -> - {ok, Size} - end. -``` - -### write - -Write a binary to the Beamr instance's native memory at a given offset. 
- -```erlang -write(WASM, Offset, Data) - when is_pid(WASM) - andalso is_binary(Data) - andalso is_integer(Offset) -> - ?event(writing_to_mem), - hb_beamr:wasm_send(WASM, {command, term_to_binary({write, Offset, Data})}), - ?event(mem_written), - receive - ok -> ok; - {error, Error} -> {error, Error} - end. -``` - -### write_string - -Simple helper function to allocate space for (via malloc) and write a - -```erlang -write_string(WASM, Data) when is_pid(WASM) andalso is_list(Data) -> - write_string(WASM, iolist_to_binary(Data)); -``` - -### write_string - -Simple helper function to allocate space for (via malloc) and write a - -```erlang -write_string(WASM, Data) when is_pid(WASM) andalso is_binary(Data) -> - DataSize = byte_size(Data) + 1, - String = <>, - case malloc(WASM, DataSize) of - {ok, Ptr} -> - case write(WASM, Ptr, String) of - ok -> {ok, Ptr}; - {error, Error} -> {error, Error} - end; - Error -> Error - end. -``` - -### read - -Read a binary from the Beamr instance's native memory at a given offset - -```erlang -read(WASM, Offset, Size) - when is_pid(WASM) - andalso is_integer(Offset) - andalso is_integer(Size) -> - ?event({read_request, {port, WASM}, {location, Offset}, {size, Size}}), - hb_beamr:wasm_send(WASM, {command, term_to_binary({read, Offset, Size})}), - ?event(read_req_sent), - receive - {execution_result, Result} -> - ?event( - {read_result, - {wasm, WASM}, - {location, Offset}, - {size, Size}, - {result, Result}}), - {ok, Result}; - {error, Error} -> - {error, Error} - end. -``` - -### read_string - -Simple helper function to read a string from the Beamr instance's native - -```erlang -read_string(Port, Offset) -> read_string(Port, Offset, 8). -``` - -### read_string - -Simple helper function to read a string from the Beamr instance's native - -```erlang -read_string(WASM, Offset, ChunkSize) - when is_pid(WASM) - andalso is_integer(Offset) - andalso is_integer(ChunkSize) -> - {ok, iolist_to_binary(do_read_string(WASM, Offset, ChunkSize))}. 
-``` - -### do_read_string - -```erlang -do_read_string(WASM, Offset, ChunkSize) -> - {ok, Data} = read(WASM, Offset, ChunkSize), - case binary:split(Data, [<<0>>]) of - [Data|[]] -> [Data|do_read_string(WASM, Offset + ChunkSize, ChunkSize)]; - [FinalData|_Remainder] -> [FinalData] - end. -``` - -### malloc - -Allocate space for (via an exported malloc function from the WASM) in - -```erlang -malloc(WASM, Size) when is_pid(WASM) andalso is_integer(Size) -> - case hb_beamr:call(WASM, "malloc", [Size]) of - {ok, [0]} -> - ?event({malloc_failed, Size}), - {error, malloc_failed}; - {ok, [Ptr]} -> - ?event({malloc_success, Ptr, Size}), - {ok, Ptr}; - {error, Error} -> - {error, Error} - end. -``` - -### free - -Free space allocated in the Beamr instance's native memory via a - -```erlang -free(WASM, Ptr) when is_pid(WASM) andalso is_integer(Ptr) -> - case hb_beamr:call(WASM, "free", [Ptr]) of - {ok, Res} -> - ?event({free_result, Res}), - ok; - {error, Error} -> - {error, Error} - end. -``` - -### size_test - -```erlang -size_test() -> - WASMPageSize = 65536, - File1Pages = 1, - File2Pages = 193, - {ok, File} = file:read_file("test/test-print.wasm"), - {ok, WASM, _Imports, _Exports} = hb_beamr:start(File), - ?assertEqual({ok, WASMPageSize * File1Pages}, hb_beamr_io:size(WASM)), - hb_beamr:stop(WASM), - {ok, File2} = file:read_file("test/aos-2-pure-xs.wasm"), - {ok, WASM2, _Imports2, _Exports2} = hb_beamr:start(File2), - ?assertEqual({ok, WASMPageSize * File2Pages}, hb_beamr_io:size(WASM2)), - hb_beamr:stop(WASM2). -``` - -### write_test - -Test writing memory in and out of bounds. - -```erlang -write_test() -> - % Load the `test-print' WASM module, which has a simple print function. -``` - -### read_test - -Test reading memory in and out of bounds. - -```erlang -read_test() -> - % Our `test-print' module is hand-written in WASM, so we know that it - % has a `Hello, World!` string at precisely offset 66. -``` - -### malloc_test - -Test allocating and freeing memory. 
- -```erlang -malloc_test() -> - {ok, File} = file:read_file("test/test-calling.wasm"), - {ok, WASM, _Imports, _Exports} = hb_beamr:start(File), - % Check that we can allocate memory inside the bounds of the WASM module. -``` - -### string_write_and_read_test - -Write and read strings to memory. - -```erlang -string_write_and_read_test() -> - {ok, File} = file:read_file("test/test-calling.wasm"), - {ok, WASM, _Imports, _Exports} = hb_beamr:start(File), - {ok, Ptr} = write_string(WASM, <<"Hello, World!">>), - ?assertEqual({ok, <<"Hello, World!">>}, read_string(WASM, Ptr)). -``` - ---- - -*Generated from [hb_beamr_io.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_beamr_io.erl)* diff --git a/docs/book/src/hb_cache.erl.md b/docs/book/src/hb_cache.erl.md deleted file mode 100644 index 356becb5d..000000000 --- a/docs/book/src/hb_cache.erl.md +++ /dev/null @@ -1,967 +0,0 @@ -# hb_cache - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_cache.erl) - -A cache of AO-Core protocol messages and compute results. -HyperBEAM stores all paths in key value stores, abstracted by the `hb_store` -module. Each store has its own storage backend, but each works with simple -key-value pairs. Each store can write binary keys at paths, and link between -paths. -There are three layers to HyperBEAMs internal data representation on-disk: -1. The raw binary data, written to the store at the hash of the content. - Storing binary paths in this way effectively deduplicates the data. -2. The hashpath-graph of all content, stored as a set of links between - hashpaths, their keys, and the data that underlies them. This allows - all messages to share the same hashpath space, such that all requests - from users additively fill-in the hashpath space, minimizing duplicated - compute. -3. Messages, referrable by their IDs (committed or uncommitted). These are - stored as a set of links commitment IDs and the uncommitted message. 
-Before writing a message to the store, we convert it to Type-Annotated -Binary Messages (TABMs), such that each of the keys in the message is -either a map or a direct binary. -Nested keys are lazily loaded from the stores, such that large deeply -nested messages where only a small part of the data is actually used are -not loaded into memory unnecessarily. In order to ensure that a message is -loaded from the cache after a `read`, we can use the `ensure_loaded/1` and -`ensure_all_loaded/1` functions. Ensure loaded will load the exact value -that has been requested, while ensure all loaded will load the entire -structure of the message into memory. -Lazily loadable `links` are expressed as a tuple of the following form: -`{link, ID, LinkOpts}`, where `ID` is the path to the data in the store, -and `LinkOpts` is a map of suggested options to use when loading the data. -In particular, this module ensures to stash the `store` option in `LinkOpts`, -such that the `read` function can use the correct store without having to -search unnecessarily. By providing an `Opts` argument to `ensure_loaded` or -`ensure_all_loaded`, the caller can specify additional options to use when -loading the data -- overriding the suggested options in the link. - ---- - -## Exported Functions - -- `ensure_all_loaded/1` -- `ensure_all_loaded/2` -- `ensure_loaded/1` -- `ensure_loaded/2` -- `link/3` -- `list_numbered/2` -- `list/2` -- `match/2` -- `read_resolved/3` -- `read/2` -- `test_signed/1` -- `test_unsigned/1` -- `write_binary/3` -- `write_hashpath/2` -- `write/2` - ---- - -### ensure_loaded - -A cache of AO-Core protocol messages and compute results. -Ensure that a value is loaded from the cache if it is an ID or a link. - -```erlang -ensure_loaded(Msg) -> - ensure_loaded(Msg, #{}). -``` - -### ensure_loaded - -```erlang -ensure_loaded(Msg, Opts) -> - ensure_loaded([], Msg, Opts). 
-``` - -### ensure_loaded - -```erlang -ensure_loaded(Ref, {Status, Msg}, Opts) when Status == ok; Status == error -> - {Status, ensure_loaded(Ref, Msg, Opts)}; -``` - -### ensure_loaded - -```erlang -ensure_loaded(Ref, - Lk = {link, ID, LkOpts = #{ <<"type">> := <<"link">>, <<"lazy">> := Lazy }}, - RawOpts) -> - % The link is to a submessage; either in lazy (unresolved) form, or direct - % form. -``` - -### ensure_loaded - -```erlang -ensure_loaded(Ref, Link = {link, ID, LinkOpts = #{ <<"lazy">> := true }}, RawOpts) -> - % If the user provided their own options, we merge them and _overwrite_ - % the options that are already set in the link. -``` - -### ensure_loaded - -```erlang -ensure_loaded(Ref, {link, ID, LinkOpts}, Opts) -> - ensure_loaded(Ref, {link, ID, LinkOpts#{ <<"lazy">> => true}}, Opts); -``` - -### ensure_loaded - -```erlang -ensure_loaded(_Ref, Msg, _Opts) when not ?IS_LINK(Msg) -> - Msg. -``` - -### report_ensure_loaded_not_found - -Report that a value was not found in the cache. If a key is provided, - -```erlang -report_ensure_loaded_not_found(Ref, Lk, Opts) -> - ?event(link_error, {link_not_resolvable, {ref, Ref}, {link, Lk}, {opts, Opts}}), - throw( - {necessary_message_not_found, - hb_path:to_binary(lists:reverse(Ref)), - hb_link:format_unresolved(Lk, Opts, 0) - } - ). -``` - -### ensure_all_loaded - -Ensure that all of the components of a message (whether a map, list, - -```erlang -ensure_all_loaded(Msg) -> - ensure_all_loaded(Msg, #{}). -``` - -### ensure_all_loaded - -```erlang -ensure_all_loaded(Msg, Opts) -> - ensure_all_loaded([], Msg, Opts). 
-``` - -### ensure_all_loaded - -```erlang -ensure_all_loaded(Ref, Link, Opts) when ?IS_LINK(Link) -> - ensure_all_loaded(Ref, ensure_loaded(Ref, Link, Opts), Opts); -``` - -### ensure_all_loaded - -```erlang -ensure_all_loaded(Ref, Msg, Opts) when is_map(Msg) -> - maps:map(fun(K, V) -> ensure_all_loaded([K|Ref], V, Opts) end, Msg); -``` - -### ensure_all_loaded - -```erlang -ensure_all_loaded(Ref, Msg, Opts) when is_list(Msg) -> - lists:map( - fun({N, V}) -> ensure_all_loaded([N|Ref], V, Opts) end, - hb_util:number(Msg) - ); -``` - -### ensure_all_loaded - -```erlang -ensure_all_loaded(Ref, Msg, Opts) -> - ensure_loaded(Ref, Msg, Opts). -``` - -### list_numbered - -List all items in a directory, assuming they are numbered. - -```erlang -list_numbered(Path, Opts) -> - SlotDir = hb_store:path(hb_opts:get(store, no_viable_store, Opts), Path), - [ hb_util:int(Name) || Name <- list(SlotDir, Opts) ]. -``` - -### list - -List all items under a given path. - -```erlang -list(Path, Opts) when is_map(Opts) and not is_map_key(<<"store-module">>, Opts) -> - case hb_opts:get(store, no_viable_store, Opts) of - not_found -> []; - Store -> - list(Path, Store) - end; -``` - -### list - -List all items under a given path. - -```erlang -list(Path, Store) -> - ResolvedPath = hb_store:resolve(Store, Path), - case hb_store:list(Store, ResolvedPath) of - {ok, Names} -> Names; - {error, _} -> []; - not_found -> [] - end. -``` - -### match - -Match a template message against the cache, returning a list of IDs - -```erlang -match(MatchSpec, Opts) -> - Spec = hb_message:convert(MatchSpec, tabm, <<"structured@1.0">>, Opts), - ConvertedMatchSpec = - maps:map( - fun(_, Value) -> - generate_binary_path(Value, Opts) - end, - maps:without([<<"ao-types">>], hb_ao:normalize_keys(Spec, Opts)) - ), - case hb_store:match(hb_opts:get(store, no_viable_store, Opts), ConvertedMatchSpec) of - {ok, Matches} -> {ok, Matches}; - _ -> not_found - end. 
-``` - -### generate_binary_path - -Generate the path at which a binary value should be stored. -Write a message to the cache. For raw binaries, we write the data at - -```erlang -generate_binary_path(Bin, Opts) -> - Hashpath = hb_path:hashpath(Bin, Opts), - <<"data/", Hashpath/binary>>. -``` - -### write - -Generate the path at which a binary value should be stored. -Write a message to the cache. For raw binaries, we write the data at - -```erlang -write(RawMsg, Opts) when is_map(RawMsg) -> - {ok, Msg} = hb_message:with_only_committed(RawMsg, Opts), - TABM = hb_message:convert(Msg, tabm, <<"structured@1.0">>, Opts), - ?event(debug_cache, {writing_full_message, {msg, TABM}}), - try - do_write_message( - TABM, - hb_opts:get(store, no_viable_store, Opts), - Opts - ) - catch - Type:Reason:Stacktrace -> - ?event(error, - {cache_write_error, - {type, Type}, - {reason, Reason}, - {stacktrace, {trace, Stacktrace}} - }, - Opts - ), - erlang:raise(Type, Reason, Stacktrace) - end; -``` - -### write - -Generate the path at which a binary value should be stored. -Write a message to the cache. For raw binaries, we write the data at - -```erlang -write(List, Opts) when is_list(List) -> - write(hb_message:convert(List, tabm, <<"structured@1.0">>, Opts), Opts); -``` - -### write - -Generate the path at which a binary value should be stored. -Write a message to the cache. For raw binaries, we write the data at - -```erlang -write(Bin, Opts) when is_binary(Bin) -> - do_write_message(Bin, hb_opts:get(store, no_viable_store, Opts), Opts). -``` - -### do_write_message - -```erlang -do_write_message(Bin, Store, Opts) when is_binary(Bin) -> - % Write the binary in the store at its calculated content-hash. 
-``` - -### do_write_message - -```erlang -do_write_message(List, Store, Opts) when is_list(List) -> - do_write_message( - hb_message:convert(List, tabm, <<"structured@1.0">>, Opts), - Store, - Opts - ); -``` - -### do_write_message - -```erlang -do_write_message(Msg, Store, Opts) when is_map(Msg) -> - ?event(debug_cache, {writing_message, Msg}), - % Calculate the IDs of the message. -``` - -### write_key - -Write a single key for a message into the store. - -```erlang -write_key(Base, <<"commitments">>, _HPAlg, RawCommitments, Store, Opts) -> - % The commitments are a special case: We calculate the single-part hashpath - % for the `baseID/commitments` key, then write each commitment to the store - % and link it to `baseCommHP/commitmentID`. -``` - -### write_key - -```erlang -write_key(Base, Key, HPAlg, Value, Store, Opts) -> - KeyHashPath = - hb_path:hashpath( - Base, - hb_path:to_binary(Key), - HPAlg, - Opts - ), - {ok, Path} = do_write_message(Value, Store, Opts), - hb_store:make_link(Store, Path, KeyHashPath), - {ok, Path}. -``` - -### prepare_commitments - -The `structured@1.0` encoder does not typically encode `commitments`, - -```erlang -prepare_commitments(RawCommitments, Opts) -> - Commitments = ensure_all_loaded(RawCommitments, Opts), - maps:map( - fun(_, StructuredCommitment) -> - hb_message:convert(StructuredCommitment, tabm, Opts) - end, - Commitments - ). -``` - -### commitment_path - -Generate the commitment path for a given base path. -Calculate the IDs for a message. - -```erlang -commitment_path(Base, Opts) -> - hb_path:hashpath(<>, Opts). -``` - -### calculate_all_ids - -Generate the commitment path for a given base path. -Calculate the IDs for a message. - -```erlang -calculate_all_ids(Bin, _Opts) when is_binary(Bin) -> []; -``` - -### calculate_all_ids - -Generate the commitment path for a given base path. -Calculate the IDs for a message. 
- -```erlang -calculate_all_ids(Msg, Opts) -> - Commitments = - hb_maps:without( - [<<"priv">>], - hb_maps:get(<<"commitments">>, Msg, #{}, Opts), - Opts - ), - CommIDs = hb_maps:keys(Commitments, Opts), - ?event({calculating_ids, {msg, Msg}, {commitments, Commitments}, {comm_ids, CommIDs}}), - All = hb_message:id(Msg, all, Opts#{ linkify_mode => discard }), - case lists:member(All, CommIDs) of - true -> CommIDs; - false -> [All | CommIDs] - end. -``` - -### write_hashpath - -Write a hashpath and its message to the store and link it. - -```erlang -write_hashpath(Msg = #{ <<"priv">> := #{ <<"hashpath">> := HP } }, Opts) -> - write_hashpath(HP, Msg, Opts); -``` - -### write_hashpath - -Write a hashpath and its message to the store and link it. - -```erlang -write_hashpath(MsgWithoutHP, Opts) -> - write(MsgWithoutHP, Opts). -``` - -### write_hashpath - -```erlang -write_hashpath(HP, Msg, Opts) when is_binary(HP) or is_list(HP) -> - Store = hb_opts:get(store, no_viable_store, Opts), - ?event({writing_hashpath, {hashpath, HP}, {msg, Msg}, {store, Store}}), - {ok, Path} = write(Msg, Opts), - hb_store:make_link(Store, Path, HP), - {ok, Path}. -``` - -### write_binary - -Write a raw binary keys into the store and link it at a given hashpath. - -```erlang -write_binary(Hashpath, Bin, Opts) -> - write_binary(Hashpath, Bin, hb_opts:get(store, no_viable_store, Opts), Opts). -``` - -### write_binary - -```erlang -write_binary(Hashpath, Bin, Store, Opts) -> - ?event({writing_binary, {hashpath, Hashpath}, {bin, Bin}, {store, Store}}), - {ok, Path} = do_write_message(Bin, Store, Opts), - hb_store:make_link(Store, Path, Hashpath), - {ok, Path}. -``` - -### read - -Read the message at a path. 
Returns in `structured@1.0` format: Either a - -```erlang -read(Path, Opts) -> - case store_read(Path, hb_opts:get(store, no_viable_store, Opts), Opts) of - not_found -> not_found; - {ok, Res} -> - %?event({applying_types_to_read_message, Res}), - %Structured = dev_codec_structured:to(Res), - %?event({finished_read, Structured}), - {ok, Res} - end. -``` - -### store_read - -List all of the subpaths of a given path and return a map of keys and - -```erlang -store_read(_Path, no_viable_store, _) -> - not_found; -``` - -### store_read - -List all of the subpaths of a given path and return a map of keys and - -```erlang -store_read(Path, Store, Opts) -> - ResolvedFullPath = hb_store:resolve(Store, PathBin = hb_path:to_binary(Path)), - ?event({read_resolved, - {original_path, {string, PathBin}}, - {resolved_path, ResolvedFullPath}, - {store, Store} - }), - case hb_store:type(Store, ResolvedFullPath) of - not_found -> not_found; - simple -> - ?event({reading_data, ResolvedFullPath}), - case hb_store:read(Store, ResolvedFullPath) of - {ok, Bin} -> {ok, Bin}; - not_found -> not_found - end; - composite -> - ?event({reading_composite, ResolvedFullPath}), - case hb_store:list(Store, ResolvedFullPath) of - {ok, RawSubpaths} -> - Subpaths = - lists:map(fun hb_util:bin/1, RawSubpaths), - ?event( - {listed, - {original_path, Path}, - {subpaths, {explicit, Subpaths}} - } - ), - % Generate links for all subpaths except `commitments' and - % `ao-types'. `commitments' is always read in its entirety, - % such that all messages have their IDs and signatures - % locally available. -``` - -### prepare_links - -Prepare a set of links from a listing of subpaths. - -```erlang -prepare_links(RootPath, Subpaths, Store, Opts) -> - {ok, Implicit, Types} = read_ao_types(RootPath, Subpaths, Store, Opts), - Res = - maps:from_list(lists:filtermap( - fun(<<"ao-types">>) -> false; - (<<"commitments">>) -> - % List the commitments for this message, and load them into - % memory. 
If there no commitments at the path, we exclude - % commitments from the list of links. -``` - -### read_ao_types - -Read and parse the ao-types for a given path if it is in the supplied - -```erlang -read_ao_types(Path, Subpaths, Store, Opts) -> - ?event({reading_ao_types, {path, Path}, {subpaths, {explicit, Subpaths}}}), - case lists:member(<<"ao-types">>, Subpaths) of - true -> - {ok, TypesBin} = - hb_store:read( - Store, - hb_store:path(Store, [Path, <<"ao-types">>]) - ), - Types = dev_codec_structured:decode_ao_types(TypesBin, Opts), - ?event({parsed_ao_types, {types, Types}}), - {ok, types_to_implicit(Types), Types}; - false -> - ?event({no_ao_types_key_found, {path, Path}, {subpaths, Subpaths}}), - {ok, #{}, #{}} - end. -``` - -### types_to_implicit - -Convert a map of ao-types to an implicit map of types. - -```erlang -types_to_implicit(Types) -> - maps:filtermap( - fun(_K, <<"empty-message">>) -> {true, #{}}; - (_K, <<"empty-list">>) -> {true, []}; - (_K, <<"empty-binary">>) -> {true, <<>>}; - (_, _) -> false - end, - Types - ). -``` - -### read_resolved - -Read the output of a prior computation, given Msg1, Msg2, and some - -```erlang -read_resolved(MsgID1, MsgID2, Opts) when ?IS_ID(MsgID1) and ?IS_ID(MsgID2) -> - ?event({cache_lookup, {msg1, MsgID1}, {msg2, MsgID2}, {opts, Opts}}), - read(<>, Opts); -``` - -### read_resolved - -Read the output of a prior computation, given Msg1, Msg2, and some - -```erlang -read_resolved(MsgID1, Msg2, Opts) when ?IS_ID(MsgID1) and is_map(Msg2) -> - {ok, MsgID2} = dev_message:id(Msg2, #{ <<"committers">> => <<"all">> }, Opts), - read(<>, Opts); -``` - -### read_resolved - -Read the output of a prior computation, given Msg1, Msg2, and some - -```erlang -read_resolved(Msg1, Msg2, Opts) when is_map(Msg1) and is_map(Msg2) -> - read(hb_path:hashpath(Msg1, Msg2, Opts), Opts); -``` - -### read_resolved - -Read the output of a prior computation, given Msg1, Msg2, and some -Make a link from one path to another in the store. 
- -```erlang -read_resolved(_, _, _) -> not_found. -``` - -### link - -Read the output of a prior computation, given Msg1, Msg2, and some -Make a link from one path to another in the store. - -```erlang -link(Existing, New, Opts) -> - hb_store:make_link( - hb_opts:get(store, no_viable_store, Opts), - Existing, - New - ). -``` - -### test_unsigned - -```erlang -test_unsigned(Data) -> - #{ - <<"base-test-key">> => <<"base-test-value">>, - <<"other-test-key">> => Data - }. -``` - -### test_signed - -```erlang -test_signed(Data) -> test_signed(Data, ar_wallet:new()). -``` - -### test_signed - -```erlang -test_signed(Data, Wallet) -> - hb_message:commit(test_unsigned(Data), Wallet). -``` - -### test_store_binary - -```erlang -test_store_binary(Store) -> - Bin = <<"Simple unsigned data item">>, - ?event(debug_store_test, {store, Store}), - Opts = #{ store => Store }, - {ok, ID} = write(Bin, Opts), - {ok, RetrievedBin} = read(ID, Opts), - ?assertEqual(Bin, RetrievedBin). -``` - -### test_store_unsigned_empty_message - -```erlang -test_store_unsigned_empty_message(Store) -> - ?event(debug_store_test, {store, Store}), - hb_store:reset(Store), - Item = #{}, - Opts = #{ store => Store }, - {ok, Path} = write(Item, Opts), - {ok, RetrievedItem} = read(Path, Opts), - ?event( - {retrieved_item, - {path, {string, Path}}, - {expected, Item}, - {got, RetrievedItem} - } - ), - MatchRes = hb_message:match(Item, RetrievedItem, strict, Opts), - ?event({match_result, MatchRes}), - ?assert(MatchRes). 
-``` - -### test_store_unsigned_nested_empty_message - -```erlang -test_store_unsigned_nested_empty_message(Store) -> - ?event(debug_store_test, {store, Store}), - hb_store:reset(Store), - Item = - #{ <<"layer1">> => - #{ <<"layer2">> => - #{ <<"layer3">> => - #{ <<"a">> => <<"b">>} - }, - <<"layer3b">> => #{ <<"c">> => <<"d">>}, - <<"layer3c">> => #{} - } - }, - Opts = #{ store => Store }, - {ok, Path} = write(Item, Opts), - {ok, RetrievedItem} = read(Path, Opts), - ?assert(hb_message:match(Item, RetrievedItem, strict, Opts)). -``` - -### test_store_simple_unsigned_message - -Test storing and retrieving a simple unsigned item - -```erlang -test_store_simple_unsigned_message(Store) -> - Item = test_unsigned(<<"Simple unsigned data item">>), - ?event(debug_store_test, {store, Store}), - Opts = #{ store => Store }, - %% Write the simple unsigned item - {ok, _Path} = write(Item, Opts), - %% Read the item back - ID = hb_util:human_id(hb_ao:get(id, Item)), - {ok, RetrievedItem} = read(ID, Opts), - ?assert(hb_message:match(Item, RetrievedItem, strict, Opts)), - ok. -``` - -### test_store_ans104_message - -```erlang -test_store_ans104_message(Store) -> - ?event(debug_store_test, {store, Store}), - hb_store:reset(Store), - Opts = #{ store => Store }, - Item = #{ <<"type">> => <<"ANS104">>, <<"content">> => <<"Hello, world!">> }, - Committed = hb_message:commit(Item, hb:wallet()), - {ok, _Path} = write(Committed, Opts), - CommittedID = hb_util:human_id(hb_message:id(Committed, all)), - UncommittedID = hb_util:human_id(hb_message:id(Committed, none)), - ?event({test_message_ids, {uncommitted, UncommittedID}, {committed, CommittedID}}), - {ok, RetrievedItem} = read(CommittedID, Opts), - {ok, RetrievedItemU} = read(UncommittedID, Opts), - ?assert(hb_message:match(Committed, RetrievedItem, strict, Opts)), - ?assert(hb_message:match(Committed, RetrievedItemU, strict, Opts)), - ok. 
-``` - -### test_store_simple_signed_message - -Test storing and retrieving a simple unsigned item - -```erlang -test_store_simple_signed_message(Store) -> - ?event(debug_store_test, {store, Store}), - Opts = #{ store => Store }, - hb_store:reset(Store), - Wallet = ar_wallet:new(), - Address = hb_util:human_id(ar_wallet:to_address(Wallet)), - Item = test_signed(<<"Simple signed data item">>, Wallet), - ?event({writing_test_message, Item}), - %% Write the simple unsigned item - {ok, _Path} = write(Item, Opts), - % %% Read the item back - % {ok, UID} = dev_message:id(Item, #{ <<"committers">> => <<"none">> }, Opts), - % {ok, RetrievedItemUnsig} = read(UID, Opts), - % ?event({retreived_unsigned_message, {expected, Item}, {got, RetrievedItemUnsig}}), - % MatchRes = hb_message:match(Item, RetrievedItemUnsig, strict, Opts), - % ?event({match_result, MatchRes}), - % ?assert(MatchRes), - {ok, CommittedID} = dev_message:id(Item, #{ <<"committers">> => [Address] }, Opts), - {ok, RetrievedItemSigned} = read(CommittedID, Opts), - ?event({retreived_signed_message, {expected, Item}, {got, RetrievedItemSigned}}), - MatchResSigned = hb_message:match(Item, RetrievedItemSigned, strict, Opts), - ?event({match_result_signed, MatchResSigned}), - ?assert(MatchResSigned), - ok. 
-``` - -### test_deeply_nested_complex_message - -Test deeply nested item storage and retrieval - -```erlang -test_deeply_nested_complex_message(Store) -> - ?event(debug_store_test, {store, Store}), - hb_store:reset(Store), - Wallet = ar_wallet:new(), - Opts = #{ store => Store, priv_wallet => Wallet }, - %% Create nested data - Level3SignedSubmessage = test_signed([1,2,3], Opts#{priv_wallet => Wallet}), - Outer = - hb_message:commit( - #{ - <<"level1">> => - InnerSigned = hb_message:commit( - #{ - <<"level2">> => - #{ - <<"level3">> => Level3SignedSubmessage, - <<"e">> => <<"f">>, - <<"z">> => [1,2,3] - }, - <<"c">> => <<"d">>, - <<"g">> => [<<"h">>, <<"i">>], - <<"j">> => 1337 - }, - Opts - ), - <<"a">> => <<"b">> - }, - Opts - ), - UID = hb_message:id(Outer, none, Opts), - ?event({string, <<"================================================">>}), - CommittedID = hb_message:id(Outer, signed, Opts), - ?event({string, <<"================================================">>}), - ?event({test_message_ids, {uncommitted, UID}, {committed, CommittedID}}), - %% Write the nested item - {ok, _} = write(Outer, Opts), - %% Read the deep value back using subpath - OuterID = hb_util:human_id(UID), - {ok, OuterMsg} = read(OuterID, Opts), - EnsuredLoadedOuter = hb_cache:ensure_all_loaded(OuterMsg, Opts), - ?event({deep_message, {explicit, EnsuredLoadedOuter}}), - %% Assert that the retrieved item matches the original deep value - ?assertEqual( - [1,2,3], - hb_ao:get( - <<"level1/level2/level3/other-test-key">>, - EnsuredLoadedOuter, - Opts - ) - ), - ?event( - {deep_message_match, - {read, EnsuredLoadedOuter}, - {write, Level3SignedSubmessage} - } - ), - ?event({reading_committed_outer, {id, CommittedID}, {expect, Outer}}), - {ok, CommittedMsg} = read(hb_util:human_id(CommittedID), Opts), - EnsuredLoadedCommitted = hb_cache:ensure_all_loaded(CommittedMsg, Opts), - ?assertEqual( - [1,2,3], - hb_ao:get( - <<"level1/level2/level3/other-test-key">>, - EnsuredLoadedCommitted, - Opts - 
) - ). -``` - -### test_message_with_list - -```erlang -test_message_with_list(Store) -> - hb_store:reset(Store), - Opts = #{ store => Store }, - Msg = test_unsigned([<<"a">>, <<"b">>, <<"c">>]), - ?event({writing_message, Msg}), - {ok, Path} = write(Msg, Opts), - {ok, RetrievedItem} = read(Path, Opts), - ?assert(hb_message:match(Msg, RetrievedItem, strict, Opts)). -``` - -### test_match_message - -```erlang -test_match_message(Store) when map_get(<<"store-module">>, Store) =/= hb_store_lmdb -> - skip; -``` - -### test_match_message - -```erlang -test_match_message(Store) -> - hb_store:reset(Store), - Opts = #{ store => Store }, - % Write two messages that match the template, and a third that does not. -``` - -### test_match_linked_message - -```erlang -test_match_linked_message(Store) when map_get(<<"store-module">>, Store) =/= hb_store_lmdb -> - skip; -``` - -### test_match_linked_message - -```erlang -test_match_linked_message(Store) -> - hb_store:reset(Store), - Opts = #{ store => Store }, - Msg = #{ <<"a">> => Inner = #{ <<"b">> => <<"c">>, <<"d">> => <<"e">> } }, - {ok, _ID} = write(Msg, Opts), - {ok, [MatchedID]} = match(#{ <<"b">> => <<"c">> }, Opts), - {ok, Read1} = read(MatchedID, Opts), - ?assertEqual( - #{ <<"b">> => <<"c">>, <<"d">> => <<"e">> }, - hb_cache:ensure_all_loaded(Read1, Opts) - ), - {ok, [MatchedID2]} = match(#{ <<"a">> => Inner }, Opts), - {ok, Read2} = read(MatchedID2, Opts), - ?assertEqual(#{ <<"a">> => Inner }, ensure_all_loaded(Read2, Opts)). -``` - -### test_match_typed_message - -```erlang -test_match_typed_message(Store) when map_get(<<"store-module">>, Store) =/= hb_store_lmdb -> - skip; -``` - -### test_match_typed_message - -```erlang -test_match_typed_message(Store) -> - hb_store:reset(Store), - Opts = #{ store => Store }, - % Add some messages that should not match the template, as well as the main - % message that should match the template. 
-``` - -### cache_suite_test_ - -```erlang -cache_suite_test_() -> - hb_store:generate_test_suite([ - {"store unsigned empty message", - fun test_store_unsigned_empty_message/1}, - {"store binary", fun test_store_binary/1}, - {"store unsigned nested empty message", - fun test_store_unsigned_nested_empty_message/1}, - {"store simple unsigned message", fun test_store_simple_unsigned_message/1}, - {"store simple signed message", fun test_store_simple_signed_message/1}, - {"deeply nested complex message", fun test_deeply_nested_complex_message/1}, - {"message with list", fun test_message_with_list/1}, - {"match message", fun test_match_message/1}, - {"match linked message", fun test_match_linked_message/1}, - {"match typed message", fun test_match_typed_message/1} - ]). -``` - -### test_device_map_cannot_be_written_test - -Test that message whose device is `#{}` cannot be written. If it were to - -```erlang -test_device_map_cannot_be_written_test() -> - try - Opts = #{ store => StoreOpts = - [#{ <<"store-module">> => hb_store_fs, <<"name">> => <<"cache-TEST">> }] }, - hb_store:reset(StoreOpts), - Danger = #{ <<"device">> => #{}}, - write(Danger, Opts), - ?assert(false) - catch - _:_:_ -> ?assert(true) - end. -``` - -### run_test - -Run a specific test with a given store module. - -```erlang -run_test() -> - Store = hb_test_utils:test_store(hb_store_lmdb), -``` - ---- - -*Generated from [hb_cache.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_cache.erl)* diff --git a/docs/book/src/hb_cache_control.erl.md b/docs/book/src/hb_cache_control.erl.md deleted file mode 100644 index 1a3146606..000000000 --- a/docs/book/src/hb_cache_control.erl.md +++ /dev/null @@ -1,554 +0,0 @@ -# hb_cache_control - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_cache_control.erl) - -Cache control logic for the AO-Core resolver. It derives cache settings -from request, response, execution-local node Opts, as well as the global -node Opts. 
It applies these settings when asked to maybe store/lookup in -response to a request. - ---- - -## Exported Functions - -- `maybe_lookup/3` -- `maybe_store/4` - ---- - -### maybe_store - -Cache control logic for the AO-Core resolver. It derives cache settings -Write a resulting M3 message to the cache if requested. The precedence - -```erlang -maybe_store(Msg1, Msg2, Msg3, Opts) -> - case derive_cache_settings([Msg3, Msg2], Opts) of - #{ <<"store">> := true } -> - ?event(caching, {caching_result, {msg1, Msg1}, {msg2, Msg2}, {msg3, Msg3}}), - dispatch_cache_write(Msg1, Msg2, Msg3, Opts); - _ -> - not_caching - end. -``` - -### maybe_lookup - -Handles cache lookup, modulated by the caching options requested by - -```erlang -maybe_lookup(Msg1, Msg2, Opts) -> - case exec_likely_faster_heuristic(Msg1, Msg2, Opts) of - true -> - ?event(caching, {skip_cache_check, exec_likely_faster_heuristic}), - {continue, Msg1, Msg2}; - false -> lookup(Msg1, Msg2, Opts) - end. -``` - -### lookup - -```erlang -lookup(Msg1, Msg2, Opts) -> - case derive_cache_settings([Msg1, Msg2], Opts) of - #{ <<"lookup">> := false } -> - ?event({skip_cache_check, lookup_disabled}), - {continue, Msg1, Msg2}; - Settings = #{ <<"lookup">> := true } -> - OutputScopedOpts = - hb_store:scope( - Opts, - hb_opts:get(store_scope_resolved, local, Opts) - ), - case hb_cache:read_resolved(Msg1, Msg2, OutputScopedOpts) of - {ok, Msg3} -> - ?event(caching, - {cache_hit, - case is_binary(Msg3) of - true -> hb_path:hashpath(Msg1, Msg2, Opts); - false -> hb_path:hashpath(Msg3, Opts) - end, - {msg1, Msg1}, - {msg2, Msg2}, - {msg3, Msg3} - } - ), - {ok, Msg3}; - not_found -> - ?event(caching, {result_cache_miss, Msg1, Msg2}), - case Settings of - #{ <<"only-if-cached">> := true } -> - only_if_cached_not_found_error(Msg1, Msg2, Opts); - _ -> - case ?IS_ID(Msg1) of - false -> {continue, Msg1, Msg2}; - true -> - case hb_cache:read(Msg1, Opts) of - {ok, FullMsg1} -> - ?event(load_message, - {cache_hit_base_message_load, - 
{base_id, Msg1}, - {base_loaded, FullMsg1} - } - ), - {continue, FullMsg1, Msg2}; - not_found -> - necessary_messages_not_found_error( - Msg1, - Msg2, - Opts - ) - end - end - end - end - end. -``` - -### dispatch_cache_write - -Dispatch the cache write to a worker process if requested. - -```erlang -dispatch_cache_write(Msg1, Msg2, Msg3, Opts) -> - case hb_opts:get(async_cache, false, Opts) of - true -> - find_or_spawn_async_writer(Opts) ! {write, Msg1, Msg2, Msg3, Opts}, - ok; - false -> - perform_cache_write(Msg1, Msg2, Msg3, Opts) - end. -``` - -### find_or_spawn_async_writer - -Find our async cacher process, or spawn one if none exists. - -```erlang -find_or_spawn_async_writer(_Opts) -> - case erlang:get({hb_cache_control, async_writer}) of - undefined -> - PID = spawn(fun() -> async_writer() end), - erlang:put({hb_cache_control, async_writer}, PID), - PID; - PID -> - PID - end. -``` - -### async_writer - -Optional worker process to write messages to the cache. - -```erlang -async_writer() -> - receive - {write, Msg1, Msg2, Msg3, Opts} -> - perform_cache_write(Msg1, Msg2, Msg3, Opts); - stop -> ok - end. -``` - -### perform_cache_write - -Internal function to write a compute result to the cache. - -```erlang -perform_cache_write(Msg1, Msg2, Msg3, Opts) -> - hb_cache:write(Msg1, Opts), - hb_cache:write(Msg2, Opts), - case Msg3 of - <<_/binary>> -> - hb_cache:write_binary( - hb_path:hashpath(Msg1, Msg2, Opts), - Msg3, - Opts - ); - Map when is_map(Map) -> - hb_cache:write(Msg3, Opts); - _ -> - ?event({cannot_write_result, Msg3}), - skip_caching - end. 
-``` - -### only_if_cached_not_found_error - -Generate a message to return when `only_if_cached` was specified, and - -```erlang -only_if_cached_not_found_error(Msg1, Msg2, Opts) -> - ?event( - caching, - {only_if_cached_execution_failed, {msg1, Msg1}, {msg2, Msg2}}, - Opts - ), - {error, - #{ - <<"status">> => 504, - <<"cache-status">> => <<"miss">>, - <<"body">> => - <<"Computed result not available in cache.">> - } - }. -``` - -### necessary_messages_not_found_error - -Generate a message to return when the necessary messages to execute a - -```erlang -necessary_messages_not_found_error(Msg1, Msg2, Opts) -> - ?event( - load_message, - {necessary_messages_not_found, {msg1, Msg1}, {msg2, Msg2}}, - Opts - ), - {error, - #{ - <<"status">> => 404, - <<"body">> => - <<"Necessary messages not found in cache.">> - } - }. -``` - -### exec_likely_faster_heuristic - -Determine whether we are likely to be faster looking up the result in - -```erlang -exec_likely_faster_heuristic(M1, _M2, _) when (not ?IS_ID(M1)) -> - true; -``` - -### exec_likely_faster_heuristic - -Determine whether we are likely to be faster looking up the result in - -```erlang -exec_likely_faster_heuristic({as, _, Msg1}, Msg2, Opts) -> - exec_likely_faster_heuristic(Msg1, Msg2, Opts); -``` - -### exec_likely_faster_heuristic - -Determine whether we are likely to be faster looking up the result in - -```erlang -exec_likely_faster_heuristic(Msg1, Msg2, Opts) -> - case hb_opts:get(cache_lookup_hueristics, true, Opts) of - false -> false; - true -> - case ?IS_ID(Msg1) of - true -> false; - false -> is_explicit_lookup(Msg1, Msg2, Opts) - end - end. -``` - -### is_explicit_lookup - -```erlang -is_explicit_lookup(Msg1, #{ <<"path">> := Key }, Opts) -> - % For now, just check whether the key is explicitly in the map. That is - % a good signal that we will likely be asked by the device to grab it. 
-``` - -### derive_cache_settings - -Derive cache settings from a series of option sources and the opts, - -```erlang -derive_cache_settings(SourceList, Opts) -> - lists:foldr( - fun(Source, Acc) -> - maybe_set(Acc, cache_source_to_cache_settings(Source, Opts), Opts) - end, - #{ <<"store">> => ?DEFAULT_STORE_OPT, <<"lookup">> => ?DEFAULT_LOOKUP_OPT }, - [{opts, Opts}|lists:filter(fun erlang:is_map/1, SourceList)] - ). -``` - -### maybe_set - -Takes a key and two maps, returning the first map with the key set to - -```erlang -maybe_set(Map1, Map2, Opts) -> - lists:foldl( - fun(Key, AccMap) -> - case hb_maps:get(Key, Map2, undefined, Opts) of - undefined -> AccMap; - Value -> hb_maps:put(Key, Value, AccMap, Opts) - end - end, - Map1, - hb_maps:keys(Map2, Opts) - ). -``` - -### cache_source_to_cache_settings - -Convert a cache source to a cache setting. The setting _must_ always be - -```erlang -cache_source_to_cache_settings({opts, Opts}, _) -> - CCMap = specifiers_to_cache_settings(hb_opts:get(cache_control, [], Opts)), - case hb_opts:get(hashpath, update, Opts) of - ignore -> CCMap#{ <<"store">> => false }; - _ -> CCMap - end; -``` - -### cache_source_to_cache_settings - -Convert a cache source to a cache setting. The setting _must_ always be - -```erlang -cache_source_to_cache_settings(Msg, Opts) -> - case dev_message:get(<<"cache-control">>, Msg, Opts) of - {ok, CC} -> specifiers_to_cache_settings(CC); - {error, not_found} -> #{} - end. 
-``` - -### specifiers_to_cache_settings - -Convert a cache control list as received via HTTP headers into a - -```erlang -specifiers_to_cache_settings(CCSpecifier) when not is_list(CCSpecifier) -> - specifiers_to_cache_settings([CCSpecifier]); -``` - -### specifiers_to_cache_settings - -Convert a cache control list as received via HTTP headers into a - -```erlang -specifiers_to_cache_settings(RawCCList) -> - CCList = lists:map(fun hb_ao:normalize_key/1, RawCCList), - #{ - <<"store">> => - case lists:member(<<"always">>, CCList) of - true -> true; - false -> - case lists:member(<<"no-store">>, CCList) of - true -> false; - false -> - case lists:member(<<"store">>, CCList) of - true -> true; - false -> undefined - end - end - end, - <<"lookup">> => - case lists:member(<<"always">>, CCList) of - true -> true; - false -> - case lists:member(<<"no-cache">>, CCList) of - true -> false; - false -> - case lists:member(<<"cache">>, CCList) of - true -> true; - false -> undefined - end - end - end, - <<"only-if-cached">> => - case lists:member(<<"only-if-cached">>, CCList) of - true -> true; - false -> undefined - end - }. -``` - -### msg_with_cc - -```erlang -msg_with_cc(CC) -> #{ <<"cache-control">> => CC }. -``` - -### opts_with_cc - -```erlang -opts_with_cc(CC) -> #{ cache_control => CC }. -%% Test precedence order (Opts > Msg3 > Msg2) -``` - -### opts_override_message_settings_test - -```erlang -opts_override_message_settings_test() -> - Msg2 = msg_with_cc([<<"no-store">>]), - Msg3 = msg_with_cc([<<"no-cache">>]), - Opts = opts_with_cc([<<"always">>]), - Result = derive_cache_settings([Msg3, Msg2], Opts), - ?assertEqual(#{<<"store">> => true, <<"lookup">> => true}, Result). 
-``` - -### msg_precidence_overrides_test - -```erlang -msg_precidence_overrides_test() -> - Msg2 = msg_with_cc([<<"always">>]), - Msg3 = msg_with_cc([<<"no-store">>]), % No restrictions - Result = derive_cache_settings([Msg3, Msg2], opts_with_cc([])), - ?assertEqual(#{<<"store">> => false, <<"lookup">> => true}, Result). -%% Test specific directives -``` - -### no_store_directive_test - -```erlang -no_store_directive_test() -> - Msg = msg_with_cc([<<"no-store">>]), - Result = derive_cache_settings([Msg], opts_with_cc([])), - ?assertEqual(#{<<"store">> => false, <<"lookup">> => ?DEFAULT_LOOKUP_OPT}, Result). -``` - -### no_cache_directive_test - -```erlang -no_cache_directive_test() -> - Msg = msg_with_cc([<<"no-cache">>]), - Result = derive_cache_settings([Msg], opts_with_cc([])), - ?assertEqual(#{<<"store">> => ?DEFAULT_STORE_OPT, <<"lookup">> => false}, Result). -``` - -### only_if_cached_directive_test - -```erlang -only_if_cached_directive_test() -> - Msg = msg_with_cc([<<"only-if-cached">>]), - Result = derive_cache_settings([Msg], opts_with_cc([])), - ?assertEqual( - #{ - <<"store">> => ?DEFAULT_STORE_OPT, - <<"lookup">> => ?DEFAULT_LOOKUP_OPT, - <<"only-if-cached">> => true - }, - Result - ). -``` - -### hashpath_ignore_prevents_storage_test - -```erlang -hashpath_ignore_prevents_storage_test() -> - Opts = (opts_with_cc([]))#{hashpath => ignore}, - Result = derive_cache_settings([], Opts), - ?assertEqual(#{<<"store">> => ?DEFAULT_STORE_OPT, <<"lookup">> => ?DEFAULT_LOOKUP_OPT}, Result). -%% Test multiple directives -``` - -### multiple_directives_test - -```erlang -multiple_directives_test() -> - Msg = msg_with_cc([<<"no-store">>, <<"no-cache">>, <<"only-if-cached">>]), - Result = derive_cache_settings([Msg], opts_with_cc([])), - ?assertEqual( - #{ - <<"store">> => false, - <<"lookup">> => false, - <<"only-if-cached">> => true - }, - Result - ). 
-``` - -### empty_message_list_test - -```erlang -empty_message_list_test() -> - Result = derive_cache_settings([], opts_with_cc([])), - ?assertEqual(#{<<"store">> => ?DEFAULT_STORE_OPT, <<"lookup">> => ?DEFAULT_LOOKUP_OPT}, Result). -``` - -### message_without_cache_control_test - -```erlang -message_without_cache_control_test() -> - Result = derive_cache_settings([#{}], opts_with_cc([])), - ?assertEqual(#{<<"store">> => ?DEFAULT_STORE_OPT, <<"lookup">> => ?DEFAULT_LOOKUP_OPT}, Result). -%% Test the cache_source_to_cache_setting function directly -``` - -### opts_source_cache_control_test - -```erlang -opts_source_cache_control_test() -> - Result = - cache_source_to_cache_settings( - {opts, opts_with_cc([<<"no-store">>])}, - #{} - ), - ?assertEqual(#{ - <<"store">> => false, - <<"lookup">> => undefined, - <<"only-if-cached">> => undefined - }, Result). -``` - -### message_source_cache_control_test - -```erlang -message_source_cache_control_test() -> - Msg = msg_with_cc([<<"no-cache">>]), - Result = cache_source_to_cache_settings(Msg, #{}), - ?assertEqual(#{ - <<"store">> => undefined, - <<"lookup">> => false, - <<"only-if-cached">> => undefined - }, Result). -``` - -### cache_binary_result_test - -```erlang -cache_binary_result_test() -> - CachedMsg = <<"test-message">>, - Msg1 = #{ <<"test-key">> => CachedMsg }, - Msg2 = <<"test-key">>, - {ok, Res} = hb_ao:resolve(Msg1, Msg2, #{ cache_control => [<<"always">>] }), - ?assertEqual(CachedMsg, Res), - {ok, Res2} = hb_ao:resolve(Msg1, Msg2, #{ cache_control => [<<"only-if-cached">>] }), - {ok, Res3} = hb_ao:resolve(Msg1, Msg2, #{ cache_control => [<<"only-if-cached">>] }), - ?assertEqual(CachedMsg, Res2), - ?assertEqual(Res2, Res3). 
-``` - -### cache_message_result_test - -```erlang -cache_message_result_test() -> - CachedMsg = - #{ - <<"purpose">> => <<"Test-Message">>, - <<"aux">> => #{ <<"aux-message">> => <<"Aux-Message-Value">> }, - <<"test-key">> => rand:uniform(1000000) - }, - Msg1 = #{ <<"test-key">> => CachedMsg, <<"local">> => <<"Binary">> }, - Msg2 = <<"test-key">>, - {ok, Res} = - hb_ao:resolve( - Msg1, - Msg2, - #{ - cache_control => [<<"always">>] - } - ), - ?event({res1, Res}), - ?event(reading_from_cache), - {ok, Res2} = hb_ao:resolve(Msg1, Msg2, #{ cache_control => [<<"only-if-cached">>] }), - ?event(reading_from_cache_again), - {ok, Res3} = hb_ao:resolve(Msg1, Msg2, #{ cache_control => [<<"only-if-cached">>] }), - ?event({res2, Res2}), - ?event({res3, Res3}), -``` - ---- - -*Generated from [hb_cache_control.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_cache_control.erl)* diff --git a/docs/book/src/hb_cache_render.erl.md b/docs/book/src/hb_cache_render.erl.md deleted file mode 100644 index 8cc1bd236..000000000 --- a/docs/book/src/hb_cache_render.erl.md +++ /dev/null @@ -1,511 +0,0 @@ -# hb_cache_render - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_cache_render.erl) - -A module that helps to render given Key graphs into the .dot files - ---- - -## Exported Functions - -- `cache_path_to_dot/2` -- `cache_path_to_dot/3` -- `cache_path_to_graph/3` -- `dot_to_svg/1` -- `get_graph_data/3` -- `prepare_signed_data/0` -- `prepare_unsigned_data/0` -- `render/1` -- `render/2` - ---- - -### render - -A module that helps to render given Key graphs into the .dot files -Render the given Key into svg - -```erlang -render(StoreOrOpts) -> - render(all, StoreOrOpts). 
-``` - -### render - -```erlang -render(ToRender, StoreOrOpts) -> - % Collect graph elements (nodes and arcs) by traversing the store - % Generate and view the graph visualization - % Write SVG to file and open it - file:write_file("new_render_diagram.svg", - dot_to_svg(cache_path_to_dot(ToRender, StoreOrOpts))), - os:cmd("open new_render_diagram.svg"), - ok. -``` - -### cache_path_to_dot - -Generate a dot file from a cache path and options/store - -```erlang -cache_path_to_dot(ToRender, StoreOrOpts) -> - cache_path_to_dot(ToRender, #{}, StoreOrOpts). -``` - -### cache_path_to_dot - -```erlang -cache_path_to_dot(ToRender, RenderOpts, StoreOrOpts) -> - graph_to_dot(cache_path_to_graph(ToRender, RenderOpts, StoreOrOpts), StoreOrOpts). -``` - -### cache_path_to_graph - -Main function to collect graph elements - -```erlang -cache_path_to_graph(ToRender, GraphOpts, StoreOrOpts) when is_map(StoreOrOpts) -> - Store = hb_opts:get(store, no_viable_store, StoreOrOpts), - ?event({store, Store}), - cache_path_to_graph(ToRender, GraphOpts, Store, StoreOrOpts). -``` - -### cache_path_to_graph - -```erlang -cache_path_to_graph(all, GraphOpts, Store, Opts) -> - Keys = - case hb_store:list(Store, <<"/">>) of - {ok, KeyList} -> KeyList; - not_found -> [] - end, - ?event({all_keys, Keys}), - cache_path_to_graph(Store, GraphOpts, Keys, Opts); -``` - -### cache_path_to_graph - -```erlang -cache_path_to_graph(InitPath, GraphOpts, Store, Opts) when is_binary(InitPath) -> - cache_path_to_graph(Store, GraphOpts, [InitPath], Opts); -``` - -### cache_path_to_graph - -```erlang -cache_path_to_graph(Store, GraphOpts, RootKeys, Opts) -> - % Use a map to track nodes, arcs and visited paths (to avoid cycles) - EmptyGraph = GraphOpts#{ nodes => #{}, arcs => #{}, visited => #{} }, - % Process all root keys and get the final graph - lists:foldl( - fun(Key, Acc) -> traverse_store(Store, Key, undefined, Acc, Opts) end, - EmptyGraph, - RootKeys - ). 
-``` - -### traverse_store - -Traverse the store recursively to build the graph - -```erlang -traverse_store(Store, Path, Parent, Graph, Opts) -> - % Get the path and check if we've already visited it - JoinedPath = hb_store:join(Path), - ResolvedPath = - case hb_link:is_link_key(JoinedPath) of - true -> - ?event({is_link_key, {path, Path}, {res_path, JoinedPath}}), - {ok, Link} = hb_store:read(Store, hb_store:resolve(Store, JoinedPath)), - ?event({resolved_link, {read, Link}}), - hb_store:resolve(Store, Link); - false -> hb_store:resolve(Store, Path) - end, - ?event({traverse_store, {path, Path}, {joined_path, JoinedPath}, {resolved_path, ResolvedPath}, {parent, Parent}}), - % Skip if we've already processed this node - case hb_maps:get(visited, Graph, #{}, Opts) of - #{ JoinedPath := _ } -> Graph; - _ -> - % Mark as visited to avoid cycles - Graph1 = Graph#{visited => hb_maps:put(JoinedPath, true, hb_maps:get(visited, Graph, #{}, Opts), Opts)}, - % ?event({traverse_store, {key, Key}, {graph1, Graph1}}), - % Process node based on its type - case hb_store:type(Store, ResolvedPath) of - simple -> - process_simple_node(Store, Path, Parent, ResolvedPath, JoinedPath, Graph1, Opts); - composite -> - process_composite_node(Store, Path, Parent, ResolvedPath, JoinedPath, Graph1, Opts); - _ -> - ?event({unknown_node_type, {path, Path}, {type, hb_store:type(Store, Path)}}), - Graph1 - end - end. 
-``` - -### process_simple_node - -Process a simple (leaf) node - -```erlang -process_simple_node(_Store, _Key, Parent, ResolvedPath, JoinedPath, Graph, Opts) -> - % ?event({process_simple_node, {key, Key}, {resolved_path, ResolvedPath}}), - % Add the node to the graph - case hb_maps:get(render_data, Graph, true, Opts) of - false -> Graph; - true -> - Graph1 = add_node(Graph, ResolvedPath, "lightblue", Opts), - % If we have a parent, add an arc from parent to this node - case Parent of - undefined -> Graph1; - ParentPath -> - Label = extract_label(JoinedPath), - add_arc(Graph1, ParentPath, ResolvedPath, Label, Opts) - end - end. -``` - -### process_composite_node - -Process a composite (directory) node - -```erlang -process_composite_node(_Store, <<"data">>, _Parent, _ResolvedPath, _JoinedPath, Graph, _Opts) -> - % Data is a special case: It contains every binary item in the store. -``` - -### process_composite_node - -```erlang -process_composite_node(Store, _Key, Parent, ResolvedPath, JoinedPath, Graph, Opts) -> - % Add the node to the graph - Graph1 = add_node(Graph, ResolvedPath, "lightcoral", Opts), - % If we have a parent, add an arc from parent to this node - Graph2 = case Parent of - undefined -> Graph1; - ParentPath -> - Label = extract_label(JoinedPath), - add_arc(Graph1, ParentPath, ResolvedPath, Label, Opts) - end, - % Process children recursively - case hb_store:list(Store, ResolvedPath) of - {ok, SubItems} -> - lists:foldl( - fun(SubItem, Acc) -> - ChildKey = [ResolvedPath, SubItem], - traverse_store(Store, ChildKey, ResolvedPath, Acc, Opts) - end, - Graph2, - SubItems - ); - _ -> Graph2 - end. -``` - -### add_node - -Add a node to the graph - -```erlang -add_node(Graph, ID, Color, Opts) -> - Nodes = hb_maps:get(nodes, Graph, #{}, Opts), - Graph#{nodes => hb_maps:put(ID, {ID, Color}, Nodes, Opts)}. 
-``` - -### add_arc - -Add an arc to the graph - -```erlang -add_arc(Graph, From, To, Label, Opts) -> - ?event({insert_arc, {id1, From}, {id2, To}, {label, Label}}), - Arcs = hb_maps:get(arcs, Graph, #{}, Opts), - Graph#{arcs => hb_maps:put({From, To, Label}, true, Arcs, Opts)}. -``` - -### extract_label - -Extract a label from a path - -```erlang -extract_label(Path) -> - case binary:split(Path, <<"/">>, [global]) of - [] -> Path; - Parts -> - FilteredParts = [P || P <- Parts, P /= <<>>], - case FilteredParts of - [] -> Path; - _ -> lists:last(FilteredParts) - end - end. -``` - -### graph_to_dot - -Generate the DOT file from the graph - -```erlang -graph_to_dot(Graph, Opts) -> - % Create graph header - Header = [ - <<"digraph filesystem {\n">>, - <<" node [shape=circle];\n">> - ], - % Create nodes section - Nodes = hb_maps:fold( - fun(ID, {Label, Color}, Acc) -> - [ - Acc, - io_lib:format( - <<" \"~s\" [label=\"~s\", color=~s, style=filled];~n">>, - [ID, hb_format:short_id(hb_util:bin(Label)), Color] - ) - ] - end, - [], - hb_maps:get(nodes, Graph, #{}, Opts), - Opts - ), - % Create arcs section - Arcs = hb_maps:fold( - fun({From, To, Label}, _, Acc) -> - [ - Acc, - io_lib:format( - <<" \"~s\" -> \"~s\" [label=\"~s\"];~n">>, - [From, To, hb_format:short_id(hb_util:bin(Label))] - ) - ] - end, - [], - hb_maps:get(arcs, Graph, #{}, Opts), - Opts - ), - % Create graph footer - Footer = <<"}\n">>, - % Combine all parts and convert to binary - iolist_to_binary([Header, Nodes, Arcs, Footer]). -``` - -### dot_to_svg - -Convert a dot graph to SVG format - -```erlang -dot_to_svg(DotInput) -> - % Create a port to the dot command - Port = open_port({spawn, "dot -Tsvg"}, [binary, use_stdio, stderr_to_stdout]), - % Send the dot content to the process - true = port_command(Port, iolist_to_binary(DotInput)), - % Get the SVG output - collect_output(Port, []). 
-``` - -### collect_output - -Helper function to collect output from port - -```erlang -collect_output(Port, Acc) -> - receive - {Port, {data, Data}} -> - case binary:part(Data, byte_size(Data) - 7, 7) of - <<"\n">> -> - port_close(Port), - iolist_to_binary(lists:reverse([Data | Acc])); - _ -> collect_output(Port, [Data | Acc]) - end; - {Port, eof} -> - port_close(Port), - iolist_to_binary(lists:reverse(Acc)) - after 10000 -> - {error, timeout} - end. -``` - -### get_graph_data - -Get graph data for the Three.js visualization - -```erlang -get_graph_data(Base, MaxSize, Opts) -> - % Try to generate graph using hb_cache_render - Graph = - try - % Use hb_cache_render to build the graph - cache_path_to_graph(Base, #{}, Opts) - catch - Error:Reason:Stack -> - ?event({hyperbuddy_graph_error, Error, Reason, Stack}), - #{nodes => #{}, arcs => #{}, visited => #{}} - end, - % Extract nodes and links for the visualization - NodesMap = maps:get(nodes, Graph, #{}), - ArcsMap = maps:get(arcs, Graph, #{}), - % Limit to top `MaxSize` nodes if there are too many - NodesList = - case maps:size(NodesMap) > MaxSize of - true -> - % Take a subset of nodes - {ReducedNodes, _} = lists:split( - MaxSize, - maps:to_list(NodesMap) - ), - ReducedNodes; - false -> - maps:to_list(NodesMap) - end, - % Get node IDs for filtering links - NodeIds = [ID || {ID, _} <- NodesList], - % Convert to JSON format for web visualization - Nodes = - [ - #{ - <<"id">> => ID, - <<"label">> => get_label(hb_util:bin(ID)), - <<"type">> => get_node_type(Color) - } - || - {ID, {_, Color}} <- NodesList - ], - % Filter links to only include those between nodes we're showing - FilteredLinks = - [ - {From, To, Label} - || - {From, To, Label} <- maps:keys(ArcsMap), - lists:member(From, NodeIds) - andalso lists:member(To, NodeIds) - ], - Links = - [ - #{ - <<"source">> => From, - <<"target">> => To, - <<"label">> => Label - } - || - {From, To, Label} <- FilteredLinks - ], - % Return the JSON data - JsonData = 
hb_json:encode(#{ <<"nodes">> => Nodes, <<"links">> => Links }), - {ok, #{ - <<"body">> => JsonData, - <<"content-type">> => <<"application/json">> - }}. -``` - -### get_node_type - -Convert node color from hb_cache_render to node type for visualization - -```erlang -get_node_type(Color) -> - case Color of - "lightblue" -> <<"simple">>; - "lightcoral" -> <<"composite">>; - _ -> <<"unknown">> - end. -``` - -### get_label - -Extract a readable label from a path - -```erlang -get_label(Path) -> - case binary:split(Path, <<"/">>, [global]) of - [] -> Path; - Parts -> - FilteredParts = [P || P <- Parts, P /= <<>>], - case FilteredParts of - [] -> Path; - _ -> lists:last(FilteredParts) - end - end. -``` - -### prepare_unsigned_data - -```erlang -prepare_unsigned_data() -> - Opts = #{ - store => #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST/render-fs">> - } - }, - Item = test_unsigned(#{ <<"key">> => <<"Simple unsigned data item">> }), - {ok, _Path} = hb_cache:write(Item, Opts). -``` - -### prepare_signed_data - -```erlang -prepare_signed_data() -> - Opts = #{ - store => #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST/render-fs">> - } - }, - Wallet = ar_wallet:new(), - Item = test_signed(#{ <<"l2-test-key">> => <<"l2-test-value">> }, Wallet), - %% Write the simple unsigned item - {ok, _Path} = hb_cache:write(Item, Opts). 
-``` - -### prepare_deeply_nested_complex_message - -```erlang -prepare_deeply_nested_complex_message() -> - Opts = #{ - store => #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST/render-fs">> - } - }, - Wallet = ar_wallet:new(), - %% Create nested data - Level3SignedSubmessage = test_signed([1,2,3], Wallet), - Outer = - #{ - <<"level1">> => - hb_message:commit( - #{ - <<"level2">> => - #{ - <<"level3">> => Level3SignedSubmessage, - <<"e">> => <<"f">>, - <<"z">> => [1,2,3] - }, - <<"c">> => <<"d">>, - <<"g">> => [<<"h">>, <<"i">>], - <<"j">> => 1337 - }, - ar_wallet:new() - ), - <<"a">> => <<"b">> - }, - %% Write the nested item - {ok, _} = hb_cache:write(Outer, Opts). -``` - -### test_unsigned - -```erlang -test_unsigned(Data) -> - #{ - <<"base-test-key">> => <<"base-test-value">>, - <<"data">> => Data - }. -``` - -### test_signed - -```erlang -test_signed(Data, Wallet) -> -``` - ---- - -*Generated from [hb_cache_render.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_cache_render.erl)* diff --git a/docs/book/src/hb_client.erl.md b/docs/book/src/hb_client.erl.md deleted file mode 100644 index 0b206fcbe..000000000 --- a/docs/book/src/hb_client.erl.md +++ /dev/null @@ -1,244 +0,0 @@ -# hb_client - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_client.erl) - -AO-Core API and HyperBEAM Built-In Devices - ---- - -## Exported Functions - -- `add_route/3` -- `arweave_timestamp/0` -- `resolve/4` -- `routes/2` -- `upload/2` -- `upload/3` - ---- - -### resolve - -Resolve a message pair on a remote node. - -```erlang -resolve(Node, Msg1, Msg2, Opts) -> - TABM2 = - hb_ao:set( - #{ - <<"path">> => hb_ao:get(<<"path">>, Msg2, <<"/">>, Opts), - <<"2.path">> => unset - }, - prefix_keys(<<"2.">>, Msg2, Opts), - Opts#{ hashpath => ignore } - ), - hb_http:post( - Node, - hb_maps:merge(prefix_keys(<<"1.">>, Msg1, Opts), TABM2, Opts), - Opts - ). 
-``` - -### prefix_keys - -```erlang -prefix_keys(Prefix, Message, Opts) -> - hb_maps:fold( - fun(Key, Val, Acc) -> - hb_maps:put(<>, Val, Acc, Opts) - end, - #{}, - hb_message:convert(Message, tabm, Opts), - Opts - ). -``` - -### routes - -```erlang -routes(Node, Opts) -> - resolve(Node, - #{ - <<"device">> => <<"Router@1.0">> - }, - #{ - <<"path">> => <<"routes">>, - <<"method">> => <<"GET">> - }, - Opts - ). -``` - -### add_route - -```erlang -add_route(Node, Route, Opts) -> - resolve(Node, - Route#{ - <<"device">> => <<"Router@1.0">> - }, - #{ - <<"path">> => <<"routes">>, - <<"method">> => <<"POST">> - }, - Opts - ). -``` - -### arweave_timestamp - -Grab the latest block information from the Arweave gateway node. - -```erlang -arweave_timestamp() -> - case hb_opts:get(mode) of - debug -> {0, 0, hb_util:human_id(<<0:256>>)}; - prod -> - {ok, {{_, 200, _}, _, Body}} = - httpc:request( - <<(hb_opts:get(gateway))/binary, "/block/current">> - ), - Fields = hb_json:decode(hb_util:bin(Body)), - Timestamp = hb_maps:get(<<"timestamp">>, Fields), - Hash = hb_maps:get(<<"indep_hash">>, Fields), - Height = hb_maps:get(<<"height">>, Fields), - {Timestamp, Height, Hash} - end. -``` - -### upload - -Upload a data item to the bundler node. - -```erlang -upload(Msg, Opts) -> - UploadResults = - lists:map( - fun(Device) -> - upload(Msg, Opts, Device) - end, - hb_message:commitment_devices(Msg, Opts) - ), - {ok, UploadResults}. 
-``` - -### upload - -```erlang -upload(Msg, Opts, <<"httpsig@1.0">>) -> - case hb_opts:get(bundler_httpsig, not_found, Opts) of - not_found -> - {error, no_httpsig_bundler}; - Bundler -> - ?event({uploading_item, Msg}), - hb_http:post(Bundler, <<"/tx">>, Msg, Opts) - end; -``` - -### upload - -```erlang -upload(Msg, Opts, <<"ans104@1.0">>) when is_map(Msg) -> - ?event({msg_to_convert, Msg}), - Converted = hb_message:convert(Msg, <<"ans104@1.0">>, Opts), - ?event({msg_to_tx_res, {converted, Converted}}), - Serialized = ar_bundles:serialize(Converted), - ?event({converted_msg_to_tx, Serialized}), - upload(Serialized, Opts, <<"ans104@1.0">>); -``` - -### upload - -```erlang -upload(Serialized, Opts, <<"ans104@1.0">>) when is_binary(Serialized) -> - ?event({uploading_item, Serialized}), - hb_http:post( - hb_opts:get(bundler_ans104, not_found, Opts), - #{ - <<"path">> => <<"/tx">>, - <<"content-type">> => <<"application/octet-stream">>, - <<"body">> => Serialized - }, - Opts#{ - http_client => - hb_opts:get(bundler_ans104_http_client, httpc, Opts) - } - ). -``` - -### upload_empty_raw_ans104_test - -```erlang -upload_empty_raw_ans104_test() -> - Serialized = ar_bundles:serialize( - ar_bundles:sign_item(#tx{ - data = <<"TEST">> - }, hb:wallet()) - ), - ?event({uploading_item, Serialized}), - Result = upload(Serialized, #{}, <<"ans104@1.0">>), - ?event({upload_result, Result}), - ?assertMatch({ok, _}, Result). -``` - -### upload_raw_ans104_test - -```erlang -upload_raw_ans104_test() -> - Serialized = ar_bundles:serialize( - ar_bundles:sign_item(#tx{ - data = <<"TEST">>, - tags = [{<<"test-tag">>, <<"test-value">>}] - }, hb:wallet()) - ), - ?event({uploading_item, Serialized}), - Result = upload(Serialized, #{}, <<"ans104@1.0">>), - ?event({upload_result, Result}), - ?assertMatch({ok, _}, Result). 
-``` - -### upload_raw_ans104_with_anchor_test - -```erlang -upload_raw_ans104_with_anchor_test() -> - Serialized = ar_bundles:serialize( - ar_bundles:sign_item(#tx{ - data = <<"TEST">>, - anchor = crypto:strong_rand_bytes(32), - tags = [{<<"test-tag">>, <<"test-value">>}] - }, hb:wallet()) - ), - ?event({uploading_item, Serialized}), - Result = upload(Serialized, #{}, <<"ans104@1.0">>), - ?event({upload_result, Result}), - ?assertMatch({ok, _}, Result). -``` - -### upload_empty_message_test - -```erlang -upload_empty_message_test() -> - Msg = #{ <<"data">> => <<"TEST">> }, - Committed = hb_message:commit(Msg, hb:wallet(), <<"ans104@1.0">>), - Result = upload(Committed, #{}, <<"ans104@1.0">>), - ?event({upload_result, Result}), - ?assertMatch({ok, _}, Result). -``` - -### upload_single_layer_message_test - -```erlang -upload_single_layer_message_test() -> - Msg = #{ - <<"data">> => <<"TEST">>, - <<"basic">> => <<"value">>, - <<"integer">> => 1 - }, - Committed = hb_message:commit(Msg, hb:wallet(), <<"ans104@1.0">>), - Result = upload(Committed, #{}, <<"ans104@1.0">>), - ?event({upload_result, Result}), -``` - ---- - -*Generated from [hb_client.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_client.erl)* diff --git a/docs/book/src/hb_crypto.erl.md b/docs/book/src/hb_crypto.erl.md deleted file mode 100644 index 723ce586b..000000000 --- a/docs/book/src/hb_crypto.erl.md +++ /dev/null @@ -1,143 +0,0 @@ -# hb_crypto - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_crypto.erl) - -Implements the cryptographic functions and wraps the primitives -used in HyperBEAM. Abstracted such that this (extremely!) dangerous code -can be carefully managed. -HyperBEAM currently implements two hashpath algorithms: -* `sha-256-chain`: A simple chained SHA-256 hash. -* `accumulate-256`: A SHA-256 hash that chains the given IDs and accumulates - their values into a single commitment. 
-The accumulate algorithm is experimental and at this point only exists to -allow us to test multiple HashPath algorithms in HyperBEAM. - ---- - -## Exported Functions - -- `accumulate/1` -- `accumulate/2` -- `pbkdf2/5` -- `sha256_chain/2` -- `sha256/1` - ---- - -### sha256_chain - -Implements the cryptographic functions and wraps the primitives -Add a new ID to the end of a SHA-256 hash chain. - -```erlang -sha256_chain(ID1, ID2) when ?IS_ID(ID1) -> - sha256(<>); -``` - -### sha256_chain - -Implements the cryptographic functions and wraps the primitives -Add a new ID to the end of a SHA-256 hash chain. - -```erlang -sha256_chain(ID1, ID2) -> - throw({cannot_chain_bad_ids, ID1, ID2}). -``` - -### accumulate - -Accumulate two IDs, or a list of IDs, into a single commitment. This - -```erlang -accumulate(IDs) when is_list(IDs) -> - lists:foldl(fun accumulate/2, << 0:256 >>, IDs). -``` - -### accumulate - -```erlang -accumulate(ID1 = << ID1Int:256 >>, ID2 = << ID2Int:256 >>) - when (byte_size(ID1) =:= 32) and (byte_size(ID2) =:= 32) -> - << (ID1Int + ID2Int):256 >>; -``` - -### accumulate - -```erlang -accumulate(ID1, ID2) -> - throw({cannot_accumulate_bad_ids, ID1, ID2}). -``` - -### sha256 - -Wrap Erlang's `crypto:hash/2` to provide a standard interface. - -```erlang -sha256(Data) -> - crypto:hash(sha256, Data). -``` - -### pbkdf2 - -Wrap Erlang's `crypto:pbkdf2_hmac/5` to provide a standard interface. - -```erlang -pbkdf2(Alg, Password, Salt, Iterations, KeyLength) -> - case crypto:pbkdf2_hmac(Alg, Password, Salt, Iterations, KeyLength) of - Key when is_binary(Key) -> {ok, Key}; - {Tag, CFileInfo, Desc} -> - ?event( - {pbkdf2_error, - {tag, Tag}, - {desc, Desc}, - {c_file_info, CFileInfo} - } - ), - {error, Desc} - end. -``` - -### count_zeroes - -Count the number of leading zeroes in a bitstring. - -```erlang -count_zeroes(<<>>) -> - 0; -``` - -### count_zeroes - -Count the number of leading zeroes in a bitstring. 
- -```erlang -count_zeroes(<<0:1, Rest/bitstring>>) -> - 1 + count_zeroes(Rest); -``` - -### count_zeroes - -Count the number of leading zeroes in a bitstring. - -```erlang -count_zeroes(<<_:1, Rest/bitstring>>) -> - count_zeroes(Rest). -``` - -### sha256_chain_test - -Check that `sha-256-chain` correctly produces a hash matching - -```erlang -sha256_chain_test() -> - ID1 = <<1:256>>, - ID2 = <<2:256>>, - ID3 = sha256_chain(ID1, ID2), - HashBase = << ID1/binary, ID2/binary >>, - ?assertEqual(ID3, crypto:hash(sha256, HashBase)), - % Basic entropy check. -``` - ---- - -*Generated from [hb_crypto.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_crypto.erl)* diff --git a/docs/book/src/hb_debugger.erl.md b/docs/book/src/hb_debugger.erl.md deleted file mode 100644 index 42a1949d7..000000000 --- a/docs/book/src/hb_debugger.erl.md +++ /dev/null @@ -1,242 +0,0 @@ -# hb_debugger - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_debugger.erl) - -A module that provides bootstrapping interfaces for external debuggers -to connect to HyperBEAM. -The simplest way to utilize an external graphical debugger is to use the -`erlang-ls` extension for VS Code, Emacs, or other Language Server Protocol -(LSP) compatible editors. This repository contains a `launch.json` -configuration file for VS Code that can be used to spawn a new HyperBEAM, -attach the debugger to it, and execute the specified `Module:Function(Args)`. -Additionally, the node can be started with `rebar3 debugging` in order to -allow access to the console while also allowing the debugger to attach. -Boot time is approximately 10 seconds. - ---- - -## Exported Functions - -- `await_breakpoint/0` -- `profile_and_stop/1` -- `start_and_break/2` -- `start_and_break/3` -- `start_and_break/4` -- `start/0` - ---- - -### profile_and_stop - -A module that provides bootstrapping interfaces for external debuggers -Profile a function with eflame and stop the node. 
- -```erlang -profile_and_stop(Fun) -> - {ok, F} = file:open("profiling-output", [write]), - group_leader(F, self()), - io:format("profiling-output: started.~n"), - io:format("Profiling function: ~p.~n", [Fun]), - Res = - dev_profile:eval( - Fun, - #{ <<"return-mode">> => <<"open">>, <<"engine">> => <<"eflame">> }, - #{} - ), - io:format("Profiling complete. Res: ~p~n", [Res]), - init:stop(), - erlang:halt(). -``` - -### start - -```erlang -start() -> - io:format("Starting debugger...~n", []), - DebuggerRes = application:ensure_all_started(debugger), - io:format("Started debugger server. Result: ~p.~n", [DebuggerRes]), - io:format( - "Waiting for debugger. Node is: ~p. Cookie is: ~p.~n", - [node(), erlang:get_cookie()] - ), - await_debugger(). -``` - -### interpret - -Attempt to interpret a specified module to load it into the debugger. - -```erlang -interpret(Module) -> - Parent = self(), - spawn(fun() -> - case int:interpretable(Module) of - true -> - try Parent ! {interpreted, Module, int:i(Module) == ok} - catch _:_ -> - io:format("Could not load module: ~p.~n", [Module]), - false - end; - Error -> - io:format( - "Could not interpret module: ~p. Error: ~p.~n", - [Module, Error] - ), - false - end - end), - receive {interpreted, Module, Res} -> Res - after 250 -> false - end. -``` - -### interpret_modules - -Interpret modules from a list of atom prefixes. - -```erlang -interpret_modules(Prefixes) when is_binary(Prefixes) -> - interpret_modules(binary:split(Prefixes, <<",">>, [global, trim_all])); -``` - -### interpret_modules - -Interpret modules from a list of atom prefixes. 
- -```erlang -interpret_modules(Prefixes) when is_list(Prefixes) -> - RelevantModules = - lists:filter( - fun(Mod) -> - ModBin = hb_util:bin(Mod), - lists:any( - fun(Prefix) -> - PrefixBin = hb_util:bin(Prefix), - binary:longest_common_prefix([ModBin, PrefixBin]) == - byte_size(PrefixBin) - end, - Prefixes - ) - end, - hb_util:all_hb_modules() - ), - io:format("Relevant modules: ~p.~n", [RelevantModules]), - lists:foreach( - fun(Mod) -> - io:format("Interpreting module: ~p.~n", [Mod]), - interpret(Mod) - end, - RelevantModules - ), - RelevantModules. -``` - -### start_and_break - -A bootstrapping function to wait for an external debugger to be attached, - -```erlang -start_and_break(Module, Function) -> - start_and_break(Module, Function, [], []). -``` - -### start_and_break - -```erlang -start_and_break(Module, Function, Args) -> - start_and_break(Module, Function, Args, []). -``` - -### start_and_break - -```erlang -start_and_break(Module, Function, Args, DebuggerScope) -> - timer:sleep(1000), - spawn(fun() -> - start(), - interpret(Module), - interpret_modules(DebuggerScope), - SetRes = int:break_in(Module, Function, length(Args)), - io:format( - "Breakpoint set. Result from `int:break_in/3`: ~p.~n", - [SetRes] - ), - io:format("Invoking function...~n", []), - apply(Module, Function, Args), - io:format("Function invoked. Terminating.~n", []), - init:stop(), - erlang:halt() - end). -``` - -### await_debugger - -Await a debugger to be attached to the node. - -```erlang -await_debugger() -> await_debugger(0). -``` - -### await_debugger - -Await a debugger to be attached to the node. - -```erlang -await_debugger(N) -> - case is_debugging_node_connected() of - false -> - timer:sleep(1000), - io:format("Still waiting for debugger after ~p seconds...~n", [N]), - await_debugger(N + 1); - Node -> - io:format( - "External node connection detected. Peer: ~p.~n", - [Node] - ), - N - end. 
-``` - -### is_debugging_node_connected - -Is another Distributed Erlang node connected to us? - -```erlang -is_debugging_node_connected() -> - case nodes() ++ nodes(hidden) of - [] -> false; - [Node | _] -> Node - end. -``` - -### await_breakpoint - -Await a new breakpoint being set by the debugger. - -```erlang -await_breakpoint() -> - case is_debugging_node_connected() of - false -> start(); - _ -> do_nothing - end, - await_breakpoint(0). -``` - -### await_breakpoint - -```erlang -await_breakpoint(N) -> - io:format("Waiting for breakpoint to be set in function...~n", []), - case int:all_breaks() of - [] -> - timer:sleep(1000), - io:format("Still waiting for breakpoint after ~p seconds...~n", [N]), - await_breakpoint(N + 1); - [Breakpoint | _] -> - io:format("Breakpoint set. Info: ~p.~n", [Breakpoint]), - Breakpoint -``` - ---- - -*Generated from [hb_debugger.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_debugger.erl)* diff --git a/docs/book/src/hb_escape.erl.md b/docs/book/src/hb_escape.erl.md deleted file mode 100644 index 41ec69161..000000000 --- a/docs/book/src/hb_escape.erl.md +++ /dev/null @@ -1,394 +0,0 @@ -# hb_escape - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_escape.erl) - -Functions for escaping and unescaping mixed case values, for use in HTTP -headers. Both percent-encoding and escaping of double-quoted strings -(`"` => `\"`) are supported. -This is necessary for encodings of AO-Core messages for transmission in -HTTP/2 and HTTP/3, because uppercase header keys are explicitly disallowed. -While most map keys in HyperBEAM are normalized to lowercase, IDs are not. -Subsequently, we encode all header keys to lowercase %-encoded URI-style -strings because transmission. 
- ---- - -## Exported Functions - -- `decode_keys/2` -- `decode_quotes/1` -- `decode/1` -- `encode_keys/2` -- `encode_quotes/1` -- `encode/1` - ---- - -### encode - -Functions for escaping and unescaping mixed case values, for use in HTTP -Encode a binary as a URI-encoded string. - -```erlang -encode(Bin) when is_binary(Bin) -> - list_to_binary(percent_escape(binary_to_list(Bin))). -``` - -### decode - -Decode a URI-encoded string back to a binary. - -```erlang -decode(Bin) when is_binary(Bin) -> - list_to_binary(percent_unescape(binary_to_list(Bin))). -``` - -### encode_quotes - -Encode a string with escaped quotes. - -```erlang -encode_quotes(String) when is_binary(String) -> - list_to_binary(encode_quotes(binary_to_list(String))); -``` - -### encode_quotes - -Encode a string with escaped quotes. - -```erlang -encode_quotes([]) -> []; -``` - -### encode_quotes - -Encode a string with escaped quotes. - -```erlang -encode_quotes([$\" | Rest]) -> [$\\, $\" | encode_quotes(Rest)]; -``` - -### encode_quotes - -Encode a string with escaped quotes. -Decode a string with escaped quotes. - -```erlang -encode_quotes([C | Rest]) -> [C | encode_quotes(Rest)]. -``` - -### decode_quotes - -Encode a string with escaped quotes. -Decode a string with escaped quotes. - -```erlang -decode_quotes(String) when is_binary(String) -> - list_to_binary(decode_quotes(binary_to_list(String))); -``` - -### decode_quotes - -Encode a string with escaped quotes. -Decode a string with escaped quotes. - -```erlang -decode_quotes([]) -> []; -``` - -### decode_quotes - -Encode a string with escaped quotes. -Decode a string with escaped quotes. - -```erlang -decode_quotes([$\\, $\" | Rest]) -> [$\" | decode_quotes(Rest)]; -``` - -### decode_quotes - -Encode a string with escaped quotes. -Decode a string with escaped quotes. - -```erlang -decode_quotes([$\" | Rest]) -> decode_quotes(Rest); -``` - -### decode_quotes - -Encode a string with escaped quotes. -Decode a string with escaped quotes. 
-Return a message with all of its keys decoded. - -```erlang -decode_quotes([C | Rest]) -> [C | decode_quotes(Rest)]. -``` - -### decode_keys - -Encode a string with escaped quotes. -Decode a string with escaped quotes. -Return a message with all of its keys decoded. - -```erlang -decode_keys(Msg, Opts) when is_map(Msg) -> - hb_maps:from_list( - lists:map( - fun({Key, Value}) -> {decode(Key), Value} end, - hb_maps:to_list(Msg, Opts) - ) - ); -``` - -### decode_keys - -Encode a string with escaped quotes. -Decode a string with escaped quotes. -Return a message with all of its keys decoded. -URI encode keys in the base layer of a message. Does not recurse. - -```erlang -decode_keys(Other, _Opts) -> Other. -``` - -### encode_keys - -Encode a string with escaped quotes. -Decode a string with escaped quotes. -Return a message with all of its keys decoded. -URI encode keys in the base layer of a message. Does not recurse. - -```erlang -encode_keys(Msg, Opts) when is_map(Msg) -> - hb_maps:from_list( - lists:map( - fun({Key, Value}) -> {encode(Key), Value} end, - hb_maps:to_list(Msg, Opts) - ) - ); -``` - -### encode_keys - -Encode a string with escaped quotes. -Decode a string with escaped quotes. -Return a message with all of its keys decoded. -URI encode keys in the base layer of a message. Does not recurse. -Escape a list of characters as a URI-encoded string. - -```erlang -encode_keys(Other, _Opts) -> Other. -``` - -### percent_escape - -Encode a string with escaped quotes. -Decode a string with escaped quotes. -Return a message with all of its keys decoded. -URI encode keys in the base layer of a message. Does not recurse. -Escape a list of characters as a URI-encoded string. - -```erlang -percent_escape([]) -> []; -``` - -### percent_escape - -Encode a string with escaped quotes. -Decode a string with escaped quotes. -Return a message with all of its keys decoded. -URI encode keys in the base layer of a message. Does not recurse. 
-Escape a list of characters as a URI-encoded string. - -```erlang -percent_escape([C | Cs]) when C >= $a, C =< $z -> [C | percent_escape(Cs)]; -``` - -### percent_escape - -Encode a string with escaped quotes. -Decode a string with escaped quotes. -Return a message with all of its keys decoded. -URI encode keys in the base layer of a message. Does not recurse. -Escape a list of characters as a URI-encoded string. - -```erlang -percent_escape([C | Cs]) when C >= $0, C =< $9 -> [C | percent_escape(Cs)]; -``` - -### percent_escape - -Encode a string with escaped quotes. -Decode a string with escaped quotes. -Return a message with all of its keys decoded. -URI encode keys in the base layer of a message. Does not recurse. -Escape a list of characters as a URI-encoded string. - -```erlang -percent_escape([C | Cs]) when - C == $.; C == $-; C == $_; C == $/; - C == $?; C == $& -> - [C | percent_escape(Cs)]; -``` - -### percent_escape - -Encode a string with escaped quotes. -Decode a string with escaped quotes. -Return a message with all of its keys decoded. -URI encode keys in the base layer of a message. Does not recurse. -Escape a list of characters as a URI-encoded string. -Escape a single byte as a URI-encoded string. - -```erlang -percent_escape([C | Cs]) -> [escape_byte(C) | percent_escape(Cs)]. -``` - -### escape_byte - -Encode a string with escaped quotes. -Decode a string with escaped quotes. -Return a message with all of its keys decoded. -URI encode keys in the base layer of a message. Does not recurse. -Escape a list of characters as a URI-encoded string. -Escape a single byte as a URI-encoded string. - -```erlang -escape_byte(C) when C >= 0, C =< 255 -> - [$%, hex_digit(C bsr 4), hex_digit(C band 15)]. -``` - -### hex_digit - -```erlang -hex_digit(N) when N >= 0, N =< 9 -> - N + $0; -``` - -### hex_digit - -```erlang -hex_digit(N) when N > 9, N =< 15 -> - N + $a - 10. -``` - -### percent_unescape - -Unescape a URI-encoded string. 
- -```erlang -percent_unescape([$%, H1, H2 | Cs]) -> - Byte = (hex_value(H1) bsl 4) + hex_value(H2), - [Byte | percent_unescape(Cs)]; -``` - -### percent_unescape - -Unescape a URI-encoded string. - -```erlang -percent_unescape([C | Cs]) -> - [C | percent_unescape(Cs)]; -``` - -### percent_unescape - -Unescape a URI-encoded string. - -```erlang -percent_unescape([]) -> - []. -``` - -### hex_value - -```erlang -hex_value(C) when C >= $0, C =< $9 -> - C - $0; -``` - -### hex_value - -```erlang -hex_value(C) when C >= $a, C =< $f -> - C - $a + 10; -``` - -### hex_value - -```erlang -hex_value(C) when C >= $A, C =< $F -> - C - $A + 10. -``` - -### escape_unescape_identity_test - -```erlang -escape_unescape_identity_test() -> - % Test that unescape(escape(X)) == X for various inputs - TestCases = [ - <<"hello">>, - <<"hello, world!">>, - <<"hello+list">>, - <<"special@chars#here">>, - <<"UPPERCASE">>, - <<"MixedCASEstring">>, - <<"12345">>, - <<>> % Empty string - ], - ?event(parsing, - {escape_unescape_identity_test, - {test_cases, - [ - {Case, {explicit, encode(Case)}} - || - Case <- TestCases - ] - } - } - ), - lists:foreach(fun(TestCase) -> - ?assertEqual(TestCase, decode(encode(TestCase))) - end, TestCases). -``` - -### unescape_specific_test - -```erlang -unescape_specific_test() -> - % Test specific unescape cases - ?assertEqual(<<"a">>, decode(<<"%61">>)), - ?assertEqual(<<"A">>, decode(<<"%41">>)), - ?assertEqual(<<"!">>, decode(<<"%21">>)), - ?assertEqual(<<"hello, World!">>, decode(<<"hello%2c%20%57orld%21">>)), - ?assertEqual(<<"/">>, decode(<<"%2f">>)), - ?assertEqual(<<"?">>, decode(<<"%3f">>)). 
-``` - -### uppercase_test - -```erlang -uppercase_test() -> - % Test uppercase characters are properly escaped - ?assertEqual(<<"%41">>, encode(<<"A">>)), - ?assertEqual(<<"%42">>, encode(<<"B">>)), - ?assertEqual(<<"%5a">>, encode(<<"Z">>)), - ?assertEqual(<<"hello%20%57orld">>, encode(<<"hello World">>)), - ?assertEqual(<<"test%41%42%43">>, encode(<<"testABC">>)). -``` - -### escape_unescape_special_chars_test - -```erlang -escape_unescape_special_chars_test() -> - % Test characters that should be escaped - SpecialChars = [ - $@, $#, $", $$, $%, $&, $', $(, $), $*, $+, $,, $/, $:, $;, - $<, $=, $>, $?, $[, $\\, $], $^, $`, ${, $|, $}, $~, $\s - ], - TestString = list_to_binary(SpecialChars), -``` - ---- - -*Generated from [hb_escape.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_escape.erl)* diff --git a/docs/book/src/hb_event.erl.md b/docs/book/src/hb_event.erl.md deleted file mode 100644 index 6df4e3b70..000000000 --- a/docs/book/src/hb_event.erl.md +++ /dev/null @@ -1,505 +0,0 @@ -# hb_event - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_event.erl) - -Wrapper for incrementing prometheus counters. - ---- - -## Exported Functions - -- `counters/0` -- `diff/1` -- `diff/2` -- `increment_callers/1` -- `increment/3` -- `increment/4` -- `log/1` -- `log/2` -- `log/3` -- `log/4` -- `log/5` -- `log/6` - ---- - -### log - -Wrapper for incrementing prometheus counters. - -```erlang -log(_X) -> ok. -``` - -### log - -Wrapper for incrementing prometheus counters. - -```erlang -log(_Topic, _X) -> ok. -``` - -### log - -Wrapper for incrementing prometheus counters. - -```erlang -log(_Topic, _X, _Mod) -> ok. -``` - -### log - -Wrapper for incrementing prometheus counters. - -```erlang -log(_Topic, _X, _Mod, _Func) -> ok. -``` - -### log - -Wrapper for incrementing prometheus counters. - -```erlang -log(_Topic, _X, _Mod, _Func, _Line) -> ok. -``` - -### log - -Wrapper for incrementing prometheus counters. 
- -```erlang -log(_Topic, _X, _Mod, _Func, _Line, _Opts) -> ok. --else. -``` - -### log - -Debugging log logging function. For now, it just prints to standard - -```erlang -log(X) -> log(global, X). -``` - -### log - -Debugging log logging function. For now, it just prints to standard - -```erlang -log(Topic, X) -> log(Topic, X, ""). -``` - -### log - -Debugging log logging function. For now, it just prints to standard - -```erlang -log(Topic, X, Mod) -> log(Topic, X, Mod, undefined). -``` - -### log - -Debugging log logging function. For now, it just prints to standard - -```erlang -log(Topic, X, Mod, Func) -> log(Topic, X, Mod, Func, undefined). -``` - -### log - -Debugging log logging function. For now, it just prints to standard - -```erlang -log(Topic, X, Mod, Func, Line) -> log(Topic, X, Mod, Func, Line, #{}). -``` - -### log - -Debugging log logging function. For now, it just prints to standard - -```erlang -log(Topic, X, Mod, undefined, Line, Opts) -> log(Topic, X, Mod, "", Line, Opts); -``` - -### log - -Debugging log logging function. For now, it just prints to standard - -```erlang -log(Topic, X, Mod, Func, undefined, Opts) -> log(Topic, X, Mod, Func, "", Opts); -``` - -### log - -Debugging log logging function. For now, it just prints to standard - -```erlang -log(Topic, X, Mod, Func, Line, Opts) -> - % Check if the debug_print option has the topic in it if set. -``` - -### should_print - -Determine if the topic should be printed. Uses a cache in the process - -```erlang -should_print(Topic, Opts) -> - case erlang:get({event_print, Topic}) of - {cached, X} -> X; - undefined -> - Result = - case hb_opts:get(debug_print, false, Opts) of - EventList when is_list(EventList) -> - lists:member(Topic, EventList); - true -> true; - false -> false - end, - erlang:put({event_print, Topic}, {cached, Result}), - Result - end. 
-``` - -### handle_tracer - -```erlang -handle_tracer(Topic, X, Opts) -> - AllowedTopics = [http, ao_result], - case lists:member(Topic, AllowedTopics) of - true -> - case hb_opts:get(trace, undefined, Opts) of - undefined -> - case tuple_to_list(X) of - [_ | Rest] -> - try - Map = maps:from_list(Rest), - TopicOpts = hb_opts:get(opts, #{}, Map), - case hb_opts:get(trace, undefined, TopicOpts) of - undefined -> ok; - TracePID -> - hb_tracer:record_step(TracePID, {Topic, X}) - end - catch - _:_ -> ok - end; - _ -> - ok - end; - TracePID -> hb_tracer:record_step(TracePID, {Topic, X}) - end; - _ -> ok - end. -``` - -### increment - -Increment the counter for the given topic and message. Registers the - -```erlang -increment(Topic, Message, Opts) -> - increment(Topic, Message, Opts, 1). -``` - -### increment - -```erlang -increment(global, _Message, _Opts, _Count) -> ignored; -``` - -### increment - -```erlang -increment(ao_core, _Message, _Opts, _Count) -> ignored; -``` - -### increment - -```erlang -increment(ao_internal, _Message, _Opts, _Count) -> ignored; -``` - -### increment - -```erlang -increment(ao_devices, _Message, _Opts, _Count) -> ignored; -``` - -### increment - -```erlang -increment(ao_subresolution, _Message, _Opts, _Count) -> ignored; -``` - -### increment - -```erlang -increment(signature_base, _Message, _Opts, _Count) -> ignored; -``` - -### increment - -```erlang -increment(id_base, _Message, _Opts, _Count) -> ignored; -``` - -### increment - -```erlang -increment(parsing, _Message, _Opts, _Count) -> ignored; -``` - -### increment - -```erlang -increment(Topic, Message, _Opts, Count) -> - case parse_name(Message) of - <<"debug", _/binary>> -> ignored; - EventName -> - TopicBin = parse_name(Topic), - case find_event_server() of - Pid when is_pid(Pid) -> - Pid ! {increment, TopicBin, EventName, Count}; - undefined -> - PID = spawn(fun() -> server() end), - hb_name:register(?MODULE, PID), - PID ! {increment, TopicBin, EventName, Count} - end - end. 
-``` - -### increment_callers - -Increment the call paths and individual upstream calling functions of - -```erlang -increment_callers(Topic) -> - increment_callers(Topic, erlang). -``` - -### increment_callers - -```erlang -increment_callers(Topic, Type) -> - BinTopic = hb_util:bin(Topic), - increment( - <>, - hb_format:trace_short(Type), - #{} - ), - lists:foreach( - fun(Caller) -> - increment(<>, Caller, #{}) - end, - hb_format:trace_to_list(hb_format:get_trace(Type)) - ). -``` - -### counters - -Return a message containing the current counter values for all logged - -```erlang -counters() -> - UnaggregatedCounts = - [ - {Group, Name, Count} - || - {{default, <<"event">>, [Group, Name], _}, Count, _} <- raw_counters() - ], - lists:foldl( - fun({Group, Name, Count}, Acc) -> - Acc#{ - Group => (maps:get(Group, Acc, #{}))#{ - Name => maps:get(Name, maps:get(Group, Acc, #{}), 0) + Count - } - } - end, - #{}, - UnaggregatedCounts - ). -``` - -### diff - -Return the change in the event counters before and after executing the - -```erlang -diff(Fun) -> - diff(Fun, #{}). -``` - -### diff - -```erlang -diff(Fun, Opts) -> - EventsBefore = counters(), - Res = Fun(), - EventsAfter = counters(), - {hb_message:diff(EventsBefore, EventsAfter, Opts), Res}. -``` - -### raw_counters - -```erlang -raw_counters() -> - []. -``` - -### raw_counters - -```erlang -raw_counters() -> - ets:tab2list(prometheus_counter_table). -``` - -### find_event_server - -Find the event server, creating it if it doesn't exist. We cache the - -```erlang -find_event_server() -> - case erlang:get({event_server, ?MODULE}) of - {cached, Pid} -> Pid; - undefined -> - PID = - case hb_name:lookup(?MODULE) of - Pid when is_pid(Pid) -> Pid; - undefined -> - NewServer = spawn(fun() -> server() end), - hb_name:register(?MODULE, NewServer), - NewServer - end, - erlang:put({event_server, ?MODULE}, {cached, PID}), - PID - end. 
-``` - -### server - -```erlang -server() -> - await_prometheus_started(), - prometheus_counter:declare( - [ - {name, <<"event">>}, - {help, <<"AO-Core execution events">>}, - {labels, [topic, event]} - ]), - handle_events(). -``` - -### handle_events - -```erlang -handle_events() -> - receive - {increment, TopicBin, EventName, Count} -> - case erlang:process_info(self(), message_queue_len) of - {message_queue_len, Len} when Len > ?OVERLOAD_QUEUE_LENGTH -> - % Print a warning, but do so less frequently the more - % overloaded the system is. -``` - -### await_prometheus_started - -Delay the event server until prometheus is started. - -```erlang -await_prometheus_started() -> - receive - Msg -> - case application:get_application(prometheus) of - undefined -> await_prometheus_started(); - _ -> self() ! Msg, ok - end - end. -``` - -### parse_name - -```erlang -parse_name(Name) when is_tuple(Name) -> - parse_name(element(1, Name)); -``` - -### parse_name - -```erlang -parse_name(Name) when is_atom(Name) -> - atom_to_binary(Name, utf8); -``` - -### parse_name - -```erlang -parse_name(Name) when is_binary(Name) -> - Name; -``` - -### parse_name - -```erlang -parse_name(Name) when is_list(Name) -> - iolist_to_binary(Name); -``` - -### parse_name - -Benchmark the performance of a full log of an event. - -```erlang -parse_name(_) -> no_event_name. -%%% Benchmark tests -``` - -### benchmark_event_test - -Benchmark the performance of a full log of an event. - -```erlang -benchmark_event_test() -> - Iterations = - hb_test_utils:benchmark( - fun() -> - log(test_module, {test, 1}) - end - ), - hb_test_utils:benchmark_print(<<"Recorded">>, <<"events">>, Iterations), - ?assert(Iterations >= 1000), - ok. 
-``` - -### benchmark_print_lookup_test - -Benchmark the performance of looking up whether a topic and module - -```erlang -benchmark_print_lookup_test() -> - DefaultOpts = hb_opts:default_message_with_env(), - Iterations = - hb_test_utils:benchmark( - fun() -> - should_print(test_module, DefaultOpts) - orelse should_print(test_event, DefaultOpts) - end - ), - hb_test_utils:benchmark_print(<<"Looked-up">>, <<"topics">>, Iterations), - ?assert(Iterations >= 1000), - ok. -``` - -### benchmark_increment_test - -Benchmark the performance of incrementing an event. - -```erlang -benchmark_increment_test() -> - Iterations = - hb_test_utils:benchmark( - fun() -> increment(test_module, {test, 1}, #{}) end - ), - hb_test_utils:benchmark_print(<<"Incremented">>, <<"events">>, Iterations), - ?assert(Iterations >= 1000), -``` - ---- - -*Generated from [hb_event.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_event.erl)* diff --git a/docs/book/src/hb_examples.erl.md b/docs/book/src/hb_examples.erl.md deleted file mode 100644 index def3663df..000000000 --- a/docs/book/src/hb_examples.erl.md +++ /dev/null @@ -1,227 +0,0 @@ -# hb_examples - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_examples.erl) - -This module contains end-to-end tests for Hyperbeam, accessing through -the HTTP interface. As well as testing the system, you can use these tests -as examples of how to interact with HyperBEAM nodes. - ---- - -### relay_with_payments_test_ - -This module contains end-to-end tests for Hyperbeam, accessing through -Start a node running the simple pay meta device, and use it to relay - -```erlang -relay_with_payments_test_() -> - {timeout, 30, fun relay_with_payments_test/0}. 
-``` - -### relay_with_payments_test - -```erlang -relay_with_payments_test() -> - HostWallet = ar_wallet:new(), - ClientWallet = ar_wallet:new(), - ClientAddress = hb_util:human_id(ar_wallet:to_address(ClientWallet)), - % Start a node with the simple-pay device enabled. -``` - -### paid_wasm_test_ - -Gain signed WASM responses from a node and verify them. - -```erlang -paid_wasm_test_() -> - {timeout, 30, fun paid_wasm/0}. -``` - -### paid_wasm - -```erlang -paid_wasm() -> - HostWallet = ar_wallet:new(), - ClientWallet = ar_wallet:new(), - ClientAddress = hb_util:human_id(ar_wallet:to_address(ClientWallet)), - ProcessorMsg = - #{ - <<"device">> => <<"p4@1.0">>, - <<"ledger-device">> => <<"simple-pay@1.0">>, - <<"pricing-device">> => <<"simple-pay@1.0">> - }, - HostNode = - hb_http_server:start_node( - Opts = #{ - store => [ - #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST">> - } - ], - simple_pay_ledger => #{ ClientAddress => 100 }, - simple_pay_price => 10, - operator => ar_wallet:to_address(HostWallet), - on => #{ - <<"request">> => ProcessorMsg, - <<"response">> => ProcessorMsg - } - } - ), - % Read the WASM file from disk, post it to the host and execute it. -``` - -### create_schedule_aos2_test_disabled - -```erlang -create_schedule_aos2_test_disabled() -> - % The legacy process format, according to the ao.tn.1 spec: - % Data-Protocol The name of the Data-Protocol for this data-item 1-1 ao - % Variant The network version that this data-item is for 1-1 ao.TN.1 - % Type Indicates the shape of this Data-Protocol data-item 1-1 Process - % Module Links the process to ao module using the module's unique - % Transaction ID (TXID). 1-1 {TXID} - % Scheduler Specifies the scheduler unit by Wallet Address or Name, and can - % be referenced by a recent Scheduler-Location. 
1-1 {ADDRESS} - % Cron-Interval An interval at which a particular Cron Message is recevied by the process, - % in the format X-Y, where X is a scalar value, and Y is milliseconds, - % seconds, minutes, hours, days, months, years, or blocks 0-n 1-second - % Cron-Tag-{Name} defines tags for Cron Messages at set intervals, - % specifying relevant metadata. 0-1 - % Memory-Limit Overrides maximum memory, in megabytes or gigabytes, set by - % Module, can not exceed modules setting 0-1 16-mb - % Compute-Limit Caps the compute cycles for a module per evaluation, ensuring - % efficient, controlled execution 0-1 1000 - % Pushed-For Message TXID that this Process is pushed as a result 0-1 {TXID} - % Cast Sets message handling: 'True' for do not push, 'False' for normal - % pushing 0-1 {True or False} - % Authority Defines a trusted wallet address which can send Messages to - % the Process 0-1 {ADDRESS} - % On-Boot Defines a startup script to run when the process is spawned. If - % value "Data" it uses the Data field of the Process Data Item. If it is a - % TXID it will load that TX from Arweave and execute it. 
0-1 {Data or TXID} - % {Any-Tags} Custom Tags specific for the initial input of the Process 0-n - Node = - try hb_http_server:start_node(#{ priv_wallet => hb:wallet() }) - catch - _:_ -> - <<"http://localhost:8734">> - end, - ProcMsg = #{ - <<"data-protocol">> => <<"ao">>, - <<"type">> => <<"Process">>, - <<"variant">> => <<"ao.TN.1">>, - <<"type">> => <<"Process">>, - <<"module">> => <<"bkjb55i07GUCUSWROtKK4HU1mBS_X0TyH3M5jMV6aPg">>, - <<"scheduler">> => hb_util:human_id(hb:address()), - <<"memory-limit">> => <<"1024-mb">>, - <<"compute-limit">> => <<"10000000">>, - <<"authority">> => hb_util:human_id(hb:address()), - <<"scheduler-location">> => hb_util:human_id(hb:address()) - }, - Wallet = hb:wallet(), - SignedProc = hb_message:commit(ProcMsg, Wallet), - IDNone = hb_message:id(SignedProc, none), - IDAll = hb_message:id(SignedProc, all), - {ok, Res} = schedule(SignedProc, IDNone, Wallet, Node), - ?event({res, Res}), - receive after 100 -> ok end, - ?event({id, IDNone, IDAll}), - {ok, Res2} = hb_http:get( - Node, - <<"/~scheduler@1.0/slot?target=", IDNone/binary>>, - #{} - ), - ?assertMatch(Slot when Slot >= 0, hb_ao:get(<<"at-slot">>, Res2, #{})). -``` - -### schedule - -```erlang -schedule(ProcMsg, Target) -> - schedule(ProcMsg, Target, hb:wallet()). -``` - -### schedule - -```erlang -schedule(ProcMsg, Target, Wallet) -> - schedule(ProcMsg, Target, Wallet, <<"http://localhost:8734">>). -``` - -### schedule - -```erlang -schedule(ProcMsg, Target, Wallet, Node) -> - SignedReq = - hb_message:commit( - #{ - <<"path">> => <<"/~scheduler@1.0/schedule">>, - <<"target">> => Target, - <<"body">> => ProcMsg - }, - Wallet - ), - ?event({signed_req, SignedReq}), - hb_http:post(Node, SignedReq, #{}). -``` - -### relay_schedule_ans104_test - -Test that we can schedule an ANS-104 data item on a relayed node. 
The - -```erlang -relay_schedule_ans104_test() -> - SchedulerWallet = ar_wallet:new(), - ComputeWallet = ar_wallet:new(), - RelayWallet = ar_wallet:new(), - ?event(debug_test, - {wallets, - {scheduler, hb_util:human_id(SchedulerWallet)}, - {compute, hb_util:human_id(ComputeWallet)}, - {relay, hb_util:human_id(RelayWallet)} - } - ), - Scheduler = - hb_http_server:start_node( - #{ - on => #{ - <<"start">> => #{ - <<"device">> => <<"scheduler@1.0">>, - <<"path">> => <<"location">>, - <<"method">> => <<"POST">>, - <<"target">> => <<"self">>, - <<"require-codec">> => <<"ans104@1.0">>, - <<"hook">> => #{ - <<"result">> => <<"ignore">>, - <<"commit-request">> => true - } - } - }, - store => [hb_test_utils:test_store()], - priv_wallet => SchedulerWallet - } - ), - ?event(debug_test, {scheduler, Scheduler}), - Compute = - hb_http_server:start_node( - #{ - priv_wallet => ComputeWallet, - store => - [ - ComputeStore = hb_test_utils:test_store(), - #{ - <<"store-module">> => hb_store_remote_node, - <<"name">> => <<"cache-TEST/remote-node">>, - <<"node">> => Scheduler - } - ] - } - ), - % Get the scheduler location of the scheduling node and write it to the - % compute node's store. -``` - ---- - -*Generated from [hb_examples.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_examples.erl)* diff --git a/docs/book/src/hb_features.erl.md b/docs/book/src/hb_features.erl.md deleted file mode 100644 index 6f49a16b2..000000000 --- a/docs/book/src/hb_features.erl.md +++ /dev/null @@ -1,132 +0,0 @@ -# hb_features - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_features.erl) - -A module that exports a list of feature flags that the node supports -using the `-ifdef` macro. -As a consequence, this module acts as a proxy of information between the -build system and the runtime execution environment. 
- ---- - -## Exported Functions - -- `all/0` -- `eflame/0` -- `enabled/1` -- `genesis_wasm/0` -- `http3/0` -- `rocksdb/0` -- `test/0` - ---- - -### all - -A module that exports a list of feature flags that the node supports -Returns a list of all feature flags that the node supports. - -```erlang -all() -> - Features = - lists:filtermap( - fun({Name, _}) -> - case lists:member(Name, [all, enabled, module_info]) of - true -> false; - false -> {true, Name} - end - end, - ?MODULE:module_info(exports) - ), - hb_maps:from_list( - lists:map( - fun(Name) -> - {Name, ?MODULE:Name()} - end, - Features - ) - ). -``` - -### enabled - -Returns true if the feature flag is enabled. - -```erlang -enabled(Feature) -> - hb_maps:get(Feature, all(), false). -``` - -### http3 - -```erlang -http3() -> true. --else. -``` - -### http3 - -```erlang -http3() -> false. --endif. -``` - -### rocksdb - -```erlang -rocksdb() -> true. --else. -``` - -### rocksdb - -```erlang -rocksdb() -> false. --endif. -``` - -### genesis_wasm - -```erlang -genesis_wasm() -> true. --else. -``` - -### genesis_wasm - -```erlang -genesis_wasm() -> false. --endif. -``` - -### eflame - -```erlang -eflame() -> true. --else. -``` - -### eflame - -```erlang -eflame() -> false. --endif. -``` - -### test - -```erlang -test() -> true. --else. -``` - -### test - -```erlang -test() -> false. --endif. -``` - ---- - -*Generated from [hb_features.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_features.erl)* diff --git a/docs/book/src/hb_format.erl.md b/docs/book/src/hb_format.erl.md deleted file mode 100644 index 89462d1b4..000000000 --- a/docs/book/src/hb_format.erl.md +++ /dev/null @@ -1,1201 +0,0 @@ -# hb_format - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_format.erl) - -Formatting and debugging utilities for HyperBEAM. 
-This module provides text formatting capabilities for debugging output, -message pretty-printing, stack trace formatting, and human-readable -representations of binary data and cryptographic identifiers. -The functions in this module are primarily used for development and -debugging purposes, supporting the logging and diagnostic infrastructure -throughout the HyperBEAM system. - ---- - -## Exported Functions - -- `binary/1` -- `error/2` -- `escape_format/1` -- `eunit_print/2` -- `get_trace/1` -- `indent_lines/2` -- `indent/2` -- `indent/3` -- `indent/4` -- `maybe_multiline/3` -- `message/1` -- `message/2` -- `message/3` -- `print_trace_short/4` -- `print_trace/4` -- `print/1` -- `print/3` -- `print/4` -- `print/5` -- `remove_leading_noise/1` -- `remove_noise/1` -- `remove_trailing_noise/1` -- `short_id/1` -- `term/1` -- `term/2` -- `term/3` -- `trace_macro_helper/5` -- `trace_short/0` -- `trace_short/1` -- `trace_to_list/1` -- `trace/1` - ---- - -### print - -Formatting and debugging utilities for HyperBEAM. -Print a message to the standard error stream, prefixed by the amount - -```erlang -print(X) -> - print(X, <<>>, #{}). -``` - -### print - -```erlang -print(X, Info, Opts) -> - io:format( - standard_error, - "=== HB DEBUG ===~s==>~n~s~n", - [Info, term(X, Opts, 0)] - ), - X. -``` - -### print - -```erlang -print(X, Mod, Func, LineNum) -> - print(X, format_debug_trace(Mod, Func, LineNum, #{}), #{}). -``` - -### print - -```erlang -print(X, Mod, Func, LineNum, Opts) -> - Now = erlang:system_time(millisecond), - Last = erlang:put(last_debug_print, Now), - TSDiff = case Last of undefined -> 0; _ -> Now - Last end, - Info = - hb_util:bin( - io_lib:format( - "[~pms in ~s @ ~s]", - [ - TSDiff, - case server_id() of - undefined -> hb_util:bin(io_lib:format("~p", [self()])); - ServerID -> - hb_util:bin( - io_lib:format( - "~s (~p)", - [short_id(ServerID), self()] - ) - ) - end, - format_debug_trace(Mod, Func, LineNum, Opts) - ] - ) - ), - print(X, Info, Opts). 
-``` - -### server_id - -Retreive the server ID of the calling process, if known. - -```erlang -server_id() -> - server_id(#{ server_id => undefined }). -``` - -### server_id - -```erlang -server_id(Opts) -> - case hb_opts:get(server_id, undefined, Opts) of - undefined -> get(server_id); - ServerID -> ServerID - end. -``` - -### format_debug_trace - -Generate the appropriate level of trace for a given call. - -```erlang -format_debug_trace(Mod, Func, Line, Opts) -> - case hb_opts:get(debug_print_trace, false, #{}) of - short -> - Trace = - case hb_opts:get(debug_trace_type, erlang, Opts) of - erlang -> get_trace(erlang); - ao -> - % If we are printing AO-Core traces, we add the module - % and line number to the end to show exactly where in - % the handler-flow the event arose. -``` - -### term - -Convert a term to a string for debugging print purposes. - -```erlang -term(X) -> term(X, #{}). -``` - -### term - -Convert a term to a string for debugging print purposes. - -```erlang -term(X, Opts) -> term(X, Opts, 0). -``` - -### term - -Convert a term to a string for debugging print purposes. - -```erlang -term(X, Opts, Indent) -> - try do_debug_fmt(X, Opts, Indent) - catch A:B:C -> - Mode = hb_opts:get(mode, prod, Opts), - PrintFailPreference = hb_opts:get(debug_print_fail_mode, quiet, Opts), - case {Mode, PrintFailPreference} of - {debug, quiet} -> - indent("[!Format failed!] ~p", [X], Opts, Indent); - {debug, _} -> - indent( - "[PRINT FAIL:] ~80p~n===== PRINT ERROR WAS ~p:~p =====~n~s", - [ - X, - A, - B, - hb_util:bin( - format_trace( - C, - hb_opts:get(stack_print_prefixes, [], #{}) - ) - ) - ], - Opts, - Indent - ); - _ -> - indent("[!Format failed!]", [], Opts, Indent) - end - end. 
-``` - -### do_debug_fmt - -```erlang -do_debug_fmt( - { { {rsa, _PublicExpnt1}, _Priv1, _Priv2 }, - { {rsa, _PublicExpnt2}, Pub } - }, - Opts, Indent -) -> - format_address(Pub, Opts, Indent); -``` - -### do_debug_fmt - -```erlang -do_debug_fmt( - { AtomValue, - { - { {rsa, _PublicExpnt1}, _Priv1, _Priv2 }, - { {rsa, _PublicExpnt2}, Pub } - } - }, - Opts, Indent -) -> - AddressString = format_address(Pub, Opts, Indent), - indent("~p: ~s", [AtomValue, AddressString], Opts, Indent); -``` - -### do_debug_fmt - -```erlang -do_debug_fmt({explicit, X}, Opts, Indent) -> - indent("[Explicit:] ~p", [X], Opts, Indent); -``` - -### do_debug_fmt - -```erlang -do_debug_fmt({string, X}, Opts, Indent) -> - indent("~s", [X], Opts, Indent); -``` - -### do_debug_fmt - -```erlang -do_debug_fmt({trace, Trace}, Opts, Indent) -> - indent("~n~s", [trace(Trace)], Opts, Indent); -``` - -### do_debug_fmt - -```erlang -do_debug_fmt({as, undefined, Msg}, Opts, Indent) -> - "\n" ++ indent("Subresolve => ", [], Opts, Indent) ++ - maybe_multiline(Msg, Opts, Indent + 1); -``` - -### do_debug_fmt - -```erlang -do_debug_fmt({as, DevID, Msg}, Opts, Indent) -> - "\n" ++ indent("Subresolve as ~s => ", [DevID], Opts, Indent) ++ - maybe_multiline(Msg, Opts, Indent + 1); -``` - -### do_debug_fmt - -```erlang -do_debug_fmt({X, Y}, Opts, Indent) when is_atom(X) and is_atom(Y) -> - indent("~p: ~p", [X, Y], Opts, Indent); -``` - -### do_debug_fmt - -```erlang -do_debug_fmt({X, Y}, Opts, Indent) when is_record(Y, tx) -> - indent("~p: [TX item]~n~s", - [X, ar_bundles:format(Y, Indent + 1, Opts)], - Opts, - Indent - ); -``` - -### do_debug_fmt - -```erlang -do_debug_fmt({X, Y}, Opts, Indent) when is_map(Y); is_list(Y) -> - Formatted = maybe_multiline(Y, Opts, Indent + 1), - indent( - case is_binary(X) of - true -> "~s"; - false -> "~p" - end ++ "~s", - [ - X, - case is_multiline(Formatted) of - true -> " ==>" ++ Formatted; - false -> ": " ++ Formatted - end - ], - Opts, - Indent - ); -``` - -### do_debug_fmt - 
-```erlang -do_debug_fmt({X, Y}, Opts, Indent) -> - indent( - "~s: ~s", - [ - remove_leading_noise(term(X, Opts, Indent)), - remove_leading_noise(term(Y, Opts, Indent)) - ], - Opts, - Indent - ); -``` - -### do_debug_fmt - -```erlang -do_debug_fmt(TX, Opts, Indent) when is_record(TX, tx) -> - indent("[TX item]~n~s", - [ar_bundles:format(TX, Indent, Opts)], - Opts, - Indent - ); -``` - -### do_debug_fmt - -```erlang -do_debug_fmt(MaybePrivMap, Opts, Indent) when is_map(MaybePrivMap) -> - Map = hb_private:reset(MaybePrivMap), - case maybe_format_short(Map, Opts, Indent) of - {ok, SimpleFmt} -> SimpleFmt; - error -> - "\n" ++ lists:flatten(message(Map, Opts, Indent)) - end; -``` - -### do_debug_fmt - -```erlang -do_debug_fmt(Tuple, Opts, Indent) when is_tuple(Tuple) -> - format_tuple(Tuple, Opts, Indent); -``` - -### do_debug_fmt - -```erlang -do_debug_fmt(X, Opts, Indent) when is_binary(X) -> - indent("~s", [binary(X)], Opts, Indent); -``` - -### do_debug_fmt - -```erlang -do_debug_fmt(Str = [X | _], Opts, Indent) when is_integer(X) andalso X >= 32 andalso X < 127 -> - indent("~s", [Str], Opts, Indent); -``` - -### do_debug_fmt - -```erlang -do_debug_fmt(MsgList, Opts, Indent) when is_list(MsgList) -> - format_list(MsgList, Opts, Indent); -``` - -### do_debug_fmt - -If the user attempts to print a wallet, format it as an address. - -```erlang -do_debug_fmt(X, Opts, Indent) -> - indent("~80p", [X], Opts, Indent). -``` - -### format_address - -If the user attempts to print a wallet, format it as an address. - -```erlang -format_address(Wallet, Opts, Indent) -> - indent("Wallet [Addr: ~s]", - [short_id(hb_util:human_id(ar_wallet:to_address(Wallet)))], - Opts, - Indent - ). -``` - -### format_tuple - -Helper function to format tuples with arity greater than 2. - -```erlang -format_tuple(Tuple, Opts, Indent) -> - to_lines(lists:map( - fun(Elem) -> - term(Elem, Opts, Indent) - end, - tuple_to_list(Tuple) - )). -``` - -### format_list - -Format a list. 
Comes in three forms: all on one line, individual items - -```erlang -format_list(MsgList, Opts, Indent) -> - case maybe_format_short(MsgList, Opts, Indent) of - {ok, SimpleFmt} -> SimpleFmt; - error -> - "\n" ++ - indent("List [~w] {", [length(MsgList)], Opts, Indent) ++ - format_list_lines(MsgList, Opts, Indent) - end. -``` - -### format_list_lines - -Format a list as a multi-line string. - -```erlang -format_list_lines(MsgList, Opts, Indent) -> - Numbered = hb_util:number(MsgList), - Lines = - lists:map( - fun({N, Msg}) -> - format_list_item(N, Msg, Opts, Indent) - end, - Numbered - ), - AnyLong = - lists:any( - fun({Mode, _}) -> Mode == multiline end, - Lines - ), - case AnyLong of - false -> - "\n" ++ - remove_trailing_noise( - lists:flatten( - lists:map( - fun({_, Line}) -> - Line - end, - Lines - ) - ) - ) ++ - "\n" ++ - indent("}", [], Opts, Indent); - true -> - "\n" ++ - lists:flatten(lists:map( - fun({N, Msg}) -> - {_, Line} = format_list_item(multiline, N, Msg, Opts, Indent), - Line - end, - Numbered - )) ++ indent("}", [], Opts, Indent) - end. -``` - -### format_list_item - -Format a single element of a list. - -```erlang -format_list_item(N, Msg, Opts, Indent) -> - case format_list_item(short, N, Msg, Opts, Indent) of - {short, String} -> {short, String}; - error -> format_list_item(multiline, N, Msg, Opts, Indent) - end. -``` - -### format_list_item - -```erlang -format_list_item(short, N, Msg, Opts, Indent) -> - case maybe_format_short(Msg, Opts, Indent) of - {ok, SimpleFmt} -> - {short, indent("~s => ~s~n", [N, SimpleFmt], Opts, Indent + 1)}; - error -> error - end; -``` - -### format_list_item - -```erlang -format_list_item(multiline, N, Msg, Opts, Indent) -> - Formatted = - case is_multiline(Base = term(Msg, Opts, Indent + 2)) of - true -> Base; - false -> remove_leading_noise(Base) - end, - { - multiline, - indent( - "~s => ~s~n", - [N, Formatted], - Opts, - Indent + 1 - ) - }. 
-``` - -### to_lines - -Join a list of strings and remove trailing noise. - -```erlang -to_lines(Elems) -> - remove_trailing_noise(do_to_lines(Elems)). -``` - -### do_to_lines - -```erlang -do_to_lines([]) -> []; -``` - -### do_to_lines - -```erlang -do_to_lines(In =[RawElem | Rest]) -> - Elem = lists:flatten(RawElem), - case lists:member($\n, Elem) of - true -> lists:flatten(lists:join("\n", In)); - false -> Elem ++ ", " ++ do_to_lines(Rest) - end. -``` - -### remove_noise - -Remove any leading or trailing noise from a string. - -```erlang -remove_noise(Str) -> - remove_leading_noise(remove_trailing_noise(Str)). -``` - -### remove_leading_noise - -Remove any leading whitespace from a string. - -```erlang -remove_leading_noise(Str) -> - remove_leading_noise(Str, ?NOISE_CHARS). -``` - -### remove_leading_noise - -```erlang -remove_leading_noise(Bin, Noise) when is_binary(Bin) -> - hb_util:bin(remove_leading_noise(hb_util:list(Bin), Noise)); -``` - -### remove_leading_noise - -```erlang -remove_leading_noise([], _) -> []; -``` - -### remove_leading_noise - -```erlang -remove_leading_noise([Char|Str], Noise) -> - case lists:member(Char, Noise) of - true -> - remove_leading_noise(Str, Noise); - false -> [Char|Str] - end. -``` - -### remove_trailing_noise - -Remove trailing noise characters from a string. By default, this is - -```erlang -remove_trailing_noise(Str) -> - removing_trailing_noise(Str, ?NOISE_CHARS). -``` - -### removing_trailing_noise - -```erlang -removing_trailing_noise(Bin, Noise) when is_binary(Bin) -> - removing_trailing_noise(binary:bin_to_list(Bin), Noise); -``` - -### removing_trailing_noise - -```erlang -removing_trailing_noise(BinList, Noise) when is_list(BinList) -> - case lists:member(lists:last(BinList), Noise) of - true -> - removing_trailing_noise(lists:droplast(BinList), Noise); - false -> BinList - end. -``` - -### indent - -Format a string with an indentation level. - -```erlang -indent(Str, Indent) -> indent(Str, #{}, Indent). 
-``` - -### indent - -Format a string with an indentation level. - -```erlang -indent(Str, Opts, Indent) -> indent(Str, [], Opts, Indent). -``` - -### indent - -Format a string with an indentation level. - -```erlang -indent(FmtStr, Terms, Opts, Ind) -> - IndentSpaces = hb_opts:get(debug_print_indent, Opts), - EscapedFmt = escape_format(FmtStr), - lists:droplast( - lists:flatten( - io_lib:format( - [$\s || _ <- lists:seq(1, Ind * IndentSpaces)] ++ - lists:flatten(EscapedFmt) ++ "\n", - Terms - ) - ) - ). -``` - -### escape_format - -Escape a string for use as an io_lib:format specifier. - -```erlang -escape_format(Str) when is_list(Str) -> - re:replace( - Str, - "~([a-z\\-_]+@[0-9]+\\.[0-9]+)", "~~\\1", - [global, {return, list}] - ); -``` - -### escape_format - -Escape a string for use as an io_lib:format specifier. -Format an error message as a string. - -```erlang -escape_format(Else) -> Else. -``` - -### error - -Escape a string for use as an io_lib:format specifier. -Format an error message as a string. - -```erlang -error(ErrorMsg, Opts) -> - Type = hb_ao:get(<<"type">>, ErrorMsg, <<"">>, Opts), - Details = hb_ao:get(<<"details">>, ErrorMsg, <<"">>, Opts), - Stacktrace = hb_ao:get(<<"stacktrace">>, ErrorMsg, <<"">>, Opts), - hb_util:bin( - [ - <<"Termination type: '">>, Type, - <<"'\n\nStacktrace:\n\n">>, Stacktrace, - <<"\n\nError details:\n\n">>, Details - ] - ). -``` - -### indent_lines - -Take a series of strings or a combined string and format as a - -```erlang -indent_lines(Strings, Indent) when is_binary(Strings) -> - indent_lines(binary:split(Strings, <<"\n">>, [global]), Indent); -``` - -### indent_lines - -Take a series of strings or a combined string and format as a - -```erlang -indent_lines(Strings, Indent) when is_list(Strings) -> - hb_util:bin(lists:join( - "\n", - [ - indent(hb_util:list(String), #{}, Indent) - || - String <- Strings - ] - )). -``` - -### binary - -Format a binary as a short string suitable for printing. 
- -```erlang -binary(Bin) -> - case short_id(Bin) of - undefined -> - MaxBinPrint = hb_opts:get(debug_print_binary_max), - Printable = - binary:part( - Bin, - 0, - case byte_size(Bin) of - X when X < MaxBinPrint -> X; - _ -> MaxBinPrint - end - ), - PrintSegment = - case is_human_binary(Printable) of - true -> Printable; - false -> hb_util:encode(Printable) - end, - lists:flatten( - [ - "\"", - [PrintSegment], - case Printable == Bin of - true -> "\""; - false -> - io_lib:format( - "...\" <~s bytes>", - [hb_util:human_int(byte_size(Bin))] - ) - end - ] - ); - ShortID -> - lists:flatten(io_lib:format("~s", [ShortID])) - end. -``` - -### maybe_multiline - -Format a map as either a single line or a multi-line string depending - -```erlang -maybe_multiline(X, Opts, Indent) -> - case maybe_format_short(X, Opts, Indent) of - {ok, SimpleFmt} -> SimpleFmt; - error -> - "\n" ++ lists:flatten(message(X, Opts, Indent)) - end. -``` - -### maybe_format_short - -Attempt to generate a short formatting of a message, using the given - -```erlang -maybe_format_short(X, Opts, _Indent) -> - MaxLen = hb_opts:get(debug_print_map_line_threshold, 100, Opts), - SimpleFmt = - case is_binary(X) of - true -> binary(X); - false -> io_lib:format("~p", [X]) - end, - case is_multiline(SimpleFmt) orelse (lists:flatlength(SimpleFmt) > MaxLen) of - true -> error; - false -> {ok, SimpleFmt} - end. -``` - -### is_multiline - -Is the given string a multi-line string? - -```erlang -is_multiline(Str) -> - lists:member($\n, Str). -``` - -### eunit_print - -Format and print an indented string to standard error. - -```erlang -eunit_print(FmtStr, FmtArgs) -> - io:format( - standard_error, - "~n~s ", - [indent(FmtStr ++ "...", FmtArgs, #{}, 4)] - ). 
-``` - -### print_trace - -Print the trace of the current stack, up to the first non-hyperbeam - -```erlang -print_trace(Stack, CallMod, CallFunc, CallLine) -> - print_trace(Stack, "HB TRACE", - lists:flatten(io_lib:format("[~s:~w ~p]", - [CallMod, CallLine, CallFunc]) - )). -``` - -### print_trace - -```erlang -print_trace(Stack, Label, CallerInfo) -> - io:format(standard_error, "=== ~s ===~s==>~n~s", - [ - Label, CallerInfo, - lists:flatten(trace(Stack)) - ]). -``` - -### trace - -Format a stack trace as a list of strings, one for each stack frame. - -```erlang -trace(Stack) -> - format_trace(Stack, hb_opts:get(stack_print_prefixes, [], #{})). -``` - -### format_trace - -```erlang -format_trace([], _) -> []; -``` - -### format_trace - -```erlang -format_trace([Item|Rest], Prefixes) -> - case element(1, Item) of - Atom when is_atom(Atom) -> - case true of %is_hb_module(Atom, Prefixes) of - true -> - [ - format_trace(Item, Prefixes) | - format_trace(Rest, Prefixes) - ]; - false -> [] - end; - _ -> [] - end; -``` - -### format_trace - -```erlang -format_trace({Func, ArityOrTerm, Extras}, Prefixes) -> - format_trace({no_module, Func, ArityOrTerm, Extras}, Prefixes); -``` - -### format_trace - -```erlang -format_trace({Mod, Func, ArityOrTerm, Extras}, _Prefixes) -> - ExtraMap = hb_maps:from_list(Extras), - indent( - "~p:~p/~p [~s]~n", - [ - Mod, Func, ArityOrTerm, - case hb_maps:get(line, ExtraMap, undefined) of - undefined -> "No details"; - Line -> - hb_maps:get(file, ExtraMap) - ++ ":" ++ integer_to_list(Line) - end - ], - #{}, - 1 - ). -``` - -### print_trace_short - -Print a trace to the standard error stream. - -```erlang -print_trace_short(Trace, Mod, Func, Line) -> - io:format(standard_error, "=== [ HB SHORT TRACE ~p:~w ~p ] ==> ~s~n", - [ - Mod, Line, Func, - trace_short(Trace) - ] - ). 
-``` - -### trace_to_list - -Return a list of calling modules and lines from a trace, removing all - -```erlang -trace_to_list(Trace) -> - Prefixes = hb_opts:get(stack_print_prefixes, [], #{}), - lists:filtermap( - fun(TraceItem) when is_binary(TraceItem) -> - {true, TraceItem}; - (TraceItem) -> - Formatted = format_trace_element(TraceItem), - case hb_util:is_hb_module(Formatted, Prefixes) of - true -> {true, Formatted}; - false -> false - end - end, - Trace - ). -``` - -### trace_short - -Format a trace to a short string. - -```erlang -trace_short() -> trace_short(get_trace(erlang)). -``` - -### trace_short - -Format a trace to a short string. - -```erlang -trace_short(Type) when is_atom(Type) -> trace_short(get_trace(Type)); -``` - -### trace_short - -Format a trace to a short string. -Format a trace element in form `mod:line` or `mod:func` for Erlang - -```erlang -trace_short(Trace) when is_list(Trace) -> - lists:join(" / ", lists:reverse(trace_to_list(Trace))). -``` - -### format_trace_element - -Format a trace to a short string. -Format a trace element in form `mod:line` or `mod:func` for Erlang - -```erlang -format_trace_element(Bin) when is_binary(Bin) -> Bin; -``` - -### format_trace_element - -Format a trace to a short string. -Format a trace element in form `mod:line` or `mod:func` for Erlang - -```erlang -format_trace_element({Mod, Line}) -> - lists:flatten(io_lib:format("~p:~p", [Mod, Line])); -``` - -### format_trace_element - -Format a trace to a short string. -Format a trace element in form `mod:line` or `mod:func` for Erlang - -```erlang -format_trace_element({Mod, _, _, [{file, _}, {line, Line}|_]}) -> - lists:flatten(io_lib:format("~p:~p", [Mod, Line])); -``` - -### format_trace_element - -Format a trace to a short string. 
-Format a trace element in form `mod:line` or `mod:func` for Erlang -Utility function to help macro `?trace/0` remove the first frame of the - -```erlang -format_trace_element({Mod, Func, _ArityOrTerm, _Extras}) -> - lists:flatten(io_lib:format("~p:~p", [Mod, Func])). -``` - -### trace_macro_helper - -Format a trace to a short string. -Format a trace element in form `mod:line` or `mod:func` for Erlang -Utility function to help macro `?trace/0` remove the first frame of the - -```erlang -trace_macro_helper(Fun, {_, {_, Stack}}, Mod, Func, Line) -> - Fun(Stack, Mod, Func, Line). -``` - -### get_trace - -Get the trace of the current execution. If the argument is `erlang`, - -```erlang -get_trace(erlang) -> - case catch error(debugging_print) of - {_, {_, Stack}} -> normalize_trace(Stack); - _ -> [] - end; -``` - -### get_trace - -Get the trace of the current execution. If the argument is `erlang`, - -```erlang -get_trace(ao) -> - case get(ao_stack) of - undefined -> []; - Stack -> Stack - end. -``` - -### normalize_trace - -Remove all calls from this module from the top of a trace. - -```erlang -normalize_trace([]) -> []; -``` - -### normalize_trace - -Remove all calls from this module from the top of a trace. - -```erlang -normalize_trace([{Mod, _, _, _}|Rest]) when Mod == ?MODULE -> - normalize_trace(Rest); -``` - -### normalize_trace - -Remove all calls from this module from the top of a trace. -Format a message for printing, optionally taking an indentation level - -```erlang -normalize_trace(Trace) -> Trace. -``` - -### message - -Remove all calls from this module from the top of a trace. -Format a message for printing, optionally taking an indentation level - -```erlang -message(Item) -> message(Item, #{}). -``` - -### message - -Remove all calls from this module from the top of a trace. -Format a message for printing, optionally taking an indentation level - -```erlang -message(Item, Opts) -> message(Item, Opts, 0). 
-``` - -### message - -Remove all calls from this module from the top of a trace. -Format a message for printing, optionally taking an indentation level - -```erlang -message(Bin, Opts, Indent) when is_binary(Bin) -> - indent( - binary(Bin), - Opts, - Indent - ); -``` - -### message - -Remove all calls from this module from the top of a trace. -Format a message for printing, optionally taking an indentation level - -```erlang -message(List, Opts, Indent) when is_list(List) -> - % Remove the leading newline from the formatted list, if it exists. -``` - -### message - -```erlang -message(RawMap, Opts, Indent) when is_map(RawMap) -> - % Should we filter out the priv key? - FilterPriv = hb_opts:get(debug_show_priv, false, Opts), - MainPriv = hb_maps:get(<<"priv">>, RawMap, #{}, Opts), - % Add private keys to the output if they are not hidden. Opt takes 3 forms: - % 1. `false' -- never show priv - % 2. `if_present' -- show priv only if there are keys inside - % 2. `always' -- always show priv - FooterKeys = - case {FilterPriv, MainPriv} of - {false, _} -> []; - {if_present, #{}} -> []; - {_, Priv} -> [{<<"!Private!">>, Priv}] - end, - Map = - case FilterPriv of - false -> RawMap; - _ -> hb_private:reset(RawMap) - end, - % Define helper functions for formatting elements of the map. -``` - -### message - -```erlang -message(Item, Opts, Indent) -> - % Whatever we have is not a message map. -``` - -### short_id - -Return a short ID for the different types of IDs used in AO-Core. - -```erlang -short_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 32 -> - short_id(hb_util:human_id(Bin)); -``` - -### short_id - -Return a short ID for the different types of IDs used in AO-Core. - -```erlang -short_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 43 -> - << FirstTag:5/binary, _:33/binary, LastTag:5/binary >> = Bin, - << FirstTag/binary, "..", LastTag/binary >>; -``` - -### short_id - -Return a short ID for the different types of IDs used in AO-Core. 
- -```erlang -short_id(Bin) when byte_size(Bin) > 43 andalso byte_size(Bin) < 100 -> - case binary:split(Bin, <<"/">>, [trim_all, global]) of - [First, Second] when byte_size(Second) == 43 -> - FirstEnc = short_id(First), - SecondEnc = short_id(Second), - << FirstEnc/binary, "/", SecondEnc/binary >>; - [First, Key] -> - FirstEnc = short_id(First), - << FirstEnc/binary, "/", Key/binary >>; - _ -> - Bin - end; -``` - -### short_id - -Return a short ID for the different types of IDs used in AO-Core. - -```erlang -short_id(<< "/", SingleElemHashpath/binary >>) -> - Enc = short_id(SingleElemHashpath), - if is_binary(Enc) -> << "/", Enc/binary >>; - true -> undefined - end; -``` - -### short_id - -Return a short ID for the different types of IDs used in AO-Core. - -```erlang -short_id(Key) when byte_size(Key) < 43 -> Key; -``` - -### short_id - -Return a short ID for the different types of IDs used in AO-Core. -Determine whether a binary is human-readable. - -```erlang -short_id(_) -> undefined. -``` - -### is_human_binary - -Return a short ID for the different types of IDs used in AO-Core. -Determine whether a binary is human-readable. - -```erlang -is_human_binary(Bin) when is_binary(Bin) -> - case unicode:characters_to_binary(Bin) of - {error, _, _} -> false; - _ -> true -``` - ---- - -*Generated from [hb_format.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_format.erl)* diff --git a/docs/book/src/hb_gateway_client.erl.md b/docs/book/src/hb_gateway_client.erl.md deleted file mode 100644 index a1cc38bdf..000000000 --- a/docs/book/src/hb_gateway_client.erl.md +++ /dev/null @@ -1,371 +0,0 @@ -# hb_gateway_client - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_gateway_client.erl) - -Implementation of Arweave's GraphQL API to gain access to specific -items of data stored on the network. 
-This module must be used to get full HyperBEAM `structured@1.0` form messages -from data items stored on the network, as Arweave gateways do not presently -expose all necessary fields to retrieve this information outside of the -GraphQL API. When gateways integrate serving in `httpsig@1.0` form, this -module will be deprecated. - ---- - -## Exported Functions - -- `data/2` -- `item_spec/0` -- `query/2` -- `query/3` -- `query/4` -- `query/5` -- `read/2` -- `result_to_message/2` -- `scheduler_location/2` - ---- - -### read - -Implementation of Arweave's GraphQL API to gain access to specific -Get a data item (including data and tags) by its ID, using the node's - -```erlang -read(ID, Opts) -> - {Query, Variables} = case maps:is_key(<<"subindex">>, Opts) of - true -> - Tags = subindex_to_tags(maps:get(<<"subindex">>, Opts)), - { - << - "query($transactionIds: [ID!]!) { ", - "transactions(ids: $transactionIds,", - "tags: ", (Tags)/binary , ",", - "first: 1){ ", - "edges { ", (item_spec())/binary , " } ", - "} ", - "} " - >>, - #{ - <<"transactionIds">> => [hb_util:human_id(ID)] - } - }; - false -> - { - << - "query($transactionIds: [ID!]!) { ", - "transactions(ids: $transactionIds, first: 1){ ", - "edges { ", (item_spec())/binary , " } ", - "} ", - "} " - >>, - #{ - <<"transactionIds">> => [hb_util:human_id(ID)] - } - } - end, - case query(Query, Variables, Opts) of - {error, Reason} -> {error, Reason}; - {ok, GqlMsg} -> - case hb_ao:get(<<"data/transactions/edges/1/node">>, GqlMsg, Opts) of - not_found -> - ?event({read_not_found, {id, ID}, {gql_msg, GqlMsg}}), - {error, not_found}; - Item -> - ?event({read_found, {id, ID}, {item, Item}}), - result_to_message(ID, Item, Opts) - end - end. 
-``` - -### item_spec - -Gives the fields of a transaction that are needed to construct an -Get the data associated with a transaction by its ID, using the node's - -```erlang -item_spec() -> - <<""" - node { - id - anchor - signature - recipient - owner { key } - fee { winston } - quantity { winston } - tags { name value } - data { size } - } - cursor - """>>. -``` - -### data - -Gives the fields of a transaction that are needed to construct an -Get the data associated with a transaction by its ID, using the node's - -```erlang -data(ID, Opts) -> - Req = #{ - <<"multirequest-accept-status">> => 200, - <<"multirequest-responses">> => 1, - <<"path">> => <<"/raw/", ID/binary>>, - <<"method">> => <<"GET">> - }, - case hb_http:request(Req, Opts) of - {ok, Res} -> - ?event(gateway, - {data, - {id, ID}, - {response, Res}, - {body, hb_ao:get(<<"body">>, Res, <<>>, Opts)} - } - ), - {ok, hb_ao:get(<<"body">>, Res, <<>>, Opts)}; - Res -> - ?event(gateway, {request_error, {id, ID}, {response, Res}}), - {error, no_viable_gateway} - end. -``` - -### scheduler_location - -Find the location of the scheduler based on its ID, through GraphQL. - -```erlang -scheduler_location(Address, Opts) -> - Query = - <<"query($SchedulerAddrs: [String!]!) 
{ ", - "transactions(", - "owners: $SchedulerAddrs, ", - "tags: { name: \"Type\" values: [\"Scheduler-Location\"] }, ", - "first: 1", - "){ ", - "edges { ", - (item_spec())/binary , - " } ", - "} ", - "}">>, - Variables = #{ <<"SchedulerAddrs">> => [Address] }, - case query(Query, Variables, Opts) of - {error, Reason} -> - ?event({scheduler_location, {query, Query}, {error, Reason}}), - {error, Reason}; - {ok, GqlMsg} -> - ?event({scheduler_location_req, {query, Query}, {response, GqlMsg}}), - case hb_ao:get(<<"data/transactions/edges/1/node">>, GqlMsg, Opts) of - not_found -> - ?event(scheduler_location, - {graphql_scheduler_location_not_found, - {address, Address} - } - ), - {error, not_found}; - Item = #{ <<"id">> := ID } -> - ?event(scheduler_location, - {found_via_graphql, - {address, Address}, - {id, ID} - } - ), - result_to_message(ID, Item, Opts) - end - end. -``` - -### query - -Run a GraphQL request encoded as a binary. The node message may contain - -```erlang -query(Query, Opts) -> - query(Query, undefined, Opts). -``` - -### query - -```erlang -query(Query, Variables, Opts) -> - query(Query, Variables, undefined, Opts). -``` - -### query - -```erlang -query(Query, Variables, Node, Opts) -> - query(Query, Variables, Node, undefined, Opts). -``` - -### query - -```erlang -query(Query, Variables, Node, Operation, Opts) -> - % Either use the given node if provided, or use the local machine's routes - % to find the GraphQL endpoint. -``` - -### result_to_message - -Takes a GraphQL item node, matches it with the appropriate data from a - -```erlang -result_to_message(Item, Opts) -> - case hb_maps:get(<<"id">>, Item, not_found, Opts) of - ExpectedID when is_binary(ExpectedID) -> - result_to_message(ExpectedID, Item, Opts); - _ -> - result_to_message(undefined, Item, Opts) - end. 
-``` - -### result_to_message - -```erlang -result_to_message(ExpectedID, Item, Opts) -> - GQLOpts = - Opts#{ - hashpath => ignore, - cache_control => [<<"no-cache">>, <<"no-store">>] - }, - % We have the headers, so we can get the data. -``` - -### normalize_null - -```erlang -normalize_null(null) -> <<>>; -``` - -### normalize_null - -```erlang -normalize_null(not_found) -> <<>>; -``` - -### normalize_null - -```erlang -normalize_null(Bin) when is_binary(Bin) -> Bin. -``` - -### decode_id_or_null - -```erlang -decode_id_or_null(Bin) when byte_size(Bin) > 0 -> - hb_util:human_id(Bin); -``` - -### decode_id_or_null - -```erlang -decode_id_or_null(_) -> - <<>>. -``` - -### decode_or_null - -```erlang -decode_or_null(Bin) when is_binary(Bin) -> - hb_util:decode(Bin); -``` - -### decode_or_null - -```erlang -decode_or_null(_) -> - <<>>. -``` - -### subindex_to_tags - -Takes a list of messages with `name` and `value` fields, and formats - -```erlang -subindex_to_tags(Subindex) -> - Formatted = - lists:map( - fun(Spec) -> - io_lib:format( - "{ name: \"~s\", values: [\"~s\"]}", - [ - hb_ao:get(<<"name">>, Spec), - hb_ao:get(<<"value">>, Spec) - ] - ) - end, - hb_util:message_to_ordered_list(Subindex) - ), - ListInner = - hb_util:bin( - string:join([lists:flatten(E) || E <- Formatted], ", ") - ), - <<"[", ListInner/binary, "]">>. -%%% Tests -``` - -### ans104_no_data_item_test - -Takes a list of messages with `name` and `value` fields, and formats - -```erlang -ans104_no_data_item_test() -> - % Start a random node so that all of the services come up. -``` - -### scheduler_location_test - -Test that we can get the scheduler location. - -```erlang -scheduler_location_test() -> - % Start a random node so that all of the services come up. 
-``` - -### l1_transaction_test - -Test l1 message from graphql -Test l2 message from graphql - -```erlang -l1_transaction_test() -> - _Node = hb_http_server:start_node(#{}), - {ok, Res} = read(<<"uJBApOt4ma3pTfY6Z4xmknz5vAasup4KcGX7FJ0Of8w">>, #{}), - ?event(gateway, {l1_transaction, Res}), - Data = maps:get(<<"data">>, Res), - ?assertEqual(<<"Hello World">>, Data). -``` - -### l2_dataitem_test - -Test l1 message from graphql -Test l2 message from graphql -Test optimistic index - -```erlang -l2_dataitem_test() -> - _Node = hb_http_server:start_node(#{}), - {ok, Res} = read(<<"oyo3_hCczcU7uYhfByFZ3h0ELfeMMzNacT-KpRoJK6g">>, #{}), - ?event(gateway, {l2_dataitem, Res}), - Data = maps:get(<<"data">>, Res), - ?assertEqual(<<"Hello World">>, Data). -``` - -### ao_dataitem_test - -Test l1 message from graphql -Test l2 message from graphql -Test optimistic index - -```erlang -ao_dataitem_test() -> - _Node = hb_http_server:start_node(#{}), - {ok, Res} = read(<<"oyo3_hCczcU7uYhfByFZ3h0ELfeMMzNacT-KpRoJK6g">>, #{ }), - ?event(gateway, {l2_dataitem, Res}), - Data = maps:get(<<"data">>, Res), -``` - ---- - -*Generated from [hb_gateway_client.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_gateway_client.erl)* diff --git a/docs/book/src/hb_http.erl.md b/docs/book/src/hb_http.erl.md deleted file mode 100644 index ce0d291c0..000000000 --- a/docs/book/src/hb_http.erl.md +++ /dev/null @@ -1,914 +0,0 @@ -# hb_http - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http.erl) - -Hyperbeam's core HTTP request/reply functionality. The functions in this -module generally take a message request from their caller and return a -response in message form, as granted by the peer. This module is mostly -used by hb_client, but can also be used by other modules that need to make -HTTP requests. 
- ---- - -## Exported Functions - -- `accept_to_codec/2` -- `get/2` -- `get/3` -- `message_to_request/2` -- `post/3` -- `post/4` -- `reply/4` -- `req_to_tabm_singleton/3` -- `request/2` -- `request/4` -- `request/5` -- `start/0` - ---- - -### start - -```erlang -start() -> - httpc:set_options([{max_keep_alive_length, 0}]), - ok. -``` - -### get - -Gets a URL via HTTP and returns the resulting message in deserialized - -```erlang -get(Node, Opts) -> get(Node, <<"/">>, Opts). -``` - -### get - -Gets a URL via HTTP and returns the resulting message in deserialized - -```erlang -get(Node, PathBin, Opts) when is_binary(PathBin) -> - get(Node, #{ <<"path">> => PathBin }, Opts); -``` - -### get - -Gets a URL via HTTP and returns the resulting message in deserialized - -```erlang -get(Node, Message, Opts) -> - request( - <<"GET">>, - Node, - hb_ao:get(<<"path">>, Message, <<"/">>, Opts), - Message, - Opts - ). -``` - -### post - -Posts a message to a URL on a remote peer via HTTP. Returns the - -```erlang -post(Node, Path, Opts) when is_binary(Path) -> - post(Node, #{ <<"path">> => Path }, Opts); -``` - -### post - -Posts a message to a URL on a remote peer via HTTP. Returns the - -```erlang -post(Node, Message, Opts) -> - post(Node, - hb_ao:get( - <<"path">>, - Message, - <<"/">>, - Opts#{ topic => ao_internal } - ), - Message, - Opts - ). -``` - -### post - -```erlang -post(Node, Path, Message, Opts) -> - case request(<<"POST">>, Node, Path, Message, Opts) of - {ok, Res} -> - ?event(http, {post_response, Res}), - {ok, Res}; - Error -> Error - end. -``` - -### request - -Posts a binary to a URL on a remote peer via HTTP, returning the raw - -```erlang -request(Message, Opts) -> - % Special case: We are not given a peer and a path, so we need to - % preprocess the URL to find them. -``` - -### request - -```erlang -request(Method, Peer, Path, Opts) -> - request(Method, Peer, Path, #{}, Opts). 
-``` - -### request - -```erlang -request(Method, Config = #{ <<"nodes">> := Nodes }, Path, Message, Opts) when is_list(Nodes) -> - % The request has a `route' (see `dev_router' for more details), so we use the - % `multirequest' functionality, rather than a single request. -``` - -### request - -```erlang -request(Method, #{ <<"opts">> := ReqOpts, <<"uri">> := URI }, _Path, Message, Opts) -> - % The request has a set of additional options, so we apply them to the - % request. -``` - -### request - -```erlang -request(Method, Peer, Path, RawMessage, Opts) -> - ?event({request, {method, Method}, {peer, Peer}, {path, Path}, {message, RawMessage}}), - Req = - prepare_request( - hb_maps:get( - <<"codec-device">>, - RawMessage, - <<"httpsig@1.0">>, - Opts - ), - Method, - Peer, - Path, - RawMessage, - Opts - ), - StartTime = os:system_time(millisecond), - % Perform the HTTP request. -``` - -### response_status_to_atom - -Convert a HTTP status code to a status atom. - -```erlang -response_status_to_atom(Status) -> - case Status of - 201 -> created; - X when X < 400 -> ok; - X when X < 500 -> error; - _ -> failure - end. -``` - -### outbound_result_to_message - -Convert an HTTP response to a message. - -```erlang -outbound_result_to_message(<<"ans104@1.0">>, Status, Headers, Body, Opts) -> - ?event(http_outbound, - {result_is_ans104, {headers, Headers}, {body, Body}}, - Opts - ), - try ar_bundles:deserialize(Body) of - Deserialized -> - { - response_status_to_atom(Status), - hb_message:convert( - Deserialized, - <<"structured@1.0">>, - <<"ans104@1.0">>, - Opts - ) - } - catch - _Class:ExceptionPattern:Stacktrace -> - % The response message had a `codec-device: ans104@1.0', but we - % failed to deserialize it, so we fallback to HTTPSig. 
-``` - -### outbound_result_to_message - -```erlang -outbound_result_to_message(<<"httpsig@1.0">>, Status, Headers, Body, Opts) -> - ?event(http_outbound, {result_is_httpsig, {body, Body}}, Opts), - { - response_status_to_atom(Status), - http_response_to_httpsig(Status, Headers, Body, Opts) - }. -``` - -### http_response_to_httpsig - -Convert a HTTP response to a httpsig message. -Given a message, return the information needed to make the request. - -```erlang -http_response_to_httpsig(Status, HeaderMap, Body, Opts) -> - (hb_message:convert( - hb_maps:merge( - HeaderMap#{ <<"status">> => hb_util:bin(Status) }, - case Body of - <<>> -> #{}; - _ -> #{ <<"body">> => Body } - end, - Opts - ), - #{ <<"device">> => <<"structured@1.0">>, <<"bundle">> => true }, - <<"httpsig@1.0">>, - Opts - ))#{ <<"status">> => hb_util:int(Status) }. -``` - -### message_to_request - -Convert a HTTP response to a httpsig message. -Given a message, return the information needed to make the request. - -```erlang -message_to_request(M, Opts) -> - % Get the route for the message - Res = route_to_request(M, RouteRes = dev_router:route(M, Opts), Opts), - ?event(debug_http, {route_res, {route_res, RouteRes}, {full_res, Res}, {msg, M}}), - Res. -``` - -### route_to_request - -Parse a `dev_router:route` response and return a tuple of request - -```erlang -route_to_request(M, {ok, URI}, Opts) when is_binary(URI) -> - route_to_request(M, {ok, #{ <<"uri">> => URI, <<"opts">> => #{} }}, Opts); -``` - -### route_to_request - -Parse a `dev_router:route` response and return a tuple of request - -```erlang -route_to_request(M, {ok, #{ <<"uri">> := XPath, <<"opts">> := ReqOpts}}, Opts) -> - % The request is a direct HTTP URL, so we need to split the path into a - % host and path. -``` - -### route_to_request - -```erlang -route_to_request(M, {ok, Routes}, Opts) -> - ?event(http_outbound, {found_routes, {req, M}, {routes, Routes}}), - % The result is a route, so we leave it to `request' to handle it. 
-``` - -### route_to_request - -```erlang -route_to_request(M, {error, Reason}, _Opts) -> - {error, {no_viable_route, {reason, Reason}, {message, M}}}. -``` - -### prepare_request - -Turn a set of request arguments into a request message, formatted in the - -```erlang -prepare_request(Format, Method, Peer, Path, RawMessage, Opts) -> - Message = hb_ao:normalize_keys(RawMessage, Opts), - % Generate a `cookie' key for the message, if an unencoded cookie is - % present. -``` - -### reply - -Reply to the client's HTTP request with a message. - -```erlang -reply(Req, TABMReq, Message, Opts) -> - Status = - case hb_ao:get(<<"status">>, Message, Opts) of - not_found -> 200; - S-> S - end, - reply(Req, TABMReq, Status, Message, Opts). -``` - -### reply - -```erlang -reply(Req, TABMReq, BinStatus, RawMessage, Opts) when is_binary(BinStatus) -> - reply(Req, TABMReq, binary_to_integer(BinStatus), RawMessage, Opts); -``` - -### reply - -```erlang -reply(InitReq, TABMReq, Status, RawMessage, Opts) -> - KeyNormMessage = hb_ao:normalize_keys(RawMessage, Opts), - {ok, Req, Message} = reply_handle_cookies(InitReq, KeyNormMessage, Opts), - {ok, HeadersBeforeCors, EncodedBody} = - encode_reply( - Status, - TABMReq, - Message, - Opts - ), - % Get the CORS request headers from the message, if they exist. -``` - -### reply_handle_cookies - -Handle replying with cookies if the message contains them. Returns the - -```erlang -reply_handle_cookies(Req, Message, Opts) -> - {ok, Cookies} = dev_codec_cookie:extract(Message, #{}, Opts), - ?event(debug_cookie, {encoding_reply_cookies, {explicit, Cookies}}), - case Cookies of - NoCookies when map_size(NoCookies) == 0 -> {ok, Req, Message}; - _ -> - % The internal values of the `cookie' field will be stored in the - % `priv_store' by default, so we let `dev_codec_cookie:opts/1' - % reset the options. 
-``` - -### add_cors_headers - -Add permissive CORS headers to a message, if the message has not already - -```erlang -add_cors_headers(Msg, ReqHdr, Opts) -> - CorHeaders = #{ - <<"access-control-allow-origin">> => <<"*">>, - <<"access-control-allow-methods">> => <<"GET, POST, PUT, DELETE, OPTIONS">>, - <<"access-control-expose-headers">> => <<"*">> - }, - WithAllowHeaders = case ReqHdr of - <<>> -> CorHeaders; - _ -> CorHeaders#{ - <<"access-control-allow-headers">> => ReqHdr - } - end, - % Keys in the given message will overwrite the defaults listed below if - % included, due to `hb_maps:merge''s precidence order. -``` - -### encode_reply - -Generate the headers and body for a HTTP response message. - -```erlang -encode_reply(Status, TABMReq, Message, Opts) -> - Codec = accept_to_codec(TABMReq, Message, Opts), - ?event(http, {encoding_reply, {codec, Codec}, {message, Message}}), - BaseHdrs = - hb_maps:merge( - #{ - <<"codec-device">> => Codec - }, - case codec_to_content_type(Codec, Opts) of - undefined -> #{}; - CT -> #{ <<"content-type">> => CT } - end, - Opts - ), - AcceptBundle = - hb_util:atom( - hb_maps:get(<<"accept-bundle">>, TABMReq, false, Opts) - ), - ?event(http, - {encoding_reply, - {status, Status}, - {codec, Codec}, - {should_bundle, AcceptBundle}, - {response_message, Message} - } - ), - % Codecs generally do not need to specify headers outside of the content-type, - % aside the default `httpsig@1.0' codec, which expresses its form in HTTP - % documents, and subsequently must set its own headers. -``` - -### accept_to_codec - -Calculate the codec name to use for a reply given the original parsed - -```erlang -accept_to_codec(OriginalReq, Opts) -> - accept_to_codec(OriginalReq, undefined, Opts). 
-``` - -### accept_to_codec - -```erlang -accept_to_codec(#{ <<"require-codec">> := RequiredCodec }, _Reply, Opts) -> - mime_to_codec(RequiredCodec, Opts); -``` - -### accept_to_codec - -```erlang -accept_to_codec(_OriginalReq, #{ <<"content-type">> := _ }, _Opts) -> - <<"httpsig@1.0">>; -``` - -### accept_to_codec - -```erlang -accept_to_codec(OriginalReq, _, Opts) -> - Accept = hb_maps:get(<<"accept">>, OriginalReq, <<"*/*">>, Opts), - ?event(debug_accept, - {accept_to_codec, - {original_req, OriginalReq}, - {accept, Accept} - } - ), - mime_to_codec(Accept, Opts). -``` - -### mime_to_codec - -Find a codec name from a mime-type. - -```erlang -mime_to_codec(<<"application/", Mime/binary>>, Opts) -> - Name = - case binary:match(Mime, <<"@">>) of - nomatch -> << Mime/binary, "@1.0" >>; - _ -> Mime - end, - case hb_ao:load_device(Name, Opts) of - {ok, _} -> Name; - {error, _} -> - Default = default_codec(Opts), - ?event(http, - {codec_parsing_error, - {given, Name}, - {defaulting_to, Default} - } - ), - Default - end; -``` - -### mime_to_codec - -Find a codec name from a mime-type. - -```erlang -mime_to_codec(<<"device/", Name/binary>>, _Opts) -> Name; -``` - -### mime_to_codec - -Find a codec name from a mime-type. - -```erlang -mime_to_codec(Device, Opts) -> - case binary:match(Device, <<"@">>) of - nomatch -> default_codec(Opts); - _ -> Device - end. -``` - -### default_codec - -Return the default codec for the given options. -Call the `content-type` key on a message with the given codec, using - -```erlang -default_codec(Opts) -> - hb_opts:get(default_codec, <<"httpsig@1.0">>, Opts). -``` - -### codec_to_content_type - -Return the default codec for the given options. 
-Call the `content-type` key on a message with the given codec, using - -```erlang -codec_to_content_type(Codec, Opts) -> - FastOpts = - Opts#{ - hashpath => ignore, - cache_control => [<<"no-cache">>, <<"no-store">>], - cache_lookup_hueristics => false, - load_remote_devices => false, - error_strategy => continue - }, - case hb_ao:get(<<"content-type">>, #{ <<"device">> => Codec }, FastOpts) of - not_found -> undefined; - CT -> CT - end. -``` - -### req_to_tabm_singleton - -Convert a cowboy request to a normalized message. We first parse the - -```erlang -req_to_tabm_singleton(Req, Body, Opts) -> - FullPath = - << - (cowboy_req:path(Req))/binary, - "?", - (cowboy_req:qs(Req))/binary - >>, - Headers = cowboy_req:headers(Req), - {ok, _Path, QueryKeys} = hb_singleton:from_path(FullPath), - PrimitiveMsg = maps:merge(Headers, QueryKeys), - Codec = - case hb_maps:find(<<"codec-device">>, PrimitiveMsg, Opts) of - {ok, ExplicitCodec} -> ExplicitCodec; - error -> - case hb_maps:find(<<"content-type">>, PrimitiveMsg, Opts) of - {ok, ContentType} -> mime_to_codec(ContentType, Opts); - error -> default_codec(Opts) - end - end, - ?event(http, - {parsing_req, - {path, FullPath}, - {query, QueryKeys}, - {headers, Headers}, - {primitive_message, PrimitiveMsg} - } - ), - ?event({req_to_tabm_singleton, {codec, Codec}}), - case Codec of - <<"httpsig@1.0">> -> - ?event( - {req_to_tabm_singleton, - {request, {explicit, Req}, - {body, {string, Body}} - }} - ), - httpsig_to_tabm_singleton(PrimitiveMsg, Req, Body, Opts); - <<"ans104@1.0">> -> - Item = ar_bundles:deserialize(Body), - ?event(debug_accept, - {deserialized_ans104, - {item, Item}, - {exact, {explicit, Item}} - } - ), - case ar_bundles:verify_item(Item) of - true -> - ?event(ans104, {valid_ans104_signature, Item}), - ANS104 = - hb_message:convert( - Item, - <<"structured@1.0">>, - <<"ans104@1.0">>, - Opts - ), - normalize_unsigned(PrimitiveMsg, Req, ANS104, Opts); - false -> - throw({invalid_ans104_signature, Item}) - end; - 
Codec -> - % Assume that the codec stores the encoded message in the `body' field. -``` - -### httpsig_to_tabm_singleton - -HTTPSig messages are inherently mixed into the transport layer, so they - -```erlang -httpsig_to_tabm_singleton(PrimMsg, Req, Body, Opts) -> - {ok, Decoded} = - hb_message:with_only_committed( - hb_message:convert( - PrimMsg#{ <<"body">> => Body }, - <<"structured@1.0">>, - <<"httpsig@1.0">>, - Opts - ), - Opts - ), - ?event(http, {decoded, Decoded}, Opts), - ForceSignedRequests = hb_opts:get(force_signed_requests, false, Opts), - case (not ForceSignedRequests) orelse hb_message:verify(Decoded, all, Opts) of - true -> - ?event(http_verify, {verified_signature, Decoded}), - Signers = hb_message:signers(Decoded, Opts), - case Signers =/= [] andalso hb_opts:get(store_all_signed, false, Opts) of - true -> - ?event(http_verify, {storing_signed_from_wire, Decoded}), - {ok, _} = - hb_cache:write(Decoded, - Opts#{ - store => - #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-http">> - } - } - ); - false -> - do_nothing - end, - normalize_unsigned(PrimMsg, Req, Decoded, Opts); - false -> - ?event(http_verify, - {invalid_signature, - {signed, Decoded}, - {force, ForceSignedRequests} - } - ), - throw({invalid_commitments, Decoded}) - end. -``` - -### normalize_unsigned - -Add the method and path to a message, if they are not already present. 
- -```erlang -normalize_unsigned(PrimMsg, Req = #{ headers := RawHeaders }, Msg, Opts) -> - ?event({adding_method_and_path_from_request, {explicit, Req}}), - Method = cowboy_req:method(Req), - MsgPath = - hb_maps:get( - <<"path">>, - Msg, - hb_maps:get( - <<"path">>, - RawHeaders, - iolist_to_binary( - cowboy_req:uri( - Req, - #{ - host => undefined, - port => undefined, - scheme => undefined - } - ) - ), - Opts - ), - Opts - ), - FilterKeys = hb_opts:get(http_inbound_filter_keys, ?DEFAULT_FILTER_KEYS, Opts), - FilteredMsg = hb_message:without_unless_signed(FilterKeys, Msg, Opts), - BaseMsg = - FilteredMsg#{ - <<"method">> => Method, - <<"path">> => MsgPath, - <<"accept-bundle">> => - maps:get( - <<"accept-bundle">>, - Msg, - maps:get( - <<"accept-bundle">>, - PrimMsg, - maps:get(<<"accept-bundle">>, RawHeaders, false) - ) - ), - <<"accept">> => - Accept = maps:get( - <<"accept">>, - Msg, - maps:get( - <<"accept">>, - PrimMsg, - maps:get(<<"accept">>, RawHeaders, <<"*/*">>) - ) - ) - }, - ?event(debug_accept, {normalize_unsigned, {accept, Accept}}), - % Parse and add the cookie from the request, if present. We reinstate the - % `cookie' field in the message, as it is not typically signed, yet should - % be honored by the node anyway. -``` - -### simple_ao_resolve_unsigned_test - -```erlang -simple_ao_resolve_unsigned_test() -> - URL = hb_http_server:start_node(), - TestMsg = #{ <<"path">> => <<"/key1">>, <<"key1">> => <<"Value1">> }, - ?assertEqual({ok, <<"Value1">>}, post(URL, TestMsg, #{})). -``` - -### simple_ao_resolve_signed_test - -```erlang -simple_ao_resolve_signed_test() -> - URL = hb_http_server:start_node(), - TestMsg = #{ <<"path">> => <<"/key1">>, <<"key1">> => <<"Value1">> }, - Wallet = hb:wallet(), - {ok, Res} = - post( - URL, - hb_message:commit(TestMsg, Wallet), - #{} - ), - ?assertEqual(<<"Value1">>, Res). 
-``` - -### nested_ao_resolve_test - -```erlang -nested_ao_resolve_test() -> - URL = hb_http_server:start_node(), - Wallet = hb:wallet(), - {ok, Res} = - post( - URL, - hb_message:commit(#{ - <<"path">> => <<"/key1/key2/key3">>, - <<"key1">> => - #{<<"key2">> => - #{ - <<"key3">> => <<"Value2">> - } - } - }, Wallet), - #{} - ), - ?assertEqual(<<"Value2">>, Res). -``` - -### wasm_compute_request - -```erlang -wasm_compute_request(ImageFile, Func, Params) -> - wasm_compute_request(ImageFile, Func, Params, <<"">>). -``` - -### wasm_compute_request - -```erlang -wasm_compute_request(ImageFile, Func, Params, ResultPath) -> - {ok, Bin} = file:read_file(ImageFile), - Wallet = hb:wallet(), - hb_message:commit(#{ - <<"path">> => <<"/init/compute/results", ResultPath/binary>>, - <<"device">> => <<"wasm-64@1.0">>, - <<"function">> => Func, - <<"parameters">> => Params, - <<"body">> => Bin - }, Wallet). -``` - -### run_wasm_unsigned_test - -```erlang -run_wasm_unsigned_test() -> - Node = hb_http_server:start_node(#{force_signed => false}), - Msg = wasm_compute_request(<<"test/test-64.wasm">>, <<"fac">>, [3.0]), - {ok, Res} = post(Node, Msg, #{}), - ?event({res, Res}), - ?assertEqual(6.0, hb_ao:get(<<"output/1">>, Res, #{})). -``` - -### run_wasm_signed_test - -```erlang -run_wasm_signed_test() -> - Opts = #{ priv_wallet => hb:wallet() }, - URL = hb_http_server:start_node(#{force_signed => true}), - Msg = wasm_compute_request(<<"test/test-64.wasm">>, <<"fac">>, [3.0], <<"">>), - {ok, Res} = post(URL, hb_message:commit(Msg, Opts), Opts), - ?assertEqual(6.0, hb_ao:get(<<"output/1">>, Res, #{})). -``` - -### get_deep_unsigned_wasm_state_test - -```erlang -get_deep_unsigned_wasm_state_test() -> - URL = hb_http_server:start_node(#{force_signed => false}), - Msg = wasm_compute_request(<<"test/test-64.wasm">>, <<"fac">>, [3.0], <<"">>), - {ok, Res} = post(URL, Msg, #{}), - ?assertEqual(6.0, hb_ao:get(<<"/output/1">>, Res, #{})). 
-``` - -### get_deep_signed_wasm_state_test - -```erlang -get_deep_signed_wasm_state_test() -> - URL = hb_http_server:start_node(#{force_signed => true}), - Msg = - wasm_compute_request( - <<"test/test-64.wasm">>, - <<"fac">>, - [3.0], - <<"/output">> - ), - {ok, Res} = post(URL, Msg, #{}), - ?assertEqual(6.0, hb_ao:get(<<"1">>, Res, #{})). -``` - -### cors_get_test - -```erlang -cors_get_test() -> - URL = hb_http_server:start_node(), - {ok, Res} = get(URL, <<"/~meta@1.0/info">>, #{}), - ?assertEqual( - <<"*">>, - hb_ao:get(<<"access-control-allow-origin">>, Res, #{}) - ). -``` - -### ans104_wasm_test - -```erlang -ans104_wasm_test() -> - TestStore = [hb_test_utils:test_store()], - TestOpts = - #{ - force_signed => true, - store => TestStore, - priv_wallet => ar_wallet:new() - }, - ClientStore = [hb_test_utils:test_store()], - ClientOpts = #{ store => ClientStore, priv_wallet => hb:wallet() }, - URL = hb_http_server:start_node(TestOpts), - {ok, Bin} = file:read_file(<<"test/test-64.wasm">>), - Msg = - hb_message:commit( - #{ - <<"require-codec">> => <<"ans104@1.0">>, - <<"codec-device">> => <<"ans104@1.0">>, - <<"device">> => <<"wasm-64@1.0">>, - <<"function">> => <<"fac">>, - <<"parameters">> => [3.0], - <<"body">> => Bin - }, - ClientOpts, - #{ <<"device">> => <<"ans104@1.0">>, <<"bundle">> => true } - ), - ?assert(hb_message:verify(Msg, all, ClientOpts)), - ?event({msg, Msg}), - {ok, Res} = - post( - URL, - Msg#{ <<"path">> => <<"/init/compute/results">> }, - ClientOpts - ), - ?event({res, Res}), - ?assertEqual(6.0, hb_ao:get(<<"output/1">>, Res, ClientOpts)). -``` - -### send_large_signed_request_test - -```erlang -send_large_signed_request_test() -> - % Note: If the signature scheme ever changes, we will need to run the - % following to get a freshly signed request. 
-``` - -### index_test - -```erlang -index_test() -> - NodeURL = hb_http_server:start_node(), - {ok, Res} = - get( - NodeURL, - #{ - <<"path">> => <<"/~test-device@1.0/load">>, - <<"accept-bundle">> => false - }, - #{} - ), - ?assertEqual(<<"i like turtles!">>, hb_ao:get(<<"body">>, Res, #{})). -``` - -### index_request_test - -```erlang -index_request_test() -> - URL = hb_http_server:start_node(), - {ok, Res} = - get( - URL, - #{ - <<"path">> => <<"/~test-device@1.0/load?name=dogs">>, - <<"accept-bundle">> => false - }, - #{} - ), - ?assertEqual(<<"i like dogs!">>, hb_ao:get(<<"body">>, Res, #{})). -``` - ---- - -*Generated from [hb_http.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http.erl)* diff --git a/docs/book/src/hb_http_benchmark_tests.erl.md b/docs/book/src/hb_http_benchmark_tests.erl.md deleted file mode 100644 index 75b5c72f3..000000000 --- a/docs/book/src/hb_http_benchmark_tests.erl.md +++ /dev/null @@ -1,8 +0,0 @@ -# hb_http_benchmark_tests - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_benchmark_tests.erl) - - ---- - -*Generated from [hb_http_benchmark_tests.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_benchmark_tests.erl)* diff --git a/docs/book/src/hb_http_client.erl.md b/docs/book/src/hb_http_client.erl.md deleted file mode 100644 index 49afa1f68..000000000 --- a/docs/book/src/hb_http_client.erl.md +++ /dev/null @@ -1,923 +0,0 @@ -# hb_http_client - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_client.erl) - -A wrapper library for gun. This module originates from the Arweave -project, and has been modified for use in HyperBEAM. - ---- - -## Exported Functions - -- `handle_call/3` -- `handle_cast/2` -- `handle_info/2` -- `init/1` -- `req/2` -- `start_link/1` -- `terminate/2` - ---- - -### start_link - -A wrapper library for gun. 
This module originates from the Arweave - -```erlang -start_link(Opts) -> - gen_server:start_link({local, ?MODULE}, ?MODULE, Opts, []). -``` - -### req - -```erlang -req(Args, Opts) -> req(Args, false, Opts). -``` - -### req - -```erlang -req(Args, ReestablishedConnection, Opts) -> - case hb_opts:get(http_client, gun, Opts) of - gun -> gun_req(Args, ReestablishedConnection, Opts); - httpc -> httpc_req(Args, ReestablishedConnection, Opts) - end. -``` - -### httpc_req - -```erlang -httpc_req(Args, _, Opts) -> - #{ - peer := Peer, - path := Path, - method := RawMethod, - headers := Headers, - body := Body - } = Args, - ?event({httpc_req, Args}), - {Host, Port} = parse_peer(Peer, Opts), - Scheme = case Port of - 443 -> "https"; - _ -> "http" - end, - ?event(http_client, {httpc_req, {explicit, Args}}), - URL = binary_to_list(iolist_to_binary([Scheme, "://", Host, ":", integer_to_binary(Port), Path])), - FilteredHeaders = hb_maps:without([<<"content-type">>, <<"cookie">>], Headers, Opts), - HeaderKV = - [ - {binary_to_list(Key), binary_to_list(Value)} - || - {Key, Value} <- hb_maps:to_list(FilteredHeaders, Opts) - ] ++ - [ - {<<"cookie">>, CookieLine} - || - CookieLine <- - case hb_maps:get(<<"cookie">>, Headers, [], Opts) of - Binary when is_binary(Binary) -> - [Binary]; - List when is_list(List) -> - List - end - ], - Method = binary_to_existing_atom(hb_util:to_lower(RawMethod)), - ContentType = hb_maps:get(<<"content-type">>, Headers, <<"application/octet-stream">>, Opts), - Request = - case Method of - get -> - { - URL, - HeaderKV - }; - _ -> - { - URL, - HeaderKV, - binary_to_list(ContentType), - Body - } - end, - ?event({http_client_outbound, Method, URL, Request}), - HTTPCOpts = [{full_result, true}, {body_format, binary}], - StartTime = os:system_time(millisecond), - case httpc:request(Method, Request, [], HTTPCOpts) of - {ok, {{_, Status, _}, RawRespHeaders, RespBody}} -> - EndTime = os:system_time(millisecond), - RespHeaders = - [ - {list_to_binary(Key), 
list_to_binary(Value)} - || - {Key, Value} <- RawRespHeaders - ], - ?event(http_client, {httpc_resp, Status, RespHeaders, RespBody}), - record_duration(#{ - <<"request-method">> => method_to_bin(Method), - <<"request-path">> => hb_util:bin(Path), - <<"status-class">> => get_status_class(Status), - <<"duration">> => EndTime - StartTime - }, - Opts - ), - {ok, Status, RespHeaders, RespBody}; - {error, Reason} -> - ?event(http_client, {httpc_error, Reason}), - {error, Reason} - end. -``` - -### gun_req - -```erlang -gun_req(Args, ReestablishedConnection, Opts) -> - StartTime = os:system_time(millisecond), - #{ peer := Peer, path := Path, method := Method } = Args, - Response = - case catch gen_server:call(?MODULE, {get_connection, Args, Opts}, infinity) of - {ok, PID} -> - ar_rate_limiter:throttle(Peer, Path, Opts), - case request(PID, Args, Opts) of - {error, Error} when Error == {shutdown, normal}; - Error == noproc -> - case ReestablishedConnection of - true -> - {error, client_error}; - false -> - req(Args, true, Opts) - end; - Reply -> - Reply - end; - {'EXIT', _} -> - {error, client_error}; - Error -> - Error - end, - EndTime = os:system_time(millisecond), - %% Only log the metric for the top-level call to req/2 - not the recursive call - %% that happens when the connection is reestablished. -``` - -### record_duration - -Record the duration of the request in an async process. We write the - -```erlang -record_duration(Details, Opts) -> - spawn( - fun() -> - % First, write to prometheus if it is enabled. Prometheus works - % only with strings as lists, so we encode the data before granting - % it. -``` - -### maybe_invoke_monitor - -Invoke the HTTP monitor message with AO-Core, if it is set in the - -```erlang -maybe_invoke_monitor(Details, Opts) -> - case hb_ao:get(<<"http_monitor">>, Opts, Opts) of - not_found -> ok; - Monitor -> - % We have a monitor message. 
Place the `details' into the body, set - % the `method' to "POST", add the `http_reference' (if applicable) - % and sign the request. We use the node message's wallet as the - % source of the key. -``` - -### init - -```erlang -init(Opts) -> - case hb_opts:get(prometheus, not hb_features:test(), Opts) of - true -> - ?event({starting_prometheus_application, - {test_mode, hb_features:test()} - } - ), - try - application:ensure_all_started([prometheus, prometheus_cowboy]), - init_prometheus(Opts) - catch - Type:Reason:Stack -> - ?event(warning, - {prometheus_not_started, - {type, Type}, - {reason, Reason}, - {stack, Stack} - } - ), - {ok, #state{ opts = Opts }} - end; - false -> {ok, #state{ opts = Opts }} - end. -``` - -### init_prometheus - -```erlang -init_prometheus(Opts) -> - application:ensure_all_started([prometheus, prometheus_cowboy]), - prometheus_counter:new([ - {name, gun_requests_total}, - {labels, [http_method, route, status_class]}, - { - help, - "The total number of GUN requests." - } - ]), - prometheus_gauge:new([{name, outbound_connections}, - {help, "The current number of the open outbound network connections"}]), - prometheus_histogram:new([ - {name, http_request_duration_seconds}, - {buckets, [0.01, 0.1, 0.5, 1, 5, 10, 30, 60]}, - {labels, [http_method, route, status_class]}, - { - help, - "The total duration of an hb_http_client:req call. This includes more than" - " just the GUN request itself (e.g. establishing a connection, " - "throttling, etc...)" - } - ]), - prometheus_histogram:new([ - {name, http_client_get_chunk_duration_seconds}, - {buckets, [0.1, 1, 10, 60]}, - {labels, [status_class, peer]}, - { - help, - "The total duration of an HTTP GET chunk request made to a peer." 
- } - ]), - prometheus_counter:new([ - {name, http_client_downloaded_bytes_total}, - {help, "The total amount of bytes requested via HTTP, per remote endpoint"}, - {labels, [route]} - ]), - prometheus_counter:new([ - {name, http_client_uploaded_bytes_total}, - {help, "The total amount of bytes posted via HTTP, per remote endpoint"}, - {labels, [route]} - ]), - ?event(started), - {ok, #state{ opts = Opts }}. -``` - -### handle_call - -```erlang -handle_call({get_connection, Args, Opts}, From, - #state{ pid_by_peer = PIDPeer, status_by_pid = StatusByPID } = State) -> - Peer = hb_maps:get(peer, Args, undefined, Opts), - case hb_maps:get(Peer, PIDPeer, not_found, Opts) of - not_found -> - {ok, PID} = open_connection(Args, hb_maps:merge(State#state.opts, Opts, Opts)), - MonitorRef = monitor(process, PID), - PIDPeer2 = hb_maps:put(Peer, PID, PIDPeer, Opts), - StatusByPID2 = - hb_maps:put( - PID, - {{connecting, [{From, Args}]}, MonitorRef, Peer}, - StatusByPID, - Opts - ), - { - reply, - {ok, PID}, - State#state{ - pid_by_peer = PIDPeer2, - status_by_pid = StatusByPID2 - } - }; - PID -> - case hb_maps:get(PID, StatusByPID, undefined, Opts) of - {{connecting, PendingRequests}, MonitorRef, Peer} -> - StatusByPID2 = - hb_maps:put(PID, - { - {connecting, [{From, Args} | PendingRequests]}, - MonitorRef, - Peer - }, - StatusByPID, - Opts - ), - {noreply, State#state{ status_by_pid = StatusByPID2 }}; - {connected, _MonitorRef, Peer} -> - {reply, {ok, PID}, State} - end - end; -``` - -### handle_call - -```erlang -handle_call(Request, _From, State) -> - ?event(warning, {unhandled_call, {module, ?MODULE}, {request, Request}}), - {reply, ok, State}. -``` - -### handle_cast - -```erlang -handle_cast(Cast, State) -> - ?event(warning, {unhandled_cast, {module, ?MODULE}, {cast, Cast}}), - {noreply, State}. 
-``` - -### handle_info - -```erlang -handle_info({gun_up, PID, _Protocol}, #state{ status_by_pid = StatusByPID } = State) -> - case hb_maps:get(PID, StatusByPID, not_found) of - not_found -> - %% A connection timeout should have occurred. -``` - -### handle_info - -```erlang -handle_info({gun_error, PID, Reason}, - #state{ pid_by_peer = PIDByPeer, status_by_pid = StatusByPID } = State) -> - case hb_maps:get(PID, StatusByPID, not_found) of - not_found -> - ?event(warning, {gun_connection_error_with_unknown_pid}), - {noreply, State}; - {Status, _MonitorRef, Peer} -> - PIDByPeer2 = hb_maps:remove(Peer, PIDByPeer), - StatusByPID2 = hb_maps:remove(PID, StatusByPID), - Reason2 = - case Reason of - timeout -> - connect_timeout; - {Type, _} -> - Type; - _ -> - Reason - end, - case Status of - {connecting, PendingRequests} -> - reply_error(PendingRequests, Reason2); - connected -> - dec_prometheus_gauge(outbound_connections), - ok - end, - gun:shutdown(PID), - ?event({connection_error, {reason, Reason}}), - {noreply, State#state{ status_by_pid = StatusByPID2, pid_by_peer = PIDByPeer2 }} - end; -``` - -### handle_info - -```erlang -handle_info({gun_down, PID, Protocol, Reason, _KilledStreams, _UnprocessedStreams}, - #state{ pid_by_peer = PIDByPeer, status_by_pid = StatusByPID } = State) -> - case hb_maps:get(PID, StatusByPID, not_found) of - not_found -> - ?event(warning, - {gun_connection_down_with_unknown_pid, {protocol, Protocol}}), - {noreply, State}; - {Status, _MonitorRef, Peer} -> - PIDByPeer2 = hb_maps:remove(Peer, PIDByPeer), - StatusByPID2 = hb_maps:remove(PID, StatusByPID), - Reason2 = - case Reason of - {Type, _} -> - Type; - _ -> - Reason - end, - case Status of - {connecting, PendingRequests} -> - reply_error(PendingRequests, Reason2); - _ -> - dec_prometheus_gauge(outbound_connections), - ok - end, - {noreply, - State#state{ - status_by_pid = StatusByPID2, - pid_by_peer = PIDByPeer2 - } - } - end; -``` - -### handle_info - -```erlang -handle_info({'DOWN', 
_Ref, process, PID, Reason}, - #state{ pid_by_peer = PIDByPeer, status_by_pid = StatusByPID } = State) -> - case hb_maps:get(PID, StatusByPID, not_found) of - not_found -> - {noreply, State}; - {Status, _MonitorRef, Peer} -> - PIDByPeer2 = hb_maps:remove(Peer, PIDByPeer), - StatusByPID2 = hb_maps:remove(PID, StatusByPID), - case Status of - {connecting, PendingRequests} -> - reply_error(PendingRequests, Reason); - _ -> - dec_prometheus_gauge(outbound_connections), - ok - end, - {noreply, - State#state{ - status_by_pid = StatusByPID2, - pid_by_peer = PIDByPeer2 - } - } - end; -``` - -### handle_info - -```erlang -handle_info(Message, State) -> - ?event(warning, {unhandled_info, {module, ?MODULE}, {message, Message}}), - {noreply, State}. -``` - -### terminate - -```erlang -terminate(Reason, #state{ status_by_pid = StatusByPID }) -> - ?event(info,{http_client_terminating, {reason, Reason}}), - hb_maps:map(fun(PID, _Status) -> gun:shutdown(PID) end, StatusByPID), - ok. -``` - -### inc_prometheus_gauge - -Safe wrapper for prometheus_gauge:inc/2. - -```erlang -inc_prometheus_gauge(Name) -> - case application:get_application(prometheus) of - undefined -> ok; - _ -> - try prometheus_gauge:inc(Name) - catch _:_ -> - init_prometheus(#{}), - prometheus_gauge:inc(Name) - end - end. -``` - -### dec_prometheus_gauge - -Safe wrapper for prometheus_gauge:dec/2. - -```erlang -dec_prometheus_gauge(Name) -> - case application:get_application(prometheus) of - undefined -> ok; - _ -> prometheus_gauge:dec(Name) - end. -``` - -### inc_prometheus_counter - -```erlang -inc_prometheus_counter(Name, Labels, Value) -> - case application:get_application(prometheus) of - undefined -> ok; - _ -> prometheus_counter:inc(Name, Labels, Value) - end. 
-``` - -### open_connection - -```erlang -open_connection(#{ peer := Peer }, Opts) -> - {Host, Port} = parse_peer(Peer, Opts), - ?event(http_outbound, {parsed_peer, {peer, Peer}, {host, Host}, {port, Port}}), - BaseGunOpts = - #{ - http_opts => - #{ - keepalive => - hb_opts:get( - http_keepalive, - no_keepalive_timeout, - Opts - ) - }, - retry => 0, - connect_timeout => - hb_opts:get( - http_connect_timeout, - no_connect_timeout, - Opts - ) - }, - Transport = - case Port of - 443 -> tls; - _ -> tcp - end, - DefaultProto = - case hb_features:http3() of - true -> http3; - false -> http2 - end, - % Fallback through earlier HTTP versions if the protocol is not supported. -``` - -### parse_peer - -```erlang -parse_peer(Peer, Opts) -> - Parsed = uri_string:parse(Peer), - case Parsed of - #{ host := Host, port := Port } -> - {hb_util:list(Host), Port}; - URI = #{ host := Host } -> - { - hb_util:list(Host), - case hb_maps:get(scheme, URI, undefined, Opts) of - <<"https">> -> 443; - _ -> hb_opts:get(port, 8734, Opts) - end - } - end. -``` - -### reply_error - -```erlang -reply_error([], _Reason) -> - ok; -``` - -### reply_error - -```erlang -reply_error([PendingRequest | PendingRequests], Reason) -> - ReplyTo = element(1, PendingRequest), - Args = element(2, PendingRequest), - Method = hb_maps:get(method, Args), - Path = hb_maps:get(path, Args), - record_response_status(Method, Path, {error, Reason}), - gen_server:reply(ReplyTo, {error, Reason}), - reply_error(PendingRequests, Reason). -``` - -### record_response_status - -```erlang -record_response_status(Method, Path, Response) -> - inc_prometheus_counter(gun_requests_total, - [ - hb_util:list(method_to_bin(Method)), - Path, - hb_util:list(get_status_class(Response)) - ], - 1 - ). 
-``` - -### method_to_bin - -```erlang -method_to_bin(get) -> - <<"GET">>; -``` - -### method_to_bin - -```erlang -method_to_bin(post) -> - <<"POST">>; -``` - -### method_to_bin - -```erlang -method_to_bin(put) -> - <<"PUT">>; -``` - -### method_to_bin - -```erlang -method_to_bin(head) -> - <<"HEAD">>; -``` - -### method_to_bin - -```erlang -method_to_bin(delete) -> - <<"DELETE">>; -``` - -### method_to_bin - -```erlang -method_to_bin(connect) -> - <<"CONNECT">>; -``` - -### method_to_bin - -```erlang -method_to_bin(options) -> - <<"OPTIONS">>; -``` - -### method_to_bin - -```erlang -method_to_bin(trace) -> - <<"TRACE">>; -``` - -### method_to_bin - -```erlang -method_to_bin(patch) -> - <<"PATCH">>; -``` - -### method_to_bin - -```erlang -method_to_bin(_) -> - <<"unknown">>. -``` - -### request - -```erlang -request(PID, Args, Opts) -> - Timer = - inet:start_timer( - hb_opts:get(http_request_send_timeout, no_request_send_timeout, Opts) - ), - Method = hb_maps:get(method, Args, undefined, Opts), - Path = hb_maps:get(path, Args, undefined, Opts), - HeaderMap = hb_maps:get(headers, Args, #{}, Opts), - % Normalize cookie header lines from the header map. We support both - % lists of cookie lines and a single cookie line. 
-``` - -### await_response - -```erlang -await_response(Args, Opts) -> - #{ pid := PID, stream_ref := Ref, timer := Timer, limit := Limit, - counter := Counter, acc := Acc, method := Method, path := Path } = Args, - case gun:await(PID, Ref, inet:timeout(Timer)) of - {response, fin, Status, Headers} -> - upload_metric(Args), - ?event(http, {gun_response, {status, Status}, {headers, Headers}, {body, none}}), - {ok, Status, Headers, <<>>}; - {response, nofin, Status, Headers} -> - await_response(Args#{ status => Status, headers => Headers }, Opts); - {data, nofin, Data} -> - case Limit of - infinity -> - await_response(Args#{ acc := [Acc | Data] }, Opts); - Limit -> - Counter2 = size(Data) + Counter, - case Limit >= Counter2 of - true -> - await_response( - Args#{ - counter := Counter2, - acc := [Acc | Data] - }, - Opts - ); - false -> - ?event(error, {http_fetched_too_much_data, Args, - <<"Fetched too much data">>, Opts}), - {error, too_much_data} - end - end; - {data, fin, Data} -> - FinData = iolist_to_binary([Acc | Data]), - download_metric(FinData, Args), - upload_metric(Args), - {ok, - hb_maps:get(status, Args, undefined, Opts), - hb_maps:get(headers, Args, undefined, Opts), - FinData - }; - {error, timeout} = Response -> - record_response_status(Method, Path, Response), - gun:cancel(PID, Ref), - log(warn, gun_await_process_down, Args, Response, Opts), - Response; - {error, Reason} = Response when is_tuple(Reason) -> - record_response_status(Method, Path, Response), - log(warn, gun_await_process_down, Args, Reason, Opts), - Response; - Response -> - record_response_status(Method, Path, Response), - log(warn, gun_await_unknown, Args, Response, Opts), - Response - end. -``` - -### log - -```erlang -log(Type, Event, #{method := Method, peer := Peer, path := Path}, Reason, Opts) -> - ?event( - http, - {gun_log, - {type, Type}, - {event, Event}, - {method, Method}, - {peer, Peer}, - {path, Path}, - {reason, Reason} - }, - Opts - ), - ok. 
-``` - -### download_metric - -```erlang -download_metric(Data, #{path := Path}) -> - inc_prometheus_counter( - http_client_downloaded_bytes_total, - [Path], - byte_size(Data) - ). -``` - -### upload_metric - -```erlang -upload_metric(#{method := post, path := Path, body := Body}) -> - inc_prometheus_counter( - http_client_uploaded_bytes_total, - [Path], - byte_size(Body) - ); -``` - -### upload_metric - -```erlang -upload_metric(_) -> - ok. -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class({ok, {{Status, _}, _, _, _, _}}) -> - get_status_class(Status); -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class({error, connection_closed}) -> - <<"connection_closed">>; -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class({error, connect_timeout}) -> - <<"connect_timeout">>; -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class({error, timeout}) -> - <<"timeout">>; -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class({error,{shutdown,timeout}}) -> - <<"shutdown_timeout">>; -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class({error, econnrefused}) -> - <<"econnrefused">>; -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class({error, {shutdown,econnrefused}}) -> - <<"shutdown_econnrefused">>; -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class({error, {shutdown,ehostunreach}}) -> - <<"shutdown_ehostunreach">>; -``` - -### get_status_class - -Return the HTTP status class label for 
cowboy_requests_total and - -```erlang -get_status_class({error, {shutdown,normal}}) -> - <<"shutdown_normal">>; -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class({error, {closed,_}}) -> - <<"closed">>; -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class({error, noproc}) -> - <<"noproc">>; -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class(208) -> - <<"already_processed">>; -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class(Data) when is_integer(Data), Data > 0 -> - hb_util:bin(prometheus_http:status_class(Data)); -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class(Data) when is_binary(Data) -> - case catch binary_to_integer(Data) of - {_, _} -> - <<"unknown">>; - Status -> - get_status_class(Status) - end; -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class(Data) when is_atom(Data) -> - atom_to_binary(Data); -``` - -### get_status_class - -Return the HTTP status class label for cowboy_requests_total and - -```erlang -get_status_class(_) -> -``` - ---- - -*Generated from [hb_http_client.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_client.erl)* diff --git a/docs/book/src/hb_http_client_sup.erl.md b/docs/book/src/hb_http_client_sup.erl.md deleted file mode 100644 index 7c59be6af..000000000 --- a/docs/book/src/hb_http_client_sup.erl.md +++ /dev/null @@ -1,34 +0,0 @@ -# hb_http_client_sup - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_client_sup.erl) - -The supervisor for the gun HTTP client wrapper. 
- ---- - -## Exported Functions - -- `init/1` -- `start_link/1` - ---- - -### start_link - -The supervisor for the gun HTTP client wrapper. - -```erlang -start_link(Opts) -> - supervisor:start_link({local, ?MODULE}, ?MODULE, Opts). -``` - -### init - -```erlang -init(Opts) -> - {ok, {{one_for_one, 5, 10}, [?CHILD(hb_http_client, worker, Opts)]}}. -``` - ---- - -*Generated from [hb_http_client_sup.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_client_sup.erl)* diff --git a/docs/book/src/hb_http_multi.erl.md b/docs/book/src/hb_http_multi.erl.md deleted file mode 100644 index 1cbd093b2..000000000 --- a/docs/book/src/hb_http_multi.erl.md +++ /dev/null @@ -1,393 +0,0 @@ -# hb_http_multi - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_multi.erl) - -An interface for resolving requests across multiple HTTP servers, either -concurrently or sequentially, and processing the results in a configurable -manner. -The `Config` message for a call to `request/5` may contain the following -fields: -- `multirequest-nodes`: A list of nodes to request from. -- `multirequest-responses`: The number of responses to gather. -- `multirequest-stop-after`: Whether to stop after the required number of - responses. -- `multirequest-parallel`: Whether to run the requests in parallel. -- `multirequest-admissible`: A message to resolve against the response. -- `multirequest-admissible-status`: The statuses that are admissible. -The `admissible` message is executed as a `base` message, with its `path` -field moved to the request (or set to `is-admissible` if not present): -``` - resolve(Base, Response#{ <<"path">> => Base/path OR /is-admissible }, Opts) -''' - ---- - -## Exported Functions - -- `request/5` - ---- - -### request - -An interface for resolving requests across multiple HTTP servers, either -Dispatch the same HTTP request to many nodes. 
Can be configured to - -```erlang -request(Config, Method, Path, Message, Opts) -> - #{ - nodes := Nodes, - responses := Responses, - stop_after := StopAfter, - admissible := Admissible, - admissible_status := Statuses, - parallel := Parallel - } = multirequest_opts(Config, Message, Opts), - MultirequestMsg = - hb_message:without_unless_signed( - lists:filter( - fun(<<"multirequest-", _/binary>>) -> true; (_) -> false end, - hb_maps:keys(Message) - ), - Message, - Opts - ), - ?event(debug_multi, - {multirequest_opts_parsed, - {config, Config}, - {method, Method}, - {path, Path}, - {raw_message, Message}, - {message_to_send, MultirequestMsg} - }), - AllResults = - if Parallel -> - parallel_multirequest( - Nodes, - Responses, - StopAfter, - Method, - Path, - MultirequestMsg, - Admissible, - Statuses, - Opts - ); - true -> - serial_multirequest( - Nodes, - Responses, - Method, - Path, - MultirequestMsg, - Admissible, - Statuses, - Opts - ) - end, - ?event(http, {multirequest_results, {results, AllResults}}), - case AllResults of - [] -> {error, no_viable_responses}; - Results -> if Responses == 1 -> hd(Results); true -> Results end - end. -``` - -### multirequest_opts - -Get the multirequest options from the config or message. The options in - -```erlang -multirequest_opts(Config, Message, Opts) -> - Opts#{ - nodes => - multirequest_opt(<<"nodes">>, Config, Message, #{}, Opts), - responses => - multirequest_opt(<<"responses">>, Config, Message, 1, Opts), - stop_after => - multirequest_opt(<<"stop-after">>, Config, Message, true, Opts), - admissible => - multirequest_opt(<<"admissible">>, Config, Message, undefined, Opts), - admissible_status => - multirequest_opt(<<"admissible-status">>, Config, Message, <<"All">>, Opts), - parallel => - multirequest_opt(<<"parallel">>, Config, Message, false, Opts) - }. -``` - -### multirequest_opt - -Get a value for a multirequest option from the config or message. 
- -```erlang -multirequest_opt(Key, Config, Message, Default, Opts) -> - hb_ao:get_first( - [ - {Message, <<"multirequest-", Key/binary>>}, - {Config, Key} - ], - Default, - Opts#{ hashpath => ignore } - ). -``` - -### is_admissible - -Check if a response is admissible, according to the configuration. First, - -```erlang -is_admissible(ok, Res, Admissible, Statuses, Opts) -> - ?event(debug_multi, - {is_admissible, - {response, Res}, - {admissible, Admissible}, - {statuses, Statuses} - } - ), - AdmissibleStatus = admissible_status(Res, Statuses), - ?event(debug_multi, {admissible_status, {result, AdmissibleStatus}}), - AdmissibleResponse = admissible_response(Res, Admissible, Opts), - ?event(debug_multi, {admissible_response, {result, AdmissibleResponse}}), - AdmissibleStatus andalso AdmissibleResponse; -``` - -### is_admissible - -Check if a response is admissible, according to the configuration. First, -Serially request a message, collecting responses until the required - -```erlang -is_admissible(_, _, _, _, _) -> false. -``` - -### serial_multirequest - -Check if a response is admissible, according to the configuration. First, -Serially request a message, collecting responses until the required - -```erlang -serial_multirequest(_Nodes, 0, _Method, _Path, _Message, _Admissible, _Statuses, _Opts) -> []; -``` - -### serial_multirequest - -Check if a response is admissible, according to the configuration. First, -Serially request a message, collecting responses until the required - -```erlang -serial_multirequest([], _, _Method, _Path, _Message, _Admissible, _Statuses, _Opts) -> []; -``` - -### serial_multirequest - -Check if a response is admissible, according to the configuration. 
First, -Serially request a message, collecting responses until the required - -```erlang -serial_multirequest([Node|Nodes], Remaining, Method, Path, Message, Admissible, Statuses, Opts) -> - {ErlStatus, Res} = hb_http:request(Method, Node, Path, Message, Opts), - case is_admissible(ErlStatus, Res, Admissible, Statuses, Opts) of - true -> - ?event(http, {admissible_status, {response, Res}}), - [ - {ErlStatus, Res} - | - serial_multirequest( - Nodes, - Remaining - 1, - Method, - Path, - Message, - Admissible, - Statuses, - Opts - ) - ]; - false -> - ?event(http, {inadmissible_status, {response, Res}}), - serial_multirequest( - Nodes, - Remaining, - Method, - Path, - Message, - Admissible, - Statuses, - Opts - ) - end. -``` - -### parallel_multirequest - -Dispatch the same HTTP request to many nodes in parallel. - -```erlang -parallel_multirequest(Nodes, Responses, StopAfter, Method, Path, Message, Admissible, Statuses, Opts) -> - Ref = make_ref(), - Parent = self(), - Procs = - lists:map( - fun(Node) -> - spawn( - fun() -> - Res = hb_http:request(Method, Node, Path, Message, Opts), - receive no_reply -> stopping - after 0 -> Parent ! {Ref, self(), Res} - end - end - ) - end, - Nodes - ), - parallel_responses([], Procs, Ref, Responses, StopAfter, Admissible, Statuses, Opts). -``` - -### admissible_status - -Check if a status is allowed, according to the configuration. Statuses - -```erlang -admissible_status(_, <<"All">>) -> true; -``` - -### admissible_status - -Check if a status is allowed, according to the configuration. Statuses - -```erlang -admissible_status(_ResponseMsg = #{ <<"status">> := Status }, Statuses) -> - admissible_status(Status, Statuses); -``` - -### admissible_status - -Check if a status is allowed, according to the configuration. 
Statuses - -```erlang -admissible_status(Status, Statuses) when is_integer(Statuses) -> - admissible_status(Status, [Statuses]); -``` - -### admissible_status - -Check if a status is allowed, according to the configuration. Statuses - -```erlang -admissible_status(Status, Statuses) when is_binary(Status) -> - admissible_status(binary_to_integer(Status), Statuses); -``` - -### admissible_status - -Check if a status is allowed, according to the configuration. Statuses - -```erlang -admissible_status(Status, Statuses) when is_binary(Statuses) -> - % Convert the statuses to a list of integers. -``` - -### admissible_status - -```erlang -admissible_status(Status, Statuses) when is_list(Statuses) -> - lists:member(Status, Statuses). -``` - -### admissible_response - -If an `admissable` message is set for the request, check if the response - -```erlang -admissible_response(_Response, undefined, _Opts) -> true; -``` - -### admissible_response - -If an `admissable` message is set for the request, check if the response - -```erlang -admissible_response(Response, Msg, Opts) -> - Path = hb_maps:get(<<"path">>, Msg, <<"is-admissible">>, Opts), - Req = Response#{ <<"path">> => Path }, - Base = hb_message:without_unless_signed([<<"path">>], Msg, Opts), - ?event(debug_multi, - {executing_admissible_message, {message, Base}, {req, Req}} - ), - case hb_ao:resolve(Base, Req, Opts) of - {ok, Res} when is_atom(Res) or is_binary(Res) -> - ?event(debug_multi, {admissible_result, {result, Res}}), - hb_util:atom(Res) == true; - {error, Reason} -> - ?event(debug_multi, {admissible_error, {reason, Reason}}), - false - end. -``` - -### parallel_responses - -Collect the necessary number of responses, and stop workers if - -```erlang -parallel_responses(Res, Procs, Ref, 0, false, _Admissible, _Statuses, _Opts) -> - lists:foreach(fun(P) -> P ! 
no_reply end, Procs), - empty_inbox(Ref), - {ok, Res}; -``` - -### parallel_responses - -Collect the necessary number of responses, and stop workers if - -```erlang -parallel_responses(Res, Procs, Ref, 0, true, _Admissible, _Statuses, _Opts) -> - lists:foreach(fun(P) -> exit(P, kill) end, Procs), - empty_inbox(Ref), - Res; -``` - -### parallel_responses - -Collect the necessary number of responses, and stop workers if - -```erlang -parallel_responses(Res, Procs, Ref, Awaiting, StopAfter, Admissible, Statuses, Opts) -> - receive - {Ref, Pid, {Status, NewRes}} -> - case is_admissible(Status, NewRes, Admissible, Statuses, Opts) of - true -> - parallel_responses( - [NewRes | Res], - lists:delete(Pid, Procs), - Ref, - Awaiting - 1, - StopAfter, - Admissible, - Statuses, - Opts - ); - false -> - parallel_responses( - Res, - lists:delete(Pid, Procs), - Ref, - Awaiting, - StopAfter, - Admissible, - Statuses, - Opts - ) - end -end. -``` - -### empty_inbox - -Empty the inbox of the current process for all messages with the given - -```erlang -empty_inbox(Ref) -> -``` - ---- - -*Generated from [hb_http_multi.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_multi.erl)* diff --git a/docs/book/src/hb_http_server.erl.md b/docs/book/src/hb_http_server.erl.md deleted file mode 100644 index 201f860e1..000000000 --- a/docs/book/src/hb_http_server.erl.md +++ /dev/null @@ -1,607 +0,0 @@ -# hb_http_server - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_server.erl) - -A router that attaches a HTTP server to the AO-Core resolver. -Because AO-Core is built to speak in HTTP semantics, this module -only has to marshal the HTTP request into a message, and then -pass it to the AO-Core resolver. -`hb_http:reply/4` is used to respond to the client, handling the -process of converting a message back into an HTTP response. 
-The router uses an `Opts` message as its Cowboy initial state, -such that changing it on start of the router server allows for -the execution parameters of all downstream requests to be controlled. - ---- - -## Exported Functions - -- `allowed_methods/2` -- `get_opts/0` -- `get_opts/1` -- `init/2` -- `set_default_opts/1` -- `set_opts/1` -- `set_opts/2` -- `set_proc_server_id/1` -- `start_node/0` -- `start_node/1` -- `start/0` -- `start/1` - ---- - -### start - -A router that attaches a HTTP server to the AO-Core resolver. -Starts the HTTP server. Optionally accepts an `Opts` message, which - -```erlang -start() -> - ?event(http, {start_store, <<"cache-mainnet">>}), - Loaded = - case hb_opts:load(Loc = hb_opts:get(hb_config_location, <<"config.flat">>)) of - {ok, Conf} -> - ?event(boot, {loaded_config, Loc, Conf}), - Conf; - {error, Reason} -> - ?event(boot, {failed_to_load_config, Loc, Reason}), - #{} - end, - MergedConfig = - hb_maps:merge( - hb_opts:default_message_with_env(), - Loaded - ), - %% Apply store defaults before starting store - StoreOpts = hb_opts:get(store, no_store, MergedConfig), - StoreDefaults = hb_opts:get(store_defaults, #{}, MergedConfig), - UpdatedStoreOpts = - case StoreOpts of - no_store -> no_store; - _ when is_list(StoreOpts) -> hb_store_opts:apply(StoreOpts, StoreDefaults); - _ -> StoreOpts - end, - hb_store:start(UpdatedStoreOpts), - PrivWallet = - hb:wallet( - hb_opts:get( - priv_key_location, - <<"hyperbeam-key.json">>, - Loaded - ) - ), - maybe_greeter(MergedConfig, PrivWallet), - start( - Loaded#{ - priv_wallet => PrivWallet, - store => UpdatedStoreOpts, - port => hb_opts:get(port, 8734, Loaded), - cache_writers => [hb_util:human_id(ar_wallet:to_address(PrivWallet))] - } - ). 
-``` - -### start - -```erlang -start(Opts) -> - application:ensure_all_started([ - kernel, - stdlib, - inets, - ssl, - ranch, - cowboy, - gun, - os_mon - ]), - hb:init(), - BaseOpts = set_default_opts(Opts), - {ok, Listener, _Port} = new_server(BaseOpts), - {ok, Listener}. -``` - -### maybe_greeter - -Print the greeter message to the console if we are not running tests. - -```erlang -maybe_greeter(MergedConfig, PrivWallet) -> - case hb_features:test() of - false -> - print_greeter(MergedConfig, PrivWallet); - true -> - ok - end. -``` - -### print_greeter - -Print the greeter message to the console. Includes the version, operator - -```erlang -print_greeter(Config, PrivWallet) -> - FormattedConfig = hb_format:term(Config, Config, 2), - io:format("~n" - "===========================================================~n" - "== ██╗ ██╗██╗ ██╗██████╗ ███████╗██████╗ ==~n" - "== ██║ ██║╚██╗ ██╔╝██╔══██╗██╔════╝██╔══██╗ ==~n" - "== ███████║ ╚████╔╝ ██████╔╝█████╗ ██████╔╝ ==~n" - "== ██╔══██║ ╚██╔╝ ██╔═══╝ ██╔══╝ ██╔══██╗ ==~n" - "== ██║ ██║ ██║ ██║ ███████╗██║ ██║ ==~n" - "== ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝╚═╝ ╚═╝ ==~n" - "== ==~n" - "== ██████╗ ███████╗ █████╗ ███╗ ███╗ VERSION: ==~n" - "== ██╔══██╗██╔════╝██╔══██╗████╗ ████║ v~p. ==~n" - "== ██████╔╝█████╗ ███████║██╔████╔██║ ==~n" - "== ██╔══██╗██╔══╝ ██╔══██║██║╚██╔╝██║ EAT GLASS, ==~n" - "== ██████╔╝███████╗██║ ██║██║ ╚═╝ ██║ BUILD THE ==~n" - "== ╚═════╝ ╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝ FUTURE. 
==~n" - "===========================================================~n" - "== Node activate at: ~s ==~n" - "== Operator: ~s ==~n" - "===========================================================~n" - "== Config: ==~n" - "===========================================================~n" - " ~s~n" - "===========================================================~n", - [ - ?HYPERBEAM_VERSION, - string:pad( - lists:flatten( - io_lib:format( - "http://~s:~p", - [ - hb_opts:get(host, <<"localhost">>, Config), - hb_opts:get(port, 8734, Config) - ] - ) - ), - 35, leading, $ - ), - hb_util:human_id(ar_wallet:to_address(PrivWallet)), - FormattedConfig - ] - ). -``` - -### new_server - -Trigger the creation of a new HTTP server node. Accepts a `NodeMsg` - -```erlang -new_server(RawNodeMsg) -> - RawNodeMsgWithDefaults = - hb_maps:merge( - hb_opts:default_message_with_env(), - RawNodeMsg#{ only => local } - ), - HookMsg = #{ <<"body">> => RawNodeMsgWithDefaults }, - NodeMsg = - case dev_hook:on(<<"start">>, HookMsg, RawNodeMsgWithDefaults) of - {ok, #{ <<"body">> := NodeMsgAfterHook }} -> NodeMsgAfterHook; - Unexpected -> - ?event(http, - {failed_to_start_server, - {unexpected_hook_result, Unexpected} - } - ), - throw( - {failed_to_start_server, - {unexpected_hook_result, Unexpected} - } - ) - end, - % Put server ID into node message so it's possible to update current server - hb_http:start(), - ServerID = - hb_util:human_id( - ar_wallet:to_address( - hb_opts:get( - priv_wallet, - no_wallet, - NodeMsg - ) - ) - ), - % Put server ID into node message so it's possible to update current server - % params - NodeMsgWithID = hb_maps:put(http_server, ServerID, NodeMsg), - Dispatcher = cowboy_router:compile([{'_', [{'_', ?MODULE, ServerID}]}]), - ProtoOpts = #{ - env => #{dispatch => Dispatcher, node_msg => NodeMsgWithID}, - stream_handlers => [cowboy_stream_h], - max_connections => infinity, - idle_timeout => hb_opts:get(idle_timeout, 300000, NodeMsg) - }, - PrometheusOpts = - case 
hb_opts:get(prometheus, not hb_features:test(), NodeMsg) of - true -> - ?event(prometheus, - {starting_prometheus, {test_mode, hb_features:test()}} - ), - % Attempt to start the prometheus application, if possible. -``` - -### start_http3 - -```erlang -start_http3(ServerID, ProtoOpts, NodeMsg) -> - ?event(http, {start_http3, ServerID}), - Parent = self(), - ServerPID = - spawn(fun() -> - application:ensure_all_started(quicer), - {ok, Listener} = cowboy:start_quic( - ServerID, - TransOpts = #{ - socket_opts => [ - {certfile, "test/test-tls.pem"}, - {keyfile, "test/test-tls.key"}, - {port, Port = hb_opts:get(port, 8734, NodeMsg)} - ] - }, - ProtoOpts - ), - ranch_server:set_new_listener_opts( - ServerID, - 1024, - ranch:normalize_opts( - hb_maps:to_list(TransOpts#{ port => Port }) - ), - ProtoOpts, - [] - ), - ranch_server:set_addr(ServerID, {<<"localhost">>, Port}), - % Bypass ranch's requirement to have a connection supervisor define - % to support updating protocol opts. -``` - -### http3_conn_sup_loop - -```erlang -http3_conn_sup_loop() -> - receive - _ -> - % Ignore any other messages - http3_conn_sup_loop() - end. -``` - -### start_http2 - -```erlang -start_http2(ServerID, ProtoOpts, NodeMsg) -> - ?event(http, {start_http2, ServerID}), - StartRes = cowboy:start_clear( - ServerID, - [ - {port, Port = hb_opts:get(port, 8734, NodeMsg)} - ], - ProtoOpts - ), - case StartRes of - {ok, Listener} -> - ?event(debug_router_info, {http2_started, {listener, Listener}, {port, Port}}), - {ok, Port, Listener}; - {error, {already_started, Listener}} -> - ?event(http, {http2_already_started, {listener, Listener}}), - ?event(debug_router_info, - {restarting, - {id, ServerID}, - {node_msg, NodeMsg} - } - ), - cowboy:set_env(ServerID, node_msg, #{}), - % {ok, Port, Listener} - cowboy:stop_listener(ServerID), - start_http2(ServerID, ProtoOpts, NodeMsg) - end. -``` - -### init - -Entrypoint for all HTTP requests. 
Receives the Cowboy request option and - -```erlang -init(Req, ServerID) -> - case cowboy_req:method(Req) of - <<"OPTIONS">> -> cors_reply(Req, ServerID); - _ -> - {ok, Body} = read_body(Req), - handle_request(Req, Body, ServerID) - end. -``` - -### read_body - -Helper to grab the full body of a HTTP request, even if it's chunked. - -```erlang -read_body(Req) -> read_body(Req, <<>>). -``` - -### read_body - -Helper to grab the full body of a HTTP request, even if it's chunked. - -```erlang -read_body(Req0, Acc) -> - case cowboy_req:read_body(Req0) of - {ok, Data, _Req} -> {ok, << Acc/binary, Data/binary >>}; - {more, Data, Req} -> read_body(Req, << Acc/binary, Data/binary >>) - end. -``` - -### cors_reply - -Reply to CORS preflight requests. - -```erlang -cors_reply(Req, _ServerID) -> - Req2 = cowboy_req:reply(204, #{ - <<"access-control-allow-origin">> => <<"*">>, - <<"access-control-allow-headers">> => <<"*">>, - <<"access-control-allow-methods">> => - <<"GET, POST, PUT, DELETE, OPTIONS, PATCH">> - }, Req), - ?event(http_debug, {cors_reply, {req, Req}, {req2, Req2}}), - {ok, Req2, no_state}. -``` - -### handle_request - -Handle all non-CORS preflight requests as AO-Core requests. Execution - -```erlang -handle_request(RawReq, Body, ServerID) -> - % Insert the start time into the request so that it can be used by the - % `hb_http' module to calculate the duration of the request. -``` - -### handle_error - -Return a 500 error response to the client. 
- -```erlang -handle_error(Req, Singleton, Type, Details, Stacktrace, NodeMsg) -> - DetailsStr = hb_util:bin(hb_format:message(Details, NodeMsg, 1)), - StacktraceStr = hb_util:bin(hb_format:trace(Stacktrace)), - ErrorMsg = - #{ - <<"status">> => 500, - <<"type">> => hb_util:bin(hb_format:message(Type)), - <<"details">> => DetailsStr, - <<"stacktrace">> => StacktraceStr - }, - ErrorBin = hb_format:error(ErrorMsg, NodeMsg), - ?event( - http_error, - {returning_500_error, - {string, - hb_format:indent_lines( - <<"\n", ErrorBin/binary, "\n">>, - 1 - ) - } - } - ), - % Remove leading and trailing noise from the stacktrace and details. -``` - -### allowed_methods - -Return the list of allowed methods for the HTTP server. - -```erlang -allowed_methods(Req, State) -> - { - [<<"GET">>, <<"POST">>, <<"PUT">>, <<"DELETE">>, <<"OPTIONS">>, <<"PATCH">>], - Req, - State - }. -``` - -### set_opts - -Merges the provided `Opts` with uncommitted values from `Request`, - -```erlang -set_opts(Opts) -> - case hb_opts:get(http_server, no_server_ref, Opts) of - no_server_ref -> - ok; - ServerRef -> - ok = cowboy:set_env(ServerRef, node_msg, Opts) - end. -``` - -### set_opts - -```erlang -set_opts(Request, Opts) -> - PreparedOpts = - hb_opts:mimic_default_types( - Opts, - false, - Opts - ), - PreparedRequest = - hb_opts:mimic_default_types( - hb_message:uncommitted(Request), - false, - Opts - ), - MergedOpts = - maps:merge( - PreparedOpts, - PreparedRequest - ), - ?event(set_opts, {merged_opts, {explicit, MergedOpts}}), - History = - hb_opts:get(node_history, [], Opts) - ++ [ hb_private:reset(maps:without([node_history], PreparedRequest)) ], - FinalOpts = MergedOpts#{ - http_server => hb_opts:get(http_server, no_server, Opts), - node_history => History - }, - {set_opts(FinalOpts), FinalOpts}. -``` - -### get_opts - -Get the node message for the current process. - -```erlang -get_opts() -> - get_opts(#{ http_server => get(server_id) }). 
-``` - -### get_opts - -```erlang -get_opts(NodeMsg) -> - ServerRef = hb_opts:get(http_server, no_server_ref, NodeMsg), - cowboy:get_env(ServerRef, node_msg, no_node_msg). -``` - -### set_proc_server_id - -Initialize the server ID for the current process. - -```erlang -set_proc_server_id(ServerID) -> - put(server_id, ServerID). -``` - -### set_default_opts - -Apply the default node message to the given opts map. - -```erlang -set_default_opts(Opts) -> - % Create a temporary opts map that does not include the defaults. -``` - -### start_node - -Test that we can start the server, send a message, and get a response. - -```erlang -start_node() -> - start_node(#{}). -``` - -### start_node - -```erlang -start_node(Opts) -> - application:ensure_all_started([ - kernel, - stdlib, - inets, - ssl, - ranch, - cowboy, - gun, - os_mon - ]), - hb:init(), - hb_sup:start_link(Opts), - ServerOpts = set_default_opts(Opts), - {ok, _Listener, Port} = new_server(ServerOpts), - <<"http://localhost:", (integer_to_binary(Port))/binary, "/">>. -%%% Tests -%%% The following only covering the HTTP server initialization process. For tests -%%% of HTTP server requests/responses, see `hb_http.erl'. -``` - -### set_node_opts_test - -Ensure that the `start` hook can be used to modify the node options. We -Test the set_opts/2 function that merges request with options, - -```erlang -set_node_opts_test() -> - Node = - start_node(#{ - on => #{ - <<"start">> => #{ - <<"device">> => - #{ - <<"start">> => - fun(_, #{ <<"body">> := NodeMsg }, _) -> - {ok, #{ - <<"body">> => - NodeMsg#{ <<"test-success">> => true } - }} - end - } - } - } - }), - {ok, LiveOpts} = hb_http:get(Node, <<"/~meta@1.0/info">>, #{}), - ?assert(hb_ao:get(<<"test-success">>, LiveOpts, false, #{})). -``` - -### set_opts_test - -Ensure that the `start` hook can be used to modify the node options. 
We -Test the set_opts/2 function that merges request with options, - -```erlang -set_opts_test() -> - DefaultOpts = hb_opts:default_message_with_env(), - start_node(DefaultOpts#{ - priv_wallet => Wallet = ar_wallet:new(), - port => rand:uniform(10000) + 10000 - }), - Opts = get_opts(#{ - http_server => hb_util:human_id(ar_wallet:to_address(Wallet)) - }), - NodeHistory = hb_opts:get(node_history, [], Opts), - ?event(debug_node_history, {node_history_length, length(NodeHistory)}), - ?assert(length(NodeHistory) == 0), - % Test case 1: Empty node_history case - Request1 = #{ - <<"hello">> => <<"world">> - }, - {ok, UpdatedOpts1} = set_opts(Request1, Opts), - NodeHistory1 = hb_opts:get(node_history, not_found, UpdatedOpts1), - Key1 = hb_opts:get(<<"hello">>, not_found, UpdatedOpts1), - ?event(debug_node_history, {node_history_length, length(NodeHistory1)}), - ?assert(length(NodeHistory1) == 1), - ?assert(Key1 == <<"world">>), - % Test case 2: Non-empty node_history case - Request2 = #{ - <<"hello2">> => <<"world2">> - }, - {ok, UpdatedOpts2} = set_opts(Request2, UpdatedOpts1), - NodeHistory2 = hb_opts:get(node_history, not_found, UpdatedOpts2), - Key2 = hb_opts:get(<<"hello2">>, not_found, UpdatedOpts2), - ?event(debug_node_history, {node_history_length, length(NodeHistory2)}), - ?assert(length(NodeHistory2) == 2), - ?assert(Key2 == <<"world2">>), - % Test case 3: Non-empty node_history case - {ok, UpdatedOpts3} = set_opts(#{}, UpdatedOpts2#{ <<"hello3">> => <<"world3">> }), - NodeHistory3 = hb_opts:get(node_history, not_found, UpdatedOpts3), - Key3 = hb_opts:get(<<"hello3">>, not_found, UpdatedOpts3), - ?event(debug_node_history, {node_history_length, length(NodeHistory3)}), - ?assert(length(NodeHistory3) == 3), - ?assert(Key3 == <<"world3">>). -``` - -### restart_server_test - -Ensure that the `start` hook can be used to modify the node options. 
We -Test the set_opts/2 function that merges request with options, - -```erlang -restart_server_test() -> - % We force HTTP2, overriding the HTTP3 feature, because HTTP3 restarts don't work yet. -``` - ---- - -*Generated from [hb_http_server.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_http_server.erl)* diff --git a/docs/book/src/hb_json.erl.md b/docs/book/src/hb_json.erl.md deleted file mode 100644 index e048b5d6b..000000000 --- a/docs/book/src/hb_json.erl.md +++ /dev/null @@ -1,39 +0,0 @@ -# hb_json - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_json.erl) - -Wrapper for encoding and decoding JSON. Supports maps and Jiffy's old -`ejson` format. This module abstracts the underlying JSON library, allowing -us to switch between libraries as needed in the future. - ---- - -## Exported Functions - -- `decode/1` -- `decode/2` -- `encode/1` - ---- - -### encode - -Wrapper for encoding and decoding JSON. Supports maps and Jiffy's old -Takes a term in Erlang's native form and encodes it as a JSON string. - -```erlang -encode(Term) -> - iolist_to_binary(json:encode(Term)). -``` - -### decode - -Takes a JSON string and decodes it into an Erlang term. - -```erlang -decode(Bin) -> json:decode(Bin). -``` - ---- - -*Generated from [hb_json.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_json.erl)* diff --git a/docs/book/src/hb_keccak.erl.md b/docs/book/src/hb_keccak.erl.md deleted file mode 100644 index 81ca7b8e7..000000000 --- a/docs/book/src/hb_keccak.erl.md +++ /dev/null @@ -1,123 +0,0 @@ -# hb_keccak - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_keccak.erl) - -## Exported Functions - -- `keccak_256/1` -- `key_to_ethereum_address/1` -- `sha3_256/1` - ---- - -### init - -```erlang -init() -> - SoName = filename:join([code:priv_dir(hb), "hb_keccak"]), - erlang:load_nif(SoName, 0). -``` - -### sha3_256 - -```erlang -sha3_256(_Bin) -> - erlang:nif_error(not_loaded). 
-``` - -### keccak_256 - -```erlang -keccak_256(_Bin) -> - erlang:nif_error(not_loaded). -``` - -### to_hex - -```erlang -to_hex(Bin) when is_binary(Bin) -> - binary:encode_hex(Bin). -``` - -### key_to_ethereum_address - -```erlang -key_to_ethereum_address(Key) when is_binary(Key) -> - <<_Prefix: 1/binary, NoCompressionByte/binary>> = Key, - Prefix = hb_util:to_hex(hb_keccak:keccak_256(NoCompressionByte)), - Last40 = binary:part(Prefix, byte_size(Prefix) - 40, 40), - Hash = hb_keccak:keccak_256(Last40), - HashHex = hb_util:to_hex(Hash), - ChecksumAddress = hash_to_checksum_address(Last40, HashHex), - ChecksumAddress. -``` - -### hash_to_checksum_address - -```erlang -hash_to_checksum_address(Last40, Hash) when - is_binary(Last40), - is_binary(Hash), - byte_size(Last40) =:= 40 -> - Checksummed = lists:zip(binary:bin_to_list(Last40), binary:bin_to_list(binary:part(Hash, 0, 40))), - Formatted = lists:map(fun({Char, H}) -> - case H >= $8 of - true -> string:to_upper([Char]); - false -> [Char] - end - end, Checksummed), - <<"0x", (list_to_binary(lists:append(Formatted)))/binary>>. -%% Test functions -``` - -### keccak_256_test - -```erlang -keccak_256_test() -> - Input = <<"testing">>, - Expected = <<"5F16F4C7F149AC4F9510D9CF8CF384038AD348B3BCDC01915F95DE12DF9D1B02">>, - Actual = to_hex(hb_keccak:keccak_256(Input)), - ?assertEqual(Expected, Actual). 
-``` - -### keccak_256_key_test - -```erlang -keccak_256_key_test() -> - Input = <<"BAoixXds4JhW42pzlLb83B3-I21lX78j3Q7cPaoFiCjMgjYwYLDj-xL132J147ifZFwRBmzmEMC8eYAXzbRNWuA">>, - BinaryInput = hb_util:decode(Input), - <<_Prefix: 1/binary, NoCompressionByte/binary>> = BinaryInput, - Prefix = hb_keccak:keccak_256(NoCompressionByte), - PrefixHex = hb_util:to_hex(Prefix), - ?assertEqual(PrefixHex, <<"12f9afe6abd38444cab38e8cb7b4360f7f6298de2e7a11009270f35f189bd77e">>), - Last40 = binary:part(PrefixHex, byte_size(PrefixHex) - 40, 40), - ?assertEqual(Last40, <<"b7b4360f7f6298de2e7a11009270f35f189bd77e">>), - Hash = hb_keccak:keccak_256(Last40), - HashHex = hb_util:to_hex(Hash), - ChecksumAddress = hash_to_checksum_address(Last40, HashHex), - ?assertEqual(ChecksumAddress, <<"0xb7B4360F7F6298dE2e7a11009270F35F189Bd77E">>). -``` - -### keccak_256_key_to_address_test - -```erlang -keccak_256_key_to_address_test() -> - Input = <<"BAoixXds4JhW42pzlLb83B3-I21lX78j3Q7cPaoFiCjMgjYwYLDj-xL132J147ifZFwRBmzmEMC8eYAXzbRNWuA">>, - ChecksumAddress = key_to_ethereum_address(hb_util:decode(Input)), - ?assertEqual(ChecksumAddress, <<"0xb7B4360F7F6298dE2e7a11009270F35F189Bd77E">>). -``` - -### sha3_256_test - -```erlang -sha3_256_test() -> - %% "abc" => known SHA3-256 hash from NIST - Input = <<"testing">>, - Expected = <<"7F5979FB78F082E8B1C676635DB8795C4AC6FABA03525FB708CB5FD68FD40C5E">>, - Actual = to_hex(hb_keccak:sha3_256(Input)), - ?assertEqual(Expected, Actual). -``` - ---- - -*Generated from [hb_keccak.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_keccak.erl)* diff --git a/docs/book/src/hb_link.erl.md b/docs/book/src/hb_link.erl.md deleted file mode 100644 index 21de8caa6..000000000 --- a/docs/book/src/hb_link.erl.md +++ /dev/null @@ -1,264 +0,0 @@ -# hb_link - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_link.erl) - -Utility functions for working with links. 
- ---- - -## Exported Functions - -- `decode_all_links/1` -- `format_unresolved/1` -- `format_unresolved/2` -- `format_unresolved/3` -- `format/1` -- `format/2` -- `format/3` -- `is_link_key/1` -- `normalize/2` -- `normalize/3` -- `remove_link_specifier/1` - ---- - -### normalize - -Utility functions for working with links. -Takes a message and ensures that it is normalized: - -```erlang -normalize(Msg, Opts) when is_map(Opts) -> - normalize(Msg, hb_opts:get(linkify_mode, offload, Opts), Opts). -``` - -### normalize - -```erlang -normalize(Msg, false, _Opts) -> - Msg; -``` - -### normalize - -```erlang -normalize(Msg, Mode, Opts) when is_map(Msg) -> - maps:merge( - maps:with([<<"commitments">>, <<"priv">>], Msg), - maps:from_list( - lists:map( - fun({Key, {link, ID, LinkOpts = #{ <<"type">> := <<"link">> }}}) -> - % The value is a link. Deconstruct it and ensure it is - % normalized (lazy links are made greedy, and both are - % returned in binary TABM form). -``` - -### normalize - -```erlang -normalize(OtherVal, Mode, Opts) when is_list(OtherVal) -> - lists:map(fun(X) -> normalize(X, Mode, Opts) end, OtherVal); -``` - -### normalize - -```erlang -normalize(OtherVal, _Mode, _Opts) -> - OtherVal. -``` - -### decode_all_links - -Decode links embedded in the headers of a message. - -```erlang -decode_all_links(Msg) when is_map(Msg) -> - maps:from_list( - lists:map( - fun({Key, MaybeID}) -> - case is_link_key(Key) of - true -> - NewKey = binary:part(Key, 0, byte_size(Key) - 5), - {NewKey, - { - link, - MaybeID, - #{ - <<"type">> => <<"link">>, - <<"lazy">> => false - } - } - }; - _ -> {Key, MaybeID} - end - end, - maps:to_list(Msg) - ) - ); -``` - -### decode_all_links - -Decode links embedded in the headers of a message. - -```erlang -decode_all_links(List) when is_list(List) -> - lists:map(fun(X) -> decode_all_links(X) end, List); -``` - -### decode_all_links - -Decode links embedded in the headers of a message. - -```erlang -decode_all_links(OtherVal) -> - OtherVal. 
-``` - -### is_link_key - -Determine if a key is an encoded link. - -```erlang -is_link_key(Key) when byte_size(Key) >= 5 -> - binary:part(Key, byte_size(Key) - 5, 5) =:= <<"+link">>; -``` - -### is_link_key - -Determine if a key is an encoded link. -Remove any `+link` suffixes from a key. - -```erlang -is_link_key(_) -> false. -``` - -### remove_link_specifier - -Determine if a key is an encoded link. -Remove any `+link` suffixes from a key. - -```erlang -remove_link_specifier(Key) -> - case is_link_key(Key) of - true -> binary:part(Key, 0, byte_size(Key) - 5); - false -> Key - end. -``` - -### format - -Format a link as a short string suitable for printing. Checks the node - -```erlang -format(Link) -> format(Link, #{}). -``` - -### format - -Format a link as a short string suitable for printing. Checks the node - -```erlang -format(Link, Opts) -> - format(Link, Opts, 0). -``` - -### format - -```erlang -format(Link, Opts, Indent) -> - case hb_opts:get(debug_resolve_links, false, Opts) of - true -> - try - hb_format:message( - hb_cache:ensure_all_loaded(Link, Opts), - Opts, - Indent - ) - catch - _:_ -> << "!UNRESOLVABLE! ", (format_unresolved(Link, Opts))/binary >> - end; - false -> format_unresolved(Link, Opts, Indent) - end. -``` - -### format_unresolved - -Format a link without resolving it. - -```erlang -format_unresolved(Link) -> - format_unresolved(Link, #{}). -``` - -### format_unresolved - -```erlang -format_unresolved({link, ID, Opts}, BaseOpts) -> - format_unresolved({link, ID, Opts}, BaseOpts, 0). -``` - -### format_unresolved - -```erlang -format_unresolved({link, ID, Opts}, BaseOpts, Indent) -> - hb_util:bin( - hb_format:indent( - "~s~s: ~s", - [ - case maps:get(<<"lazy">>, Opts, false) of - true -> <<"Lazy link">>; - false -> <<"Link">> - end, - case maps:get(<<"type">>, Opts, no_type) of - no_type -> <<>>; - Type -> <<" (to ", (hb_util:bin(Type))/binary, ")" >> - end, - ID - ], - BaseOpts, - Indent - ) - ). 
-``` - -### offload_linked_message_test - -```erlang -offload_linked_message_test() -> - Opts = #{}, - Msg = #{ - <<"immediate-key">> => <<"immediate-value">>, - <<"link-key">> => #{ - <<"immediate-key-2">> => <<"link-value">>, - <<"link-key-2">> => #{ - <<"immediate-key-3">> => <<"link-value-2">> - } - } - }, - Offloaded = normalize(Msg, offload, Opts), - Structured = hb_message:convert(Offloaded, <<"structured@1.0">>, tabm, Opts), - ?event(linkify, {test_recvd_linkified, {msg, Structured}}), - Loaded = hb_cache:ensure_all_loaded(Structured, Opts), - ?event(linkify, {test_recvd_loaded, {msg, Loaded}}), - ?assertEqual(Msg, Loaded). -``` - -### offload_list_test - -```erlang -offload_list_test() -> - Opts = #{}, - Msg = #{ - <<"list-key">> => [1.0, 2.0, 3.0] - }, - TABM = hb_message:convert(Msg, tabm, <<"structured@1.0">>, Opts), - Linkified = normalize(TABM, offload, Opts), - Msg2 = hb_message:convert(Linkified, <<"structured@1.0">>, tabm, Opts), - Res = hb_cache:ensure_all_loaded(Msg2, Opts), - ?assertEqual(Msg, Res). -``` - ---- - -*Generated from [hb_link.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_link.erl)* diff --git a/docs/book/src/hb_logger.erl.md b/docs/book/src/hb_logger.erl.md deleted file mode 100644 index 8e17454ab..000000000 --- a/docs/book/src/hb_logger.erl.md +++ /dev/null @@ -1,130 +0,0 @@ -# hb_logger - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_logger.erl) - -## Exported Functions - -- `log/2` -- `register/1` -- `report/1` -- `start/0` -- `start/1` - ---- - -### start - -```erlang -start() -> start(undefined). -``` - -### start - -```erlang -start(Client) -> - spawn(fun() -> - loop(#state{client = Client}) - end). -``` - -### log - -```erlang -log(Monitor, Data) -> - Monitor ! {log, Data}. -``` - -### register - -```erlang -register(Monitor) -> - ?event({self(), registering}), - Monitor ! {register, self()}. -``` - -### report - -```erlang -report(Monitor) -> - Monitor ! 
{report, self()}, - receive - {report, Activity} -> - Activity - end. -``` - -### loop - -```erlang -loop(#state { processes = [], client = undefined }) -> done; -``` - -### loop - -```erlang -loop(#state { processes = [], client = C, activity = A }) -> - C ! {?MODULE, self(), done, A}; -``` - -### loop - -```erlang -loop(State) -> - receive - {log, Activity} -> - console(State, Activity), - loop(State#state{ activity = [Activity | State#state.activity] }); - {register, PID} -> - ?event(registered), - %erlang:monitor(process, PID), - console(State, Act = {ok, registered, PID}), - ?event({registered, PID}), - loop(State#state{ - processes = - [PID | case State#state.processes of waiting -> []; L -> L end], - activity = [Act | State#state.activity] - }); - {'DOWN', _MonitorRef, process, PID, Reason} -> - console(State, Act = {terminated, Reason, PID}), - ?event({dead, PID}), - loop(State#state{ - processes = State#state.processes -- [PID], - activity = [Act | State#state.activity] - }); - {report, PID} -> - PID ! {report, State#state.activity}, - loop(State) - end. 
-``` - -### console - -```erlang -console(#state { console = false }, _) -> - not_printing; -``` - -### console - -```erlang -console(S, {Status, Type, Details}) when is_record(Details, tx) -> - console(S, {Status, Type, hb_util:id(Details)}); -``` - -### console - -```erlang -console(_S, {Status, Type, Details}) -> - io:format("### MU PUSH REPORT ~p ###~n~p: ~p~n~p~n~n", - [self(), Status, Type, Details]); -``` - -### console - -```erlang -console(_S, Act) -> -``` - ---- - -*Generated from [hb_logger.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_logger.erl)* diff --git a/docs/book/src/hb_maps.erl.md b/docs/book/src/hb_maps.erl.md deleted file mode 100644 index ff082504f..000000000 --- a/docs/book/src/hb_maps.erl.md +++ /dev/null @@ -1,398 +0,0 @@ -# hb_maps - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_maps.erl) - -An abstraction for working with maps in HyperBEAM, matching the -generic `maps` module, but additionally supporting the resolution of -links as they are encountered. These functions must be used extremely -carefully. In virtually all circumstances, the `hb_ao:resolve/3` or -`hb_ao:get/3` functions should be used instead, as they will execute the -full AO-Core protocol upon requests (normalizing keys, applying the -appropriate device's functions, as well as resolving links). By using this -module's functions, you are implicitly making the assumption that the message -in question is of the `~message@1.0` form, ignoring any other keys that its -actual device may present. This module is intended for the extremely rare -circumstances in which the additional overhead of the full AO-Core -execution cycle is not acceptable, and the data in question is known to -conform to the `~message@1.0` form. -If you do not understand any/all of the above, you are in the wrong place! 
-Utilise the `hb_ao` module and read the documentation therein, saving -yourself from the inevitable issues that will arise from using this -module without understanding the full implications. You have been warned. - ---- - -## Exported Functions - -- `filter/2` -- `filter/3` -- `filtermap/2` -- `filtermap/3` -- `find/2` -- `find/3` -- `fold/3` -- `fold/4` -- `from_list/1` -- `get/2` -- `get/3` -- `get/4` -- `is_key/2` -- `is_key/3` -- `keys/1` -- `keys/2` -- `map/2` -- `map/3` -- `merge/2` -- `merge/3` -- `put/3` -- `put/4` -- `remove/2` -- `remove/3` -- `size/1` -- `size/2` -- `take/2` -- `take/3` -- `to_list/1` -- `to_list/2` -- `update_with/3` -- `update_with/4` -- `values/1` -- `values/2` -- `with/2` -- `with/3` -- `without/2` -- `without/3` - ---- - -### get - -Get a value from a map, resolving links as they are encountered in both - -```erlang --spec get( - Key :: term(), - Map :: map(), - Default :: term(), - Opts :: map() -) -> term(). -``` - -```erlang -get(Key, Map, Default, Opts) -> - hb_cache:ensure_loaded( - maps:get( - Key, - hb_cache:ensure_loaded(Map, Opts), - Default - ), - Opts - ). -``` - -### put - -```erlang --spec put( - Key :: term(), - Value :: term(), - Map :: map(), - Opts :: map() -) -> map(). -``` - -```erlang -put(Key, Value, Map, Opts) -> - maps:put(Key, Value, hb_cache:ensure_loaded(Map, Opts)). -``` - -### map - -```erlang --spec map( - Fun :: fun((Key :: term(), Value :: term()) -> term()), - Map :: map() -) -> map(). -``` - -```erlang -map(Fun, Map) -> - map(Fun, Map, #{}). -``` - -### map - -```erlang --spec map( - Fun :: fun((Key :: term(), Value :: term()) -> term()), - Map :: map(), - Opts :: map() -) -> map(). -``` - -```erlang -map(Fun, Map, Opts) -> - maps:map( - fun(K, V) -> Fun(K, hb_cache:ensure_loaded(V, Opts)) end, - hb_cache:ensure_loaded(Map, Opts) - ). -``` - -### filter - -```erlang --spec filter( - Fun :: fun((Key :: term(), Value :: term()) -> boolean()), - Map :: map() -) -> map(). 
-``` - -```erlang -filter(Fun, Map) -> - filter(Fun, Map, #{}). -``` - -### filter - -```erlang --spec filter( - Fun :: fun((Key :: term(), Value :: term()) -> boolean()), - Map :: map(), - Opts :: map() -) -> map(). -``` - -```erlang -filter(Fun, Map, Opts) -> - maps:filtermap( - fun(K, V) -> - case Fun(K, Loaded = hb_cache:ensure_loaded(V, Opts)) of - true -> {true, Loaded}; - false -> false - end - end, - hb_cache:ensure_loaded(Map, Opts) - ). -``` - -### filtermap - -```erlang --spec filtermap( - Fun :: fun((Key :: term(), Value :: term()) -> {boolean(), term()}), - Map :: map() -) -> map(). -``` - -```erlang -filtermap(Fun, Map) -> - filtermap(Fun, Map, #{}). -``` - -### filtermap - -```erlang --spec filtermap( - Fun :: fun((Key :: term(), Value :: term()) -> {boolean(), term()}), - Map :: map(), - Opts :: map() -) -> map(). -``` - -```erlang -filtermap(Fun, Map, Opts) -> - maps:filtermap( - fun(K, V) -> Fun(K, hb_cache:ensure_loaded(V, Opts)) end, - hb_cache:ensure_loaded(Map, Opts) - ). -``` - -### fold - -```erlang --spec fold( - Fun :: fun((Key :: term(), Value :: term(), Acc :: term()) -> term()), - Acc :: term(), - Map :: map() -) -> term(). -``` - -```erlang -fold(Fun, Acc, Map) -> - fold(Fun, Acc, Map, #{}). -``` - -### fold - -```erlang --spec fold( - Fun :: fun((Key :: term(), Value :: term(), Acc :: term()) -> term()), - Acc :: term(), - Map :: map(), - Opts :: map() -) -> term(). -``` - -```erlang -fold(Fun, Acc, Map, Opts) -> - maps:fold( - fun(K, V, CurrAcc) -> Fun(K, hb_cache:ensure_loaded(V, Opts), CurrAcc) end, - Acc, - hb_cache:ensure_loaded(Map, Opts) - ). -``` - -### update_with - -```erlang --spec update_with( - Key :: term(), - Fun :: fun((Value :: term()) -> term()), - Map :: map() -) -> map(). -``` - -```erlang -update_with(Key, Fun, Map) -> - update_with(Key, Fun, Map, #{}). -``` - -### update_with - -```erlang --spec update_with( - Key :: term(), - Fun :: fun((Value :: term()) -> term()), - Map :: map(), - Opts :: map() -) -> map(). 
-``` - -```erlang -update_with(Key, Fun, Map, Opts) -> - maps:update_with(Key, Fun, hb_cache:ensure_loaded(Map, Opts), Opts). -``` - -### get_with_link_test - -```erlang --spec to_list(Map :: map(), Opts :: map()) -> [{Key :: term(), Value :: term()}]. -to_list(Map, Opts) -> - maps:to_list(hb_cache:ensure_loaded(Map, Opts)). -``` - -```erlang -get_with_link_test() -> - Bin = <<"TEST DATA">>, - Opts = #{}, - {ok, Location} = hb_cache:write(Bin, Opts), - Map = #{ 1 => 1, 2 => {link, Location, #{}}, 3 => 3 }, - ?assertEqual(Bin, get(2, Map)). -``` - -### map_with_link_test - -```erlang -map_with_link_test() -> - Bin = <<"TEST DATA">>, - Opts = #{}, - {ok, Location} = hb_cache:write(Bin, Opts), - Map = #{ 1 => 1, 2 => {link, Location, #{}}, 3 => 3 }, - ?assertEqual(#{1 => 1, 2 => Bin, 3 => 3}, map(fun(_K, V) -> V end, Map, #{})). -``` - -### get_with_typed_link_test - -```erlang -get_with_typed_link_test() -> - Bin = <<"123">>, - Opts = #{}, - {ok, Location} = hb_cache:write(Bin, Opts), - Map = #{ 1 => 1, 2 => {link, Location, #{ <<"type">> => integer }}, 3 => 3 }, - ?assertEqual(123, get(2, Map, undefined)). -``` - -### resolve_on_link_test - -```erlang -resolve_on_link_test() -> - Msg = #{ <<"test-key">> => <<"test-value">> }, - Opts = #{}, - {ok, ID} = hb_cache:write(Msg, Opts), - ?assertEqual( - {ok, <<"test-value">>}, - hb_ao:resolve({link, ID, #{}}, <<"test-key">>, #{}) - ). -``` - -### filter_with_link_test - -```erlang -filter_with_link_test() -> - Bin = <<"TEST DATA">>, - Opts = #{}, - {ok, Location} = hb_cache:write(Bin, Opts), - Map = #{ 1 => 1, 2 => {link, Location, #{}}, 3 => 3 }, - ?assertEqual(#{1 => 1, 3 => 3}, filter(fun(_, V) -> V =/= Bin end, Map)). 
-``` - -### filtermap_with_link_test - -```erlang -filtermap_with_link_test() -> - Bin = <<"TEST DATA">>, - Opts = #{}, - {ok, Location} = hb_cache:write(Bin, Opts), - Map = #{ 1 => 1, 2 => {link, Location, #{}}, 3 => 3 }, - ?assertEqual( - #{2 => <<"FOUND">>}, - filtermap( - fun(_, <<"TEST DATA">>) -> {true, <<"FOUND">>}; - (_K, _V) -> false - end, - Map - ) - ). -``` - -### fold_with_typed_link_test - -```erlang -fold_with_typed_link_test() -> - Bin = <<"123">>, - Opts = #{}, - {ok, Location} = hb_cache:write(Bin, Opts), - Map = #{ 1 => 1, 2 => {link, Location, #{ <<"type">> => integer }}, 3 => 3 }, - ?assertEqual(127, fold(fun(_, V, Acc) -> V + Acc end, 0, Map)). -``` - -### filter_passively_loads_test - -```erlang -filter_passively_loads_test() -> - Bin = <<"TEST DATA">>, - Opts = #{}, - {ok, Location} = hb_cache:write(Bin, Opts), - Map = #{ 1 => 1, 2 => {link, Location, #{}}, 3 => 3 }, - ?assertEqual( - #{1 => 1, 2 => <<"TEST DATA">>, 3 => 3}, - filter(fun(_, _) -> true end, Map) - ). -``` - -### filtermap_passively_loads_test - -```erlang -filtermap_passively_loads_test() -> - Bin = <<"TEST DATA">>, - Opts = #{}, - {ok, Location} = hb_cache:write(Bin, Opts), - Map = #{ 1 => 1, 2 => {link, Location, #{}}, 3 => 3 }, - ?assertEqual( - #{ 1 => 1, 2 => <<"TEST DATA">>, 3 => 3 }, - filtermap(fun(_, V) -> {true, V} end, Map) -``` - ---- - -*Generated from [hb_maps.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_maps.erl)* diff --git a/docs/book/src/hb_message.erl.md b/docs/book/src/hb_message.erl.md deleted file mode 100644 index e890e1dee..000000000 --- a/docs/book/src/hb_message.erl.md +++ /dev/null @@ -1,1150 +0,0 @@ -# hb_message - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_message.erl) - -This module acts an adapter between messages, as modeled in the -AO-Core protocol, and their uderlying binary representations and formats. 
-Unless you are implementing a new message serialization codec, you should -not need to interact with this module directly. Instead, use the -`hb_ao` interfaces to interact with all messages. The `dev_message` -module implements a device interface for abstracting over the different -message formats. -`hb_message` and the HyperBEAM caches can interact with multiple different -types of message formats: - - Richly typed AO-Core structured messages. - - Arweave transations. - - ANS-104 data items. - - HTTP Signed Messages. - - Flat Maps. -This module is responsible for converting between these formats. It does so -by normalizing messages to a common format: `Type Annotated Binary Messages` -(TABM). TABMs are deep Erlang maps with keys than only contain either other -TABMs or binary values. By marshalling all messages into this format, they -can easily be coerced into other output formats. For example, generating a -`HTTP Signed Message` format output from an Arweave transaction. TABM is -also a simple format from a computational perspective (only binary literals -and O(1) access maps), such that operations upon them are efficient. -The structure of the conversions is as follows: -
-    Arweave TX/ANS-104 ==> dev_codec_ans104:from/1 ==> TABM
-    HTTP Signed Message ==> dev_codec_httpsig_conv:from/1 ==> TABM
-    Flat Maps ==> dev_codec_flat:from/1 ==> TABM
-    TABM ==> dev_codec_structured:to/1 ==> AO-Core Message
-    AO-Core Message ==> dev_codec_structured:from/1 ==> TABM
-    TABM ==> dev_codec_ans104:to/1 ==> Arweave TX/ANS-104
-    TABM ==> dev_codec_httpsig_conv:to/1 ==> HTTP Signed Message
-    TABM ==> dev_codec_flat:to/1 ==> Flat Maps
-    ...
-
-Additionally, this module provides a number of utility functions for -manipulating messages. For example, `hb_message:sign/2` to sign a message of -arbitrary type, or `hb_formatter:format_msg/1` to print an AO-Core/TABM message in -a human-readable format. -The `hb_cache` module is responsible for storing and retrieving messages in -the HyperBEAM stores configured on the node. Each store has its own storage -backend, but each works with simple key-value pairs. Subsequently, the -`hb_cache` module uses TABMs as the internal format for storing and -retrieving messages. -Test vectors to ensure the functioning of this module and the codecs that -interact with it are found in `hb_message_test_vectors.erl`. - ---- - -## Exported Functions - -- `commit/2` -- `commit/3` -- `commitment_devices/2` -- `commitment/2` -- `commitment/3` -- `commitments/3` -- `committed/3` -- `convert/3` -- `convert/4` -- `default_tx_list/0` -- `diff/3` -- `filter_default_keys/1` -- `find_target/3` -- `id/1` -- `id/2` -- `id/3` -- `is_signed_key/3` -- `match/2` -- `match/3` -- `match/4` -- `minimize/1` -- `normalize_commitments/2` -- `print/1` -- `signers/2` -- `type/1` -- `uncommitted/1` -- `uncommitted/2` -- `verify/1` -- `verify/2` -- `verify/3` -- `with_commitments/3` -- `with_only_committed/2` -- `with_only_committers/2` -- `with_only_committers/3` -- `without_commitments/3` -- `without_unless_signed/3` - ---- - -### convert - -This module acts an adapter between messages, as modeled in the -Convert a message from one format to another. Taking a message in the - -```erlang -convert(Msg, TargetFormat, Opts) -> - convert(Msg, TargetFormat, <<"structured@1.0">>, Opts). -``` - -### convert - -This module acts an adapter between messages, as modeled in the -Convert a message from one format to another. 
Taking a message in the - -```erlang -convert(Msg, TargetFormat, tabm, Opts) -> - OldPriv = - if is_map(Msg) -> maps:get(<<"priv">>, Msg, #{}); - true -> #{} - end, - from_tabm(Msg, TargetFormat, OldPriv, Opts); -``` - -### convert - -This module acts an adapter between messages, as modeled in the -Convert a message from one format to another. Taking a message in the - -```erlang -convert(Msg, TargetFormat, SourceFormat, Opts) -> - OldPriv = - if is_map(Msg) -> maps:get(<<"priv">>, Msg, #{}); - true -> #{} - end, - TABM = - to_tabm( - case is_map(Msg) of - true -> hb_maps:without([<<"priv">>], Msg, Opts); - false -> Msg - end, - SourceFormat, - Opts - ), - case TargetFormat of - tabm -> restore_priv(TABM, OldPriv, Opts); - _ -> from_tabm(TABM, TargetFormat, OldPriv, Opts) - end. -``` - -### to_tabm - -```erlang -to_tabm(Msg, SourceFormat, Opts) -> - {SourceCodecMod, Params} = conversion_spec_to_req(SourceFormat, Opts), - % We use _from_ here because the codecs are labelled from the perspective - % of their own format. `dev_codec_ans104:from/1' will convert _from_ - % an ANS-104 message _into_ a TABM. -``` - -### from_tabm - -```erlang -from_tabm(Msg, TargetFormat, OldPriv, Opts) -> - {TargetCodecMod, Params} = conversion_spec_to_req(TargetFormat, Opts), - % We use the _to_ function here because each of the codecs we may call in - % this step are labelled from the perspective of the target format. For - % example, `dev_codec_httpsig:to/1' will convert _from_ a TABM to an - % HTTPSig message. -``` - -### restore_priv - -Add the existing `priv` sub-map back to a converted message, honoring - -```erlang -restore_priv(Msg, EmptyPriv, _Opts) when map_size(EmptyPriv) == 0 -> Msg; -``` - -### restore_priv - -Add the existing `priv` sub-map back to a converted message, honoring -Get a codec device and request params from the given conversion request. 
- -```erlang -restore_priv(Msg, OldPriv, Opts) -> - MsgPriv = hb_maps:get(<<"priv">>, Msg, #{}, Opts), - ?event({restoring_priv, {msg_priv, MsgPriv}, {old_priv, OldPriv}}), - NewPriv = hb_util:deep_merge(MsgPriv, OldPriv, Opts), - ?event({new_priv, NewPriv}), - Msg#{ <<"priv">> => NewPriv }. -``` - -### conversion_spec_to_req - -Add the existing `priv` sub-map back to a converted message, honoring -Get a codec device and request params from the given conversion request. - -```erlang -conversion_spec_to_req(Spec, Opts) when is_binary(Spec) or (Spec == tabm) -> - conversion_spec_to_req(#{ <<"device">> => Spec }, Opts); -``` - -### conversion_spec_to_req - -Add the existing `priv` sub-map back to a converted message, honoring -Get a codec device and request params from the given conversion request. - -```erlang -conversion_spec_to_req(Spec, Opts) -> - try - Device = - hb_maps:get( - <<"device">>, - Spec, - no_codec_device_in_conversion_spec, - Opts - ), - { - case Device of - tabm -> tabm; - _ -> - hb_ao:message_to_device( - #{ - <<"device">> => Device - }, - Opts - ) - end, - hb_maps:without([<<"device">>], Spec, Opts) - } - catch _:_ -> - throw({message_codec_not_extractable, Spec}) - end. -``` - -### id - -Return the ID of a message. - -```erlang -id(Msg) -> id(Msg, uncommitted). -``` - -### id - -Return the ID of a message. - -```erlang -id(Msg, Opts) when is_map(Opts) -> id(Msg, uncommitted, Opts); -``` - -### id - -Return the ID of a message. - -```erlang -id(Msg, Committers) -> id(Msg, Committers, #{}). -``` - -### id - -Return the ID of a message. 
- -```erlang -id(Msg, RawCommitters, Opts) -> - CommSpec = - case RawCommitters of - none -> #{ <<"committers">> => <<"none">> }; - uncommitted -> #{ <<"committers">> => <<"none">> }; - unsigned -> #{ <<"committers">> => <<"none">> }; - all -> #{ <<"committers">> => <<"all">> }; - signed -> #{ <<"committers">> => <<"all">> }; - List when is_list(List) -> #{ <<"committers">> => List } - end, - ?event({getting_id, {msg, Msg}, {spec, CommSpec}}), - {ok, ID} = - dev_message:id( - Msg, - CommSpec#{ <<"path">> => <<"id">> }, - Opts - ), - hb_util:human_id(ID). -``` - -### normalize_commitments - -Normalize the IDs in a message, ensuring that there is at least one - -```erlang -normalize_commitments(Msg, Opts) when is_map(Msg) -> - NormMsg = - maps:map( - fun(Key, Val) when Key == <<"commitments">> orelse Key == <<"priv">> -> - Val; - (_Key, Val) -> normalize_commitments(Val, Opts) - end, - Msg - ), - case hb_maps:get(<<"commitments">>, NormMsg, not_found, Opts) of - not_found -> - {ok, #{ <<"commitments">> := Commitments }} = - dev_message:commit( - NormMsg, - #{ <<"type">> => <<"unsigned">> }, - Opts - ), - NormMsg#{ <<"commitments">> => Commitments }; - _ -> NormMsg - end; -``` - -### normalize_commitments - -Normalize the IDs in a message, ensuring that there is at least one - -```erlang -normalize_commitments(Msg, Opts) when is_list(Msg) -> - lists:map(fun(X) -> normalize_commitments(X, Opts) end, Msg); -``` - -### normalize_commitments - -Normalize the IDs in a message, ensuring that there is at least one - -```erlang -normalize_commitments(Msg, _Opts) -> - Msg. -``` - -### with_only_committed - -Return a message with only the committed keys. 
If no commitments are - -```erlang -with_only_committed(Msg, Opts) when is_map(Msg) -> - ?event({with_only_committed, {msg, Msg}, {opts, Opts}}), - Comms = hb_maps:get(<<"commitments">>, Msg, not_found, Opts), - case is_map(Msg) andalso Comms /= not_found of - true -> - try - CommittedKeys = - hb_message:committed( - Msg, - #{ <<"commitments">> => <<"all">> }, - Opts - ), - % Add the ao-body-key to the committed list if it is not - % already present. -``` - -### with_only_committed - -```erlang -with_only_committed(Msg, _) -> - % If the message is not a map, it cannot be signed. -``` - -### with_links - -Filter keys from a map that do not match either the list of keys or - -```erlang -with_links(Keys, Map, Opts) -> - hb_maps:with( - Keys ++ - lists:map( - fun(Key) -> - <<(hb_link:remove_link_specifier(Key))/binary, "+link">> - end, - Keys - ), - Map, - Opts - ). -``` - -### with_only_committers - -Return the message with only the specified committers attached. - -```erlang -with_only_committers(Msg, Committers) -> - with_only_committers(Msg, Committers, #{}). -``` - -### with_only_committers - -```erlang -with_only_committers(Msg, Committers, Opts) when is_map(Msg) -> - NewCommitments = - hb_maps:filter( - fun(_, #{ <<"committer">> := Committer }) -> - lists:member(Committer, Committers); - (_, _) -> false - end, - hb_maps:get(<<"commitments">>, Msg, #{}, Opts), - Opts - ), - Msg#{ <<"commitments">> => NewCommitments }; -``` - -### with_only_committers - -```erlang -with_only_committers(Msg, _Committers, _Opts) -> - throw({unsupported_message_type, Msg}). -``` - -### is_signed_key - -Determine whether a specific key is part of a message's commitments. - -```erlang -is_signed_key(Key, Msg, Opts) -> - lists:member(Key, hb_message:committed(Msg, all, Opts)). -``` - -### without_unless_signed - -Remove the any of the given keys that are not signed from a message. 
- -```erlang -without_unless_signed(Key, Msg, Opts) when not is_list(Key) -> - without_unless_signed([Key], Msg, Opts); -``` - -### without_unless_signed - -Remove the any of the given keys that are not signed from a message. - -```erlang -without_unless_signed(Keys, Msg, Opts) -> - SignedKeys = hb_message:committed(Msg, all, Opts), - maps:without( - lists:filter(fun(K) -> not lists:member(K, SignedKeys) end, Keys), - Msg - ). -``` - -### commit - -Sign a message with the given wallet. - -```erlang -commit(Msg, WalletOrOpts) -> - commit( - Msg, - WalletOrOpts, - hb_opts:get( - commitment_device, - no_viable_commitment_device, - case is_map(WalletOrOpts) of - true -> WalletOrOpts; - false -> #{ priv_wallet => WalletOrOpts } - end - ) - ). -``` - -### commit - -```erlang -commit(Msg, Wallet, Format) when not is_map(Wallet) -> - commit(Msg, #{ priv_wallet => Wallet }, Format); -``` - -### commit - -```erlang -commit(Msg, Opts, CodecName) when is_binary(CodecName) -> - commit(Msg, Opts, #{ <<"commitment-device">> => CodecName }); -``` - -### commit - -```erlang -commit(Msg, Opts, Spec) -> - {ok, Signed} = - dev_message:commit( - Msg, - Spec#{ - <<"commitment-device">> => - case hb_maps:get(<<"commitment-device">>, Spec, none, Opts) of - none -> - case hb_maps:get(<<"device">>, Spec, none, Opts) of - none -> - throw( - { - no_commitment_device_in_codec_spec, - Spec - } - ); - Device -> Device - end; - CommitmentDevice -> CommitmentDevice - end - }, - Opts - ), - Signed. -``` - -### committed - -Return the list of committed keys from a message. - -```erlang -committed(Msg, all, Opts) -> - committed(Msg, #{ <<"committers">> => <<"all">> }, Opts); -``` - -### committed - -Return the list of committed keys from a message. - -```erlang -committed(Msg, none, Opts) -> - committed(Msg, #{ <<"committers">> => <<"none">> }, Opts); -``` - -### committed - -Return the list of committed keys from a message. 
- -```erlang -committed(Msg, List, Opts) when is_list(List) -> - committed(Msg, #{ <<"commitments">> => List }, Opts); -``` - -### committed - -Return the list of committed keys from a message. - -```erlang -committed(Msg, CommittersMsg, Opts) -> - ?event( - {committed, - {msg, {explicit, Msg}}, - {committers_msg, {explicit, CommittersMsg}}, - {opts, Opts} - } - ), - {ok, CommittedKeys} = dev_message:committed(Msg, CommittersMsg, Opts), - CommittedKeys. -``` - -### verify - -wrapper function to verify a message. - -```erlang -verify(Msg) -> verify(Msg, all). -``` - -### verify - -wrapper function to verify a message. - -```erlang -verify(Msg, Committers) -> - verify(Msg, Committers, #{}). -``` - -### verify - -```erlang -verify(Msg, all, Opts) -> - verify(Msg, <<"all">>, Opts); -``` - -### verify - -```erlang -verify(Msg, signers, Opts) -> - verify(Msg, hb_message:signers(Msg, Opts), Opts); -``` - -### verify - -```erlang -verify(Msg, Committers, Opts) when not is_map(Committers) -> - verify( - Msg, - #{ - <<"committers">> => - case ?IS_ID(Committers) of - true -> [Committers]; - false -> Committers - end - }, - Opts - ); -``` - -### verify - -```erlang -verify(Msg, Spec, Opts) -> - ?event(verify, {verify, {spec, Spec}}), - {ok, Res} = - dev_message:verify( - Msg, - Spec, - Opts - ), - Res. -``` - -### uncommitted - -Return the unsigned version of a message in AO-Core format. - -```erlang -uncommitted(Msg) -> uncommitted(Msg, #{}). -``` - -### uncommitted - -Return the unsigned version of a message in AO-Core format. - -```erlang -uncommitted(Bin, _Opts) when is_binary(Bin) -> Bin; -``` - -### uncommitted - -Return the unsigned version of a message in AO-Core format. -Return all of the committers on a message that have 'normal', 256 bit, - -```erlang -uncommitted(Msg, Opts) -> - hb_maps:remove(<<"commitments">>, Msg, Opts). -``` - -### signers - -Return the unsigned version of a message in AO-Core format. 
-Return all of the committers on a message that have 'normal', 256 bit, - -```erlang -signers(Msg, Opts) -> - hb_util:ok(dev_message:committers(Msg, #{}, Opts)). -``` - -### print - -Pretty-print a message. - -```erlang -print(Msg) -> print(Msg, 0). -``` - -### print - -Pretty-print a message. -Return the type of an encoded message. - -```erlang -print(Msg, Indent) -> - io:format(standard_error, "~s", [lists:flatten(hb_format:message(Msg, #{}, Indent))]). -``` - -### type - -Pretty-print a message. -Return the type of an encoded message. - -```erlang -type(TX) when is_record(TX, tx) -> tx; -``` - -### type - -Pretty-print a message. -Return the type of an encoded message. - -```erlang -type(Binary) when is_binary(Binary) -> binary; -``` - -### type - -Pretty-print a message. -Return the type of an encoded message. - -```erlang -type(Msg) when is_map(Msg) -> - IsDeep = lists:any( - fun({_, Value}) -> is_map(Value) end, - lists:filter( - fun({Key, _}) -> not hb_private:is_private(Key) end, - hb_maps:to_list(Msg) - ) - ), - case IsDeep of - true -> deep; - false -> shallow - end. -``` - -### match - -Check if two maps match, including recursively checking nested maps. - -```erlang -match(Map1, Map2) -> - match(Map1, Map2, strict). -``` - -### match - -```erlang -match(Map1, Map2, Mode) -> - match(Map1, Map2, Mode, #{}). -``` - -### match - -```erlang -match(Map1, Map2, Mode, Opts) -> - try unsafe_match(Map1, Map2, Mode, [], Opts) - catch _:Details -> Details - end. 
-``` - -### unsafe_match - -Match two maps, returning `true` if they match, or throwing an error - -```erlang -unsafe_match(Map1, Map2, Mode, Path, Opts) -> - Keys1 = - hb_maps:keys( - NormMap1 = hb_util:lower_case_key_map(minimize( - normalize(hb_ao:normalize_keys(Map1, Opts), Opts), - [<<"content-type">>, <<"ao-body-key">>] - ), Opts) - ), - Keys2 = - hb_maps:keys( - NormMap2 = hb_util:lower_case_key_map(minimize( - normalize(hb_ao:normalize_keys(Map2, Opts), Opts), - [<<"content-type">>, <<"ao-body-key">>] - ), Opts) - ), - PrimaryKeysPresent = - (Mode == primary) andalso - lists:all( - fun(Key) -> lists:member(Key, Keys1) end, - Keys1 - ), - ?event(match, - {match, - {keys1, Keys1}, - {keys2, Keys2}, - {mode, Mode}, - {primary_keys_present, PrimaryKeysPresent}, - {msg1, Map1}, - {msg2, Map2} - } - ), - case (Keys1 == Keys2) or (Mode == only_present) or PrimaryKeysPresent of - true -> - lists:all( - fun(Key) -> - ?event(match, {matching_key, Key}), - Val1 = - hb_ao:normalize_keys( - hb_maps:get(Key, NormMap1, not_found, Opts), - Opts - ), - Val2 = - hb_ao:normalize_keys( - hb_maps:get(Key, NormMap2, not_found, Opts), - Opts - ), - BothPresent = (Val1 =/= not_found) and (Val2 =/= not_found), - case (not BothPresent) and (Mode == only_present) of - true -> true; - false -> - case is_map(Val1) andalso is_map(Val2) of - true -> - unsafe_match(Val1, Val2, Mode, Path ++ [Key], Opts); - false -> - case {Val1, Val2} of - {V, V} -> true; - {V, '_'} when V =/= not_found -> true; - {'_', V} when V =/= not_found -> true; - {'_', '_'} -> true; - _ -> - throw( - {value_mismatch, - hb_format:short_id( - hb_path:to_binary( - Path ++ [Key] - ) - ), - {val1, Val1}, - {val2, Val2} - } - ) - end - end - end - end, - Keys1 - ); - false -> - throw( - {keys_mismatch, - {path, hb_format:short_id(hb_path:to_binary(Path))}, - {keys1, Keys1}, - {keys2, Keys2} - } - ) - end. 
-``` - -### matchable_keys - -```erlang -matchable_keys(Map) -> - lists:sort(lists:map(fun hb_ao:normalize_key/1, hb_maps:keys(Map))). -``` - -### diff - -Return the numeric differences between two messages, matching deeply - -```erlang -diff(Msg1, Msg2, Opts) when is_map(Msg1) andalso is_map(Msg2) -> - maps:filtermap( - fun(Key, Val2) -> - case hb_maps:get(Key, Msg1, not_found, Opts) of - Val2 -> - % The key is present in both maps, and the values match. -``` - -### diff - -```erlang -diff(_Val1, _Val2, _Opts) -> - not_found. -``` - -### with_commitments - -Filter messages that do not match the 'spec' given. The underlying match - -```erlang -with_commitments(ID, Msg, Opts) when ?IS_ID(ID) -> - with_commitments([ID], Msg, Opts); -``` - -### with_commitments - -Filter messages that do not match the 'spec' given. The underlying match - -```erlang -with_commitments(Spec, Msg = #{ <<"commitments">> := Commitments }, Opts) -> - ?event({with_commitments, {spec, Spec}, {commitments, Commitments}}), - FilteredCommitments = - hb_maps:filter( - fun(ID, CommMsg) -> - if is_list(Spec) -> - lists:member(ID, Spec); - is_map(Spec) -> - match(Spec, CommMsg, primary, Opts) == true - end - end, - Commitments, - Opts - ), - ?event({with_commitments, {filtered_commitments, FilteredCommitments}}), - Msg#{ <<"commitments">> => FilteredCommitments }; -``` - -### with_commitments - -Filter messages that do not match the 'spec' given. The underlying match - -```erlang -with_commitments(_Spec, Msg, _Opts) -> - Msg. -``` - -### without_commitments - -Filter messages that match the 'spec' given. 
Inverts the `with_commitments/2` - -```erlang -without_commitments(Spec, Msg = #{ <<"commitments">> := Commitments }, Opts) -> - ?event({without_commitments, {spec, Spec}, {msg, Msg}, {commitments, Commitments}}), - FilteredCommitments = - hb_maps:without( - hb_maps:keys( - hb_maps:get( - <<"commitments">>, - with_commitments(Spec, Msg, Opts), - #{}, - Opts - ) - ), - Commitments - ), - ?event({without_commitments, {filtered_commitments, FilteredCommitments}}), - Msg#{ <<"commitments">> => FilteredCommitments }; -``` - -### without_commitments - -Filter messages that match the 'spec' given. Inverts the `with_commitments/2` - -```erlang -without_commitments(_Spec, Msg, _Opts) -> - Msg. -``` - -### commitment - -Extract a commitment from a message given a `committer` or `commitment` - -```erlang -commitment(ID, Msg) -> - commitment(ID, Msg, #{}). -``` - -### commitment - -```erlang -commitment(ID, Link, Opts) when ?IS_LINK(Link) -> - commitment(ID, hb_cache:ensure_loaded(Link, Opts), Opts); -``` - -### commitment - -```erlang -commitment(ID, #{ <<"commitments">> := Commitments }, Opts) - when is_binary(ID), is_map_key(ID, Commitments) -> - hb_maps:get( - ID, - Commitments, - not_found, - Opts - ); -``` - -### commitment - -```erlang -commitment(Spec, Msg, Opts) -> - Matches = commitments(Spec, Msg, Opts), - ?event(debug_commitment, {commitment, {spec, Spec}, {matches, Matches}}), - if - map_size(Matches) == 0 -> not_found; - map_size(Matches) == 1 -> - CommID = hd(hb_maps:keys(Matches)), - {ok, CommID, hb_util:ok(hb_maps:find(CommID, Matches, Opts))}; - true -> - ?event(commitment, {multiple_matches, {matches, Matches}}), - multiple_matches - end; -``` - -### commitment - -```erlang -commitment(_Spec, _Msg, _Opts) -> - % The message has no commitments, so the spec can never match. -``` - -### commitments - -Return a list of all commitments that match the spec. 
- -```erlang -commitments(ID, Link, Opts) when ?IS_LINK(Link) -> - commitments(ID, hb_cache:ensure_loaded(Link, Opts), Opts); -``` - -### commitments - -Return a list of all commitments that match the spec. - -```erlang -commitments(CommitterID, Msg, Opts) when is_binary(CommitterID) -> - commitments(#{ <<"committer">> => CommitterID }, Msg, Opts); -``` - -### commitments - -Return a list of all commitments that match the spec. - -```erlang -commitments(Spec, #{ <<"commitments">> := Commitments }, Opts) -> - hb_maps:filtermap( - fun(_ID, CommMsg) -> - case match(Spec, CommMsg, primary, Opts) of - true -> {true, CommMsg}; - _ -> false - end - end, - Commitments, - Opts - ); -``` - -### commitments - -Return a list of all commitments that match the spec. - -```erlang -commitments(_Spec, _Msg, _Opts) -> - #{}. -``` - -### commitment_devices - -Return the devices for which there are commitments on a message. - -```erlang -commitment_devices(#{ <<"commitments">> := Commitments }, Opts) -> - lists:map( - fun(CommMsg) -> - hb_ao:get(<<"commitment-device">>, CommMsg, Opts) - end, - maps:values(Commitments) - ); -``` - -### commitment_devices - -Return the devices for which there are commitments on a message. - -```erlang -commitment_devices(_Msg, _Opts) -> - []. -``` - -### find_target - -Implements a standard pattern in which the target for an operation is - -```erlang -find_target(Self, Req, Opts) -> - GetOpts = Opts#{ - hashpath => ignore, - cache_control => [<<"no-cache">>, <<"no-store">>] - }, - {ok, - case hb_maps:get(<<"target">>, Req, <<"self">>, GetOpts) of - <<"self">> -> Self; - Key -> - hb_maps:get( - Key, - Req, - hb_maps:get(<<"body">>, Req, GetOpts), - GetOpts - ) - end - }. -``` - -### minimize - -Remove keys from the map that can be regenerated. Optionally takes an - -```erlang -minimize(Msg) -> minimize(Msg, []). -``` - -### minimize - -Remove keys from the map that can be regenerated. 
Optionally takes an - -```erlang -minimize(RawVal, _) when not is_map(RawVal) -> RawVal; -``` - -### minimize - -Remove keys from the map that can be regenerated. Optionally takes an - -```erlang -minimize(Map, ExtraKeys) -> - NormKeys = - lists:map(fun hb_ao:normalize_key/1, ?REGEN_KEYS) - ++ lists:map(fun hb_ao:normalize_key/1, ExtraKeys), - maps:filter( - fun(Key, _) -> - (not lists:member(hb_ao:normalize_key(Key), NormKeys)) - andalso (not hb_private:is_private(Key)) - end, - maps:map(fun(_K, V) -> minimize(V) end, Map) - ). -``` - -### normalize - -Return a map with only the keys that necessary, without those that can - -```erlang -normalize(Map, Opts) when is_map(Map) orelse is_list(Map) -> - NormalizedMap = hb_ao:normalize_keys(Map, Opts), - FilteredMap = filter_default_keys(NormalizedMap), - hb_maps:with(matchable_keys(FilteredMap), FilteredMap); -``` - -### normalize - -Return a map with only the keys that necessary, without those that can - -```erlang -normalize(Other, _Opts) -> - Other. -``` - -### filter_default_keys - -Remove keys from a map that have the default values found in the tx - -```erlang -filter_default_keys(Map) -> - DefaultsMap = default_tx_message(), - maps:filter( - fun(Key, Value) -> - case hb_maps:find(hb_ao:normalize_key(Key), DefaultsMap) of - {ok, Value} -> false; - _ -> true - end - end, - Map - ). -``` - -### default_tx_message - -Get the normalized fields and default values of the tx record. - -```erlang -default_tx_message() -> - hb_maps:from_list(default_tx_list()). 
-``` - -### default_tx_list - -Get the ordered list of fields as AO-Core keys and default values of - -```erlang -default_tx_list() -> - Keys = lists:map(fun hb_ao:normalize_key/1, record_info(fields, tx)), -``` - ---- - -*Generated from [hb_message.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_message.erl)* diff --git a/docs/book/src/hb_message_test_vectors.erl.md b/docs/book/src/hb_message_test_vectors.erl.md deleted file mode 100644 index 36e40e41e..000000000 --- a/docs/book/src/hb_message_test_vectors.erl.md +++ /dev/null @@ -1,1736 +0,0 @@ -# hb_message_test_vectors - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_message_test_vectors.erl) - -A battery of test vectors for message codecs, implementing the -`message@1.0` encoding and commitment APIs. Additionally, this module -houses tests that ensure the general functioning of the `hb_message` API. - ---- - -### run_test - -A battery of test vectors for message codecs, implementing the -Test invocation function, making it easier to run a specific test. - -```erlang -run_test() -> - hb:init(), - nested_structured_fields_test( - #{ <<"device">> => <<"json@1.0">>, <<"bundle">> => true }, - test_opts(normal) - ). -``` - -### test_codecs - -Return a list of codecs to test. Disable these as necessary if you need - -```erlang -test_codecs() -> - [ - <<"structured@1.0">>, - <<"httpsig@1.0">>, - #{ <<"device">> => <<"httpsig@1.0">>, <<"bundle">> => true }, - <<"flat@1.0">>, - <<"ans104@1.0">>, - #{ <<"device">> => <<"ans104@1.0">>, <<"bundle">> => true }, - <<"json@1.0">>, - #{ <<"device">> => <<"json@1.0">>, <<"bundle">> => true } - ]. -``` - -### suite_test_opts - -Return a set of options for testing, taking the codec name as an - -```erlang -suite_test_opts() -> - [ - #{ - name => normal, - desc => <<"Default opts">>, - opts => test_opts(normal) - } - ]. 
-``` - -### suite_test_opts - -```erlang -suite_test_opts(OptsName) -> - [ O || O = #{ name := OName } <- suite_test_opts(), OName == OptsName ]. -``` - -### test_opts - -```erlang -test_opts(normal) -> - #{ - store => hb_test_utils:test_store(), - priv_wallet => hb:wallet() - }. -``` - -### test_suite - -```erlang -test_suite() -> - [ - % Basic operations - {<<"Binary to binary">>, - fun binary_to_binary_test/2}, - {<<"Match">>, - fun match_test/2}, - {<<"Basic message encoding and decoding">>, - fun basic_message_codec_test/2}, - {<<"Priv survives conversion">>, - fun priv_survives_conversion_test/2}, - {<<"Message with body">>, - fun set_body_codec_test/2}, - {<<"Message with large keys">>, - fun message_with_large_keys_test/2}, - {<<"Structured field atom parsing">>, - fun structured_field_atom_parsing_test/2}, - {<<"Structured field decimal parsing">>, - fun structured_field_decimal_parsing_test/2}, - {<<"Unsigned id">>, - fun unsigned_id_test/2}, - % Nested structures - {<<"Simple nested message">>, - fun simple_nested_message_test/2}, - {<<"Message with simple embedded list">>, - fun message_with_simple_embedded_list_test/2}, - {<<"Nested empty map">>, - fun nested_empty_map_test/2}, - {<<"Empty body">>, - fun empty_body_test/2}, - {<<"Nested structured fields">>, - fun nested_structured_fields_test/2}, - {<<"Single layer message to encoding">>, - fun single_layer_message_to_encoding_test/2}, - {<<"Nested body list">>, - fun nested_body_list_test/2}, - {<<"Empty string in nested tag">>, - fun empty_string_in_nested_tag_test/2}, - {<<"Deep typed message ID">>, - fun deep_typed_message_id_test/2}, - {<<"Encode small balance table">>, - fun encode_small_balance_table_test/2}, - {<<"Encode large balance table">>, - fun encode_large_balance_table_test/2}, - {<<"Normalize commitments">>, - fun normalize_commitments_test/2}, - % Signed messages - {<<"Signed message to message and back">>, - fun signed_message_encode_decode_verify_test/2}, - {<<"Specific order 
signed message">>, - fun specific_order_signed_message_test/2}, - {<<"Specific order deeply nested signed message">>, - fun specific_order_deeply_nested_signed_message_test/2}, - {<<"Signed only committed data field">>, - fun signed_only_committed_data_field_test/2}, - {<<"Signed simple nested message">>, - fun simple_signed_nested_message_test/2}, - {<<"Signed nested message">>, - fun signed_nested_message_with_child_test/2}, - {<<"Committed keys">>, - fun committed_keys_test/2}, - {<<"Committed empty keys">>, - fun committed_empty_keys_test/2}, - {<<"Signed list HTTP response">>, - fun signed_list_test/2}, - {<<"Sign node message">>, - fun sign_node_message_test/2}, - {<<"Complex signed message">>, - fun complex_signed_message_test/2}, - {<<"Nested message with large keys">>, - fun nested_message_with_large_keys_test/2}, - {<<"Signed nested complex signed message">>, - fun verify_nested_complex_signed_test/2}, - % Complex structures - {<<"Nested message with large keys and content">>, - fun nested_message_with_large_keys_and_content_test/2}, - {<<"Nested message with large content">>, - fun nested_message_with_large_content_test/2}, - {<<"Deeply nested message with content">>, - fun deeply_nested_message_with_content_test/2}, - {<<"Deeply nested message with only content">>, - fun deeply_nested_message_with_only_content/2}, - {<<"Signed deep serialize and deserialize">>, - fun signed_deep_message_test/2}, - {<<"Signed nested data key">>, - fun signed_nested_data_key_test/2}, - {<<"Signed message with hashpath">>, - fun hashpath_sign_verify_test/2}, - {<<"Message with derived components">>, - fun signed_message_with_derived_components_test/2}, - {<<"Large body committed keys">>, - fun large_body_committed_keys_test/2}, - {<<"Signed with inner signed">>, - fun signed_with_inner_signed_message_test/2}, - {<<"Recursive nested list">>, - fun recursive_nested_list_test/2}, - {<<"Sign links">>, - fun sign_links_test/2}, - {<<"ID of linked message">>, - fun 
id_of_linked_message_test/2}, - {<<"Sign deep message from lazy cache read">>, - fun sign_deep_message_from_lazy_cache_read_test/2}, - {<<"ID of deep message and link message match">>, - fun id_of_deep_message_and_link_message_match_test/2}, - {<<"Signed non-bundle is bundlable">>, - fun signed_non_bundle_is_bundlable_test/2}, - {<<"Bundled ordering">>, - fun bundled_ordering_test/2}, - {<<"Codec round-trip conversion is idempotent">>, - fun codec_roundtrip_conversion_is_idempotent_test/2}, - {<<"Bundled and unbundled IDs differ">>, - fun bundled_and_unbundled_ids_differ_test/2}, - {<<"Tabm conversion is idempotent">>, - fun tabm_conversion_is_idempotent_test/2} - ]. -``` - -### suite_test_ - -Organizes a test battery for the `hb_message` module and its codecs. - -```erlang -suite_test_() -> - hb_test_utils:suite_with_opts( - codec_test_suite( - test_codecs(), - normal - ), - suite_test_opts(normal) - ). -``` - -### codec_test_suite - -Run the test suite for a set of codecs, using the given options type. - -```erlang -codec_test_suite(Codecs, OptsType) -> - lists:flatmap( - fun(CodecName) -> - lists:map(fun({Desc, Test}) -> - TestName = - binary_to_list( - << (suite_name(CodecName))/binary, ": ", Desc/binary >> - ), - TestSpecificOpts = test_opts(OptsType), - { - Desc, - TestName, - fun(_SuiteOpts) -> Test(CodecName, TestSpecificOpts) end - } - end, test_suite()) - end, - Codecs - ). -``` - -### suite_name - -Create a name for a suite from a codec spec. - -```erlang -suite_name(CodecSpec) when is_binary(CodecSpec) -> CodecSpec; -``` - -### suite_name - -Create a name for a suite from a codec spec. - -```erlang -suite_name(CodecSpec) when is_map(CodecSpec) -> - CodecName = maps:get(<<"device">>, CodecSpec, <<"[! NO CODEC !]">>), - case maps:get(<<"bundle">>, CodecSpec, false) of - false -> CodecName; - true -> << CodecName/binary, " (bundle)">> - end. -``` - -### is_idempotent - -Tests a message transforming function to ensure that it is idempotent. 
- -```erlang -is_idempotent(Func, Msg, Opts) -> - Run = fun(M) -> case Func(M) of {ok, Res} -> Res; Res -> Res end end, - After1 = Run(Msg), - After2 = Run(After1), - After3 = Run(After2), - MatchRes1 = hb_message:match(After1, After2, strict, Opts), - MatchRes2 = hb_message:match(After2, After3, strict, Opts), - ?event({is_idempotent, {match_res1, MatchRes1}, {match_res2, MatchRes2}}), - MatchRes1 andalso MatchRes2. -``` - -### tabm_conversion_is_idempotent_test - -Ensure that converting a message to/from TABM multiple times repeatedly - -```erlang -tabm_conversion_is_idempotent_test(_Codec, Opts) -> - From = fun(M) -> hb_message:convert(M, <<"structured@1.0">>, tabm, Opts) end, - To = fun(M) -> hb_message:convert(M, tabm, <<"structured@1.0">>, Opts) end, - SimpleMsg = #{ <<"a">> => <<"x">>, <<"b">> => <<"y">>, <<"c">> => <<"z">> }, - ComplexMsg = - #{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - Signed = hb_message:commit( - #{ - <<"type">> => <<"Message">>, - <<"function">> => <<"fac">>, - <<"parameters">> => #{ - <<"a">> => 1 - }, - <<"content-type">> => <<"application/html">>, - <<"body">> => - << - """ - -

Hello, multiline message

- - """ - >> - }, - Opts, - <<"structured@1.0">> - ) - }, - ?assert(is_idempotent(From, SimpleMsg, Opts)), - ?assert(is_idempotent(From, Signed, Opts)), - ?assert(is_idempotent(From, ComplexMsg, Opts)), - ?assert(is_idempotent(To, SimpleMsg, Opts)), - ?assert(is_idempotent(To, Signed, Opts)), - ?assert(is_idempotent(To, ComplexMsg, Opts)). -``` - -### codec_roundtrip_conversion_is_idempotent_test - -Ensure that converting a message to a codec, then back to TABM multiple - -```erlang -codec_roundtrip_conversion_is_idempotent_test(Codec, Opts) -> - Roundtrip = - fun(M) -> - hb_message:convert( - hb_message:convert(M, Codec, <<"structured@1.0">>, Opts), - <<"structured@1.0">>, - Codec, - Opts - ) - end, - SimpleMsg = #{ <<"a">> => <<"x">>, <<"b">> => <<"y">>, <<"c">> => <<"z">> }, - ComplexMsg = - #{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - Signed = hb_message:commit( - #{ - <<"type">> => <<"Message">>, - <<"function">> => <<"fac">>, - <<"parameters">> => #{ - <<"a">> => 1 - }, - <<"content-type">> => <<"application/html">>, - <<"body">> => - << - """ - -

Hello, multiline message

- - """ - >> - }, - Opts, - Codec - ) - }, - ?assert(is_idempotent(Roundtrip, SimpleMsg, Opts)), - ?assert(is_idempotent(Roundtrip, Signed, Opts)), - ?assert(is_idempotent(Roundtrip, ComplexMsg, Opts)). -``` - -### default_keys_removed_test - -Test that the filter_default_keys/1 function removes TX fields - -```erlang -default_keys_removed_test() -> - TX = #tx { unsigned_id = << 1:256 >>, anchor = << 2:256 >> }, - TXMap = #{ - <<"unsigned_id">> => TX#tx.unsigned_id, - <<"anchor">> => TX#tx.anchor, - <<"owner">> => TX#tx.owner, - <<"target">> => TX#tx.target, - <<"data">> => TX#tx.data - }, - FilteredMap = hb_message:filter_default_keys(TXMap), - ?assertEqual(<< 1:256 >>, hb_maps:get(<<"unsigned_id">>, FilteredMap)), - ?assertEqual(<< 2:256 >>, hb_maps:get(<<"anchor">>, FilteredMap, not_found)), - ?assertEqual(not_found, hb_maps:get(<<"owner">>, FilteredMap, not_found)), - ?assertEqual(not_found, hb_maps:get(<<"target">>, FilteredMap, not_found)). -``` - -### minimization_test - -Test that the filter_default_keys/1 function removes TX fields - -```erlang -minimization_test() -> - Msg = #{ - <<"unsigned_id">> => << 1:256 >>, - <<"id">> => << 2:256 >> - }, - MinimizedMsg = hb_message:minimize(Msg), - ?event({minimized, MinimizedMsg}), - ?assertEqual(1, hb_maps:size(MinimizedMsg)). -``` - -### match_modes_test - -```erlang -match_modes_test() -> - Msg1 = #{ <<"a">> => 1, <<"b">> => 2 }, - Msg2 = #{ <<"a">> => 1 }, - Msg3 = #{ <<"a">> => 1, <<"b">> => 2, <<"c">> => 3 }, - ?assert(hb_message:match(Msg1, Msg2, only_present)), - ?assert(hb_message:match(Msg2, Msg1, strict) =/= true), - ?assert(hb_message:match(Msg1, Msg3, primary)), - ?assert(hb_message:match(Msg3, Msg1, primary) =/= true). 
-``` - -### basic_message_codec_test - -```erlang -basic_message_codec_test(Codec, Opts) -> - Msg = #{ <<"normal_key">> => <<"NORMAL_VALUE">> }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({decoded, Decoded}), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). -``` - -### set_body_codec_test - -```erlang -set_body_codec_test(Codec, Opts) -> - Msg = #{ <<"body">> => <<"NORMAL_VALUE">>, <<"test-key">> => <<"Test-Value">> }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). -``` - -### single_layer_message_to_encoding_test - -Test that we can convert a message into a tx record and back. - -```erlang -single_layer_message_to_encoding_test(Codec, Opts) -> - Msg = #{ - <<"anchor">> => << 2:256 >>, - <<"target">> => << 4:256 >>, - <<"data">> => <<"DATA">>, - <<"special-key">> => <<"SPECIAL_VALUE">> - }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({decoded, Decoded}), - ?event({matching, {input, Msg}, {output, Decoded}}), - MatchRes = hb_message:match(Msg, Decoded, strict, Opts), - ?event({match_result, MatchRes}), - ?assert(MatchRes). 
-``` - -### signed_only_committed_data_field_test - -```erlang -signed_only_committed_data_field_test(Codec, Opts) -> - Msg = hb_message:commit(#{ <<"data">> => <<"DATA">> }, Opts, Codec), - ?event({signed_msg, Msg}), - {ok, OnlyCommitted} = hb_message:with_only_committed(Msg, Opts), - ?event({only_committed, OnlyCommitted}), - Encoded = hb_message:convert(OnlyCommitted, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({decoded, Decoded}), - MatchRes = hb_message:match(Msg, OnlyCommitted, strict, Opts), - ?event({match_result, MatchRes}), - ?assert(MatchRes), - ?assert(hb_message:verify(OnlyCommitted, all, Opts)). -``` - -### signed_nested_data_key_test - -```erlang -signed_nested_data_key_test(Codec, Opts) -> - Msg = - #{ - <<"outer-data">> => <<"outer">>, - <<"body">> => - #{ - <<"inner-data">> => <<"inner">>, - <<"data">> => <<"DATA">> - } - }, - Signed = hb_message:commit(Msg, Opts, Codec), - ?event({signed, Signed}), - Encoded = hb_message:convert(Signed, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({decoded, Decoded}), - LoadedMsg = hb_cache:ensure_all_loaded(Decoded, Opts), - ?event({matching, {input, Msg}, {output, LoadedMsg}}), - ?assert(hb_message:match(Msg, LoadedMsg, primary, Opts)). -``` - -### match_test - -Test that the message matching function works. - -```erlang -match_test(Codec, Opts) -> - Msg = #{ <<"a">> => 1, <<"b">> => 2 }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({decoded, Decoded}), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). 
-``` - -### binary_to_binary_test - -```erlang -binary_to_binary_test(Codec, Opts) -> - % Serialization must be able to turn a raw binary into a TX, then turn - % that TX back into a binary and have the result match the original. -``` - -### structured_field_atom_parsing_test - -Structured field parsing tests. - -```erlang -structured_field_atom_parsing_test(Codec, Opts) -> - Msg = #{ highly_unusual_http_header => highly_unusual_value }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). -``` - -### structured_field_decimal_parsing_test - -```erlang -structured_field_decimal_parsing_test(Codec, Opts) -> - Msg = #{ integer_field => 1234567890 }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). -``` - -### message_with_large_keys_test - -Test that the data field is correctly managed when we have multiple - -```erlang -message_with_large_keys_test(Codec, Opts) -> - Msg = #{ - <<"normal_key">> => <<"normal_value">>, - <<"large_key">> => << 0:((1 + 1024) * 8) >>, - <<"another_large_key">> => << 0:((1 + 1024) * 8) >>, - <<"another_normal_key">> => <<"another_normal_value">> - }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). 
-``` - -### verify_nested_complex_signed_test - -Check that a nested signed message with an embedded typed list can - -```erlang -verify_nested_complex_signed_test(Codec, Opts) -> - Msg = - hb_message:commit(#{ - <<"path">> => <<"schedule">>, - <<"method">> => <<"POST">>, - <<"body">> => - Inner = hb_message:commit( - #{ - <<"type">> => <<"Message">>, - <<"function">> => <<"fac">>, - <<"parameters">> => #{ - <<"a">> => 1 - }, - <<"content-type">> => <<"application/html">>, - <<"body">> => - << - """ - -

Hello, multiline message

- - """ - >> - }, - Opts, - Codec - ) - }, - Opts, - Codec - ), - ?event({signed, Msg}), - ?event({inner, Inner}), - % Ensure that the messages verify prior to conversion. -``` - -### nested_message_with_large_keys_and_content_test - -Check that large keys and data fields are correctly handled together. - -```erlang -nested_message_with_large_keys_and_content_test(Codec, Opts) -> - MainBodyKey = - case Codec of - <<"ans104@1.0">> -> <<"data">>; - _ -> <<"body">> - end, - Msg = #{ - <<"normal_key">> => <<"normal_value">>, - <<"large_key">> => << 0:(1024 * 16) >>, - <<"another_large_key">> => << 0:(1024 * 16) >>, - <<"another_normal_key">> => <<"another_normal_value">>, - MainBodyKey => <<"Hey from the data field!">> - }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({matching, {input, Msg}, {output, Decoded}}), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). -``` - -### simple_nested_message_test - -```erlang -simple_nested_message_test(Codec, Opts) -> - Msg = #{ - <<"a">> => <<"1">>, - <<"nested">> => #{ <<"b">> => <<"1">> }, - <<"c">> => <<"3">> - }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({matching, {input, Msg}, {output, Decoded}}), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). 
-``` - -### simple_signed_nested_message_test - -```erlang -simple_signed_nested_message_test(Codec, Opts) -> - Msg = - hb_message:commit( - #{ - <<"a">> => <<"1">>, - <<"nested">> => #{ <<"b">> => <<"1">> }, - <<"c">> => <<"3">> - }, - Opts, - Codec - ), - ?assert(hb_message:verify(Msg, all, Opts)), - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({matching, {input, Msg}, {output, Decoded}}), - MatchRes = hb_message:match(Msg, Decoded, primary, Opts), - ?event({match_result, MatchRes}), - ?assert(MatchRes), - ?assert(hb_message:verify(Decoded, all, Opts)). -``` - -### signed_nested_message_with_child_test - -```erlang -signed_nested_message_with_child_test(Codec, Opts) -> - Msg = #{ - <<"outer-a">> => <<"1">>, - <<"nested">> => - hb_message:commit( - #{ <<"inner-b">> => <<"1">>, <<"inner-list">> => [1, 2, 3] }, - Opts, - Codec - ), - <<"outer-c">> => <<"3">> - }, - hb_cache:write(Msg, Opts), - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({matching, {input, Msg}, {output, Decoded}}), - MatchRes = hb_message:match(Msg, Decoded, primary, Opts), - ?event({match_result, MatchRes}), - ?assert(MatchRes), - ?assert(hb_message:verify(Decoded, all, Opts)). -``` - -### nested_empty_map_test - -```erlang -nested_empty_map_test(Codec, Opts) -> - Msg = #{ <<"body">> => #{ <<"empty-map-test">> => #{}}}, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({matching, {input, Msg}, {output, Decoded}}), - MatchRes = hb_message:match(Msg, Decoded, strict, Opts), - ?event({match_result, MatchRes}), - ?assert(MatchRes). 
-``` - -### empty_body_test - -```erlang -empty_body_test(Codec, Opts) -> - Msg = #{ <<"body">> => <<>> }, - Signed = hb_message:commit(Msg, Opts, Codec), - Encoded = hb_message:convert(Signed, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({matching, {input, Msg}, {output, Decoded}}), - MatchRes = hb_message:match(Signed, Decoded, strict, Opts), - ?event({match_result, MatchRes}), - ?assert(MatchRes). -``` - -### nested_message_with_large_content_test - -Test that the data field is correctly managed when we have multiple - -```erlang -nested_message_with_large_content_test(Codec, Opts) -> - MainBodyKey = - case Codec of - <<"ans104@1.0">> -> <<"data">>; - _ -> <<"body">> - end, - Msg = #{ - <<"depth">> => <<"outer">>, - MainBodyKey => #{ - <<"map_item">> => - #{ - <<"depth">> => <<"inner">>, - <<"large_data_inner">> => << 0:((1 + 1024) * 8) >> - }, - <<"large_data_outer">> => << 0:((1 + 1024) * 8) >> - } - }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({matching, {input, Msg}, {output, Decoded}}), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). -``` - -### deeply_nested_message_with_content_test - -Test that we can convert a 3 layer nested message into a tx record and back. 
- -```erlang -deeply_nested_message_with_content_test(Codec, Opts) -> - MainBodyKey = - case Codec of - <<"ans104@1.0">> -> <<"data">>; - _ -> <<"body">> - end, - Msg = #{ - <<"depth">> => <<"outer">>, - MainBodyKey => #{ - <<"map_item">> => - #{ - <<"depth">> => <<"inner">>, - MainBodyKey => #{ - <<"depth">> => <<"innermost">>, - MainBodyKey => <<"DATA">> - } - } - } - }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({matching, {input, Msg}, {output, Decoded}}), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). -``` - -### deeply_nested_message_with_only_content - -```erlang -deeply_nested_message_with_only_content(Codec, Opts) -> - MainBodyKey = - case Codec of - <<"ans104@1.0">> -> <<"data">>; - _ -> <<"body">> - end, - Msg = #{ - <<"depth1">> => <<"outer">>, - MainBodyKey => #{ - MainBodyKey => #{ - MainBodyKey => <<"depth2-body">> - } - } - }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({matching, {input, Msg}, {output, Decoded}}), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). -``` - -### nested_structured_fields_test - -```erlang -nested_structured_fields_test(Codec, Opts) -> - NestedMsg = #{ <<"a">> => #{ <<"b">> => 1 } }, - Encoded = hb_message:convert(NestedMsg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({matching, {input, NestedMsg}, {output, Decoded}}), - ?assert(hb_message:match(NestedMsg, Decoded, strict, Opts)). 
-``` - -### nested_message_with_large_keys_test - -```erlang -nested_message_with_large_keys_test(Codec, Opts) -> - Msg = #{ - <<"a">> => <<"1">>, - <<"long_data">> => << 0:((1 + 1024) * 8) >>, - <<"nested">> => #{ <<"b">> => <<"1">> }, - <<"c">> => <<"3">> - }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({matching, {input, Msg}, {output, Decoded}}), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). -``` - -### signed_message_encode_decode_verify_test - -```erlang -signed_message_encode_decode_verify_test(Codec, Opts) -> - Msg = #{ - <<"test-1">> => <<"TEST VALUE 1">>, - <<"test-2">> => <<"TEST VALUE 2">>, - <<"test-3">> => <<"TEST VALUE 3">>, - <<"test-4">> => <<"TEST VALUE 4">>, - <<"test-5">> => <<"TEST VALUE 5">> - }, - SignedMsg = - hb_message:commit( - Msg, - Opts, - Codec - ), - ?event({signed_msg, SignedMsg}), - ?assertEqual(true, hb_message:verify(SignedMsg, all, Opts)), - Encoded = hb_message:convert(SignedMsg, Codec, <<"structured@1.0">>, Opts), - ?event({msg_encoded_as_codec, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({decoded, Decoded}), - ?assertEqual(true, hb_message:verify(Decoded, all, Opts)), - ?event({matching, {input, SignedMsg}, {encoded, Encoded}, {decoded, Decoded}}), - ?event({http, {string, dev_codec_httpsig_conv:encode_http_msg(SignedMsg, Opts)}}), - MatchRes = hb_message:match(SignedMsg, Decoded, strict, Opts), - ?event({match_result, MatchRes}), - ?assert(MatchRes). 
-``` - -### specific_order_signed_message_test - -```erlang -specific_order_signed_message_test(RawCodec, Opts) -> - Msg = #{ - <<"key-1">> => <<"DATA-1">>, - <<"key-2">> => <<"DATA-2">>, - <<"key-3">> => <<"DATA-3">> - }, - Codec = - if is_map(RawCodec) -> RawCodec; - true -> #{ <<"device">> => RawCodec } - end, - SignedMsg = - hb_message:commit( - Msg, - Opts, - Codec#{ <<"committed">> => [<<"key-3">>, <<"key-1">>, <<"key-2">>] } - ), - ?event({signed_msg, SignedMsg}), - ?event({http, {string, dev_codec_httpsig_conv:encode_http_msg(SignedMsg, Opts)}}), - ?assert(hb_message:verify(SignedMsg, all, Opts)). -``` - -### specific_order_deeply_nested_signed_message_test - -```erlang -specific_order_deeply_nested_signed_message_test(RawCodec, Opts) -> - Msg = #{ - <<"key-1">> => <<"DATA-1">>, - <<"key-2">> => #{ <<"body">> => [1,2] }, - <<"key-3">> => <<"DATA-3">>, - <<"key-4">> => #{ <<"body">> => [1,2,3,4] }, - <<"key-5">> => <<"DATA-5">> - }, - Codec = - if is_map(RawCodec) -> RawCodec; - true -> #{ <<"device">> => RawCodec } - end, - SignedMsg = - hb_message:commit( - Msg, - Opts, - Codec#{ - <<"committed">> => - [ - <<"key-3">>, - <<"key-5">>, - <<"key-1">>, - <<"key-2">>, - <<"key-4">> - ] - } - ), - ?event({signed_msg, SignedMsg}), - ?assert(hb_message:verify(SignedMsg, all, Opts)). 
-``` - -### complex_signed_message_test - -```erlang -complex_signed_message_test(Codec, Opts) -> - Msg = #{ - <<"data">> => <<"TEST DATA">>, - <<"deep-data">> => #{ - <<"data">> => <<"DEEP DATA">>, - <<"complex-key">> => 1337, - <<"list">> => [1,2,3] - } - }, - SignedMsg = - hb_message:commit( - Msg, - Opts, - Codec - ), - Encoded = hb_message:convert(SignedMsg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({decoded, Decoded}), - ?assertEqual(true, hb_message:verify(Decoded, all, Opts)), - ?event({matching, {input, SignedMsg}, {output, Decoded}}), - MatchRes = hb_message:match(SignedMsg, Decoded, strict, Opts), - ?event({match_result, MatchRes}), - ?assert(MatchRes). -``` - -### deep_multisignature_test - -```erlang -deep_multisignature_test() -> - % Only the `httpsig@1.0' codec supports multisignatures. -``` - -### deep_typed_message_id_test - -```erlang -deep_typed_message_id_test(Codec, Opts) -> - Msg = #{ - <<"data">> => <<"TEST DATA">>, - <<"deep-data">> => #{ - <<"data">> => <<"DEEP DATA">>, - <<"complex-key">> => 1337, - <<"list">> => [1,2,3] - } - }, - InitID = hb_message:id(Msg, none, Opts), - ?event({init_id, InitID}), - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - DecodedID = hb_message:id(Decoded, none, Opts), - ?event({decoded_id, DecodedID}), - ?event({stages, {init, Msg}, {encoded, Encoded}, {decoded, Decoded}}), - ?assertEqual( - InitID, - DecodedID - ). 
-``` - -### signed_deep_message_test - -```erlang -signed_deep_message_test(Codec, Opts) -> - Msg = #{ - <<"test-key">> => <<"TEST_VALUE">>, - <<"body">> => #{ - <<"nested-1">> => - #{ - <<"body">> => <<"NESTED BODY">>, - <<"nested-2">> => <<"NESTED-2">> - }, - <<"nested-3">> => <<"NESTED-3">> - } - }, - EncDec = - hb_message:convert( - hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - <<"structured@1.0">>, - Codec, - Opts - ), - ?event({enc_dec, EncDec}), - SignedMsg = - hb_message:commit( - EncDec, - Opts, - Codec - ), - ?event({signed_msg, SignedMsg}), - {ok, Res} = dev_message:verify(SignedMsg, #{ <<"committers">> => <<"all">>}, Opts), - ?event({verify_res, Res}), - ?assertEqual(true, hb_message:verify(SignedMsg, all, Opts)), - ?event({verified, SignedMsg}), - Encoded = hb_message:convert(SignedMsg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({decoded, Decoded}), - {ok, DecodedRes} = - dev_message:verify( - Decoded, - #{ <<"committers">> => <<"all">>}, - Opts - ), - ?event({verify_decoded_res, DecodedRes}), - MatchRes = hb_message:match(SignedMsg, Decoded, strict, Opts), - ?event({match_result, MatchRes}), - ?assert(MatchRes). -``` - -### signed_list_test - -```erlang -signed_list_test(Codec, Opts) -> - Msg = #{ <<"key-with-list">> => [1.0, 2.0, 3.0] }, - Signed = hb_message:commit(Msg, Opts, Codec), - ?assert(hb_message:verify(Signed, all, Opts)), - Encoded = hb_message:convert(Signed, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({decoded, Decoded}), - ?assert(hb_message:verify(Decoded, all, Opts)), - ?assert(hb_message:match(Signed, Decoded, strict, Opts)). 
-``` - -### unsigned_id_test - -```erlang -unsigned_id_test(Codec, Opts) -> - Msg = #{ <<"data">> => <<"TEST_DATA">> }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?assertEqual( - dev_message:id(Decoded, #{ <<"committers">> => <<"none">>}, Opts), - dev_message:id(Msg, #{ <<"committers">> => <<"none">>}, Opts) - ). -``` - -### message_with_simple_embedded_list_test - -```erlang -message_with_simple_embedded_list_test(Codec, Opts) -> - Msg = #{ <<"list">> => [<<"value-1">>, <<"value-2">>, <<"value-3">>] }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). -``` - -### empty_string_in_nested_tag_test - -```erlang -empty_string_in_nested_tag_test(Codec, Opts) -> - Msg = - #{ - <<"dev">> => - #{ - <<"stderr">> => <<"aa">>, - <<"stdin">> => <<"b">>, - <<"stdout">> => <<"c">> - } - }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). 
-``` - -### hashpath_sign_verify_test - -```erlang -hashpath_sign_verify_test(Codec, Opts) -> - Msg = - #{ - <<"test_key">> => <<"TEST_VALUE">>, - <<"body">> => #{ - <<"nested_key">> => - #{ - <<"body">> => <<"NESTED_DATA">>, - <<"nested_key">> => <<"NESTED_VALUE">> - }, - <<"nested_key2">> => <<"NESTED_VALUE2">> - }, - <<"priv">> => #{ - <<"hashpath">> => - hb_path:hashpath( - hb_util:human_id(crypto:strong_rand_bytes(32)), - hb_util:human_id(crypto:strong_rand_bytes(32)), - fun hb_crypto:sha256_chain/2, - #{} - ) - } - }, - ?event({msg, {explicit, Msg}}), - SignedMsg = hb_message:commit(Msg, Opts, Codec), - ?event({signed_msg, {explicit, SignedMsg}}), - {ok, Res} = dev_message:verify(SignedMsg, #{ <<"committers">> => <<"all">>}, Opts), - ?event({verify_res, {explicit, Res}}), - ?assert(hb_message:verify(SignedMsg, all, Opts)), - ?event({verified, {explicit, SignedMsg}}), - Encoded = hb_message:convert(SignedMsg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({decoded, Decoded}), - ?assert(hb_message:verify(Decoded, all, Opts)), - ?assert( - hb_message:match( - SignedMsg, - Decoded, - strict, - Opts - ) - ). -``` - -### normalize_commitments_test - -```erlang -normalize_commitments_test(Codec, Opts) -> - Msg = #{ - <<"a">> => #{ - <<"b">> => #{ - <<"c">> => 1, - <<"d">> => #{ - <<"e">> => 2 - }, - <<"f">> => 3 - }, - <<"g">> => 4 - }, - <<"h">> => 5 - }, - NormMsg = hb_message:normalize_commitments(Msg, Opts), - ?event({norm_msg, NormMsg}), - ?assert(hb_message:verify(NormMsg, all, Opts)), - ?assert(maps:is_key(<<"commitments">>, NormMsg)), - ?assert(maps:is_key(<<"commitments">>, maps:get(<<"a">>, NormMsg))), - ?assert( - maps:is_key( - <<"commitments">>, - maps:get(<<"b">>, maps:get(<<"a">>, NormMsg)) - ) - ). 
-``` - -### signed_message_with_derived_components_test - -```erlang -signed_message_with_derived_components_test(Codec, Opts) -> - Msg = #{ - <<"path">> => <<"/test">>, - <<"authority">> => <<"example.com">>, - <<"scheme">> => <<"https">>, - <<"method">> => <<"GET">>, - <<"target-uri">> => <<"/test">>, - <<"request-target">> => <<"/test">>, - <<"status">> => <<"200">>, - <<"reason-phrase">> => <<"OK">>, - <<"body">> => <<"TEST_DATA">>, - <<"content-digest">> => <<"TEST_DIGEST">>, - <<"normal">> => <<"hello">> - }, - SignedMsg = - hb_message:commit( - Msg, - Opts, - Codec - ), - ?event({signed_msg, SignedMsg}), - ?assert(hb_message:verify(SignedMsg, all, Opts)), - Encoded = hb_message:convert(SignedMsg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({decoded, Decoded}), - ?assert(hb_message:verify(Decoded, all, Opts)), - ?assert( - hb_message:match( - SignedMsg, - Decoded, - strict, - Opts - ) - ). -``` - -### committed_keys_test - -```erlang -committed_keys_test(Codec, Opts) -> - Msg = #{ <<"a">> => 1, <<"b">> => 2, <<"c">> => 3 }, - Signed = hb_message:commit(Msg, Opts, Codec), - CommittedKeys = hb_message:committed(Signed, all, Opts), - ?event({committed_keys, CommittedKeys}), - ?assert(hb_message:verify(Signed, all, Opts)), - ?assert(lists:member(<<"a">>, CommittedKeys)), - ?assert(lists:member(<<"b">>, CommittedKeys)), - ?assert(lists:member(<<"c">>, CommittedKeys)), - MsgToFilter = Signed#{ <<"bad-key">> => <<"BAD VALUE">> }, - ?assert( - not lists:member( - <<"bad-key">>, - hb_message:committed(MsgToFilter, all, Opts) - ) - ). 
-``` - -### committed_empty_keys_test - -```erlang -committed_empty_keys_test(Codec, Opts) -> - Msg = #{ - <<"very">> => <<>>, - <<"exciting">> => #{}, - <<"values">> => [], - <<"non-empty">> => <<"TEST">> - }, - Signed = hb_message:commit(Msg, Opts, Codec), - ?assert(hb_message:verify(Signed, all, Opts)), - CommittedKeys = hb_message:committed(Signed, all, Opts), - ?event({committed_keys, CommittedKeys}), - ?event({signed, Signed}), - ?assert(lists:member(<<"very">>, CommittedKeys)), - ?assert(lists:member(<<"exciting">>, CommittedKeys)), - ?assert(lists:member(<<"values">>, CommittedKeys)), - ?assert(lists:member(<<"non-empty">>, CommittedKeys)). -``` - -### deeply_nested_committed_keys_test - -```erlang -deeply_nested_committed_keys_test() -> - Opts = (test_opts(normal))#{ - store => [ - #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST">> - } - ] - }, - Msg = #{ - <<"a">> => 1, - <<"b">> => #{ <<"c">> => #{ <<"d">> => <<0:((1 + 1024) * 1024)>> } }, - <<"e">> => <<0:((1 + 1024) * 1024)>> - }, - Signed = hb_message:commit(Msg, Opts, <<"httpsig@1.0">>), - {ok, WithOnlyCommitted} = hb_message:with_only_committed(Signed, Opts), - Committed = hb_message:committed(Signed, all, Opts), - ToCompare = hb_maps:without([<<"commitments">>], WithOnlyCommitted), - ?event( - {msgs, - {base, Msg}, - {signed, Signed}, - {committed, Committed}, - {with_only_committed, WithOnlyCommitted}, - {to_compare, ToCompare} - } - ), - ?assert( - hb_message:match( - Msg, - ToCompare, - strict, - Opts - ) - ). 
-``` - -### signed_with_inner_signed_message_test - -```erlang -signed_with_inner_signed_message_test(Codec, Opts) -> - Msg = - hb_message:commit( - #{ - <<"a">> => 1, - <<"inner">> => - hb_maps:merge( - InnerSigned = - hb_message:commit( - #{ - <<"c">> => <<"abc">>, - <<"e">> => 5 - %<<"body">> => <<"inner-body">> - % <<"inner-2">> => #{ - % <<"body">> => <<"inner-2-body">> - % } - }, - Opts, - Codec - ), - % Uncommitted keys that should be ripped out of the inner - % message by `with_only_committed'. These should still be - % present in the `with_only_committed' outer message. -``` - -### large_body_committed_keys_test - -```erlang -large_body_committed_keys_test(Codec, Opts) -> - case Codec of - <<"httpsig@1.0">> -> - Msg = #{ - <<"a">> => 1, - <<"b">> => 2, - <<"c">> => #{ <<"d">> => << 1:((1 + 1024) * 1024) >> } - }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({decoded, Decoded}), - Signed = hb_message:commit(Decoded, Opts, Codec), - ?event({signed, Signed}), - CommittedKeys = hb_message:committed(Signed, all, Opts), - ?assert(lists:member(<<"a">>, CommittedKeys)), - ?assert(lists:member(<<"b">>, CommittedKeys)), - ?assert(lists:member(<<"c">>, CommittedKeys)), - MsgToFilter = Signed#{ <<"bad-key">> => <<"BAD VALUE">> }, - ?assert( - not lists:member( - <<"bad-key">>, - hb_message:committed(MsgToFilter, all, Opts) - ) - ); - _ -> - skip - end. 
-``` - -### sign_node_message_test - -```erlang -sign_node_message_test(Codec, Opts) -> - Msg = hb_message:commit(hb_opts:default_message_with_env(), Opts, Codec), - ?event({committed, Msg}), - ?assert(hb_message:verify(Msg, all, Opts)), - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({final, Decoded}), - MatchRes = hb_message:match(Msg, Decoded, strict, Opts), - ?event({match_result, MatchRes}), - ?assert(MatchRes), - ?assert(hb_message:verify(Decoded, all, Opts)). -``` - -### nested_body_list_test - -```erlang -nested_body_list_test(Codec, Opts) -> - Msg = #{ - <<"body">> => - [ - #{ - <<"test-key">> => - <<"TEST VALUE #", (integer_to_binary(X))/binary>> - } - || - X <- lists:seq(1, 3) - ] - }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - ?event(encoded, {encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({decoded, Decoded}), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)). -``` - -### recursive_nested_list_test - -```erlang -recursive_nested_list_test(Codec, Opts) -> - % This test is to ensure that the codec can handle arbitrarily deep nested - % lists. 
-``` - -### priv_survives_conversion_test - -```erlang -priv_survives_conversion_test(<<"ans104@1.0">>, _Opts) -> skip; -``` - -### priv_survives_conversion_test - -```erlang -priv_survives_conversion_test(<<"json@1.0">>, _Opts) -> skip; -``` - -### priv_survives_conversion_test - -```erlang -priv_survives_conversion_test(#{ <<"device">> := <<"ans104@1.0">> }, _Opts) -> - skip; -``` - -### priv_survives_conversion_test - -```erlang -priv_survives_conversion_test(#{ <<"device">> := <<"json@1.0">> }, _Opts) -> - skip; -``` - -### priv_survives_conversion_test - -```erlang -priv_survives_conversion_test(Codec, Opts) -> - Msg = #{ - <<"data">> => <<"TEST_DATA">>, - <<"priv">> => #{ <<"test_key">> => <<"TEST_VALUE">> } - }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({decoded, Decoded}), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)), - ?assertMatch( - #{ <<"test_key">> := <<"TEST_VALUE">> }, - maps:get(<<"priv">>, Decoded) - ). -``` - -### encode_balance_table - -```erlang -encode_balance_table(Size, Codec, Opts) -> - Msg = - #{ - hb_util:encode(crypto:strong_rand_bytes(32)) => - rand:uniform(1_000_000_000_000_000) - || - _ <- lists:seq(1, Size) - }, - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, {explicit, Encoded}}), - Decoded = - hb_message:uncommitted( - hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - Opts - ), - ?event({decoded, {explicit, Decoded}}), - ?assert(hb_message:match(Msg, Decoded, if_present, Opts)). -``` - -### encode_small_balance_table_test - -```erlang -encode_small_balance_table_test(Codec, Opts) -> - encode_balance_table(5, Codec, Opts). 
-``` - -### encode_large_balance_table_test - -```erlang -encode_large_balance_table_test(<<"ans104@1.0">>, _Opts) -> - skip; -``` - -### encode_large_balance_table_test - -```erlang -encode_large_balance_table_test(#{ <<"device">> := <<"ans104@1.0">> }, _Opts) -> - skip; -``` - -### encode_large_balance_table_test - -```erlang -encode_large_balance_table_test(Codec, Opts) -> - encode_balance_table(1000, Codec, Opts). -``` - -### sign_links_test - -```erlang -sign_links_test(#{ <<"bundle">> := true }, _Opts) -> - skip; -``` - -### sign_links_test - -```erlang -sign_links_test(Codec, Opts) -> - % Make a message with definitively non-accessible lazy-loadable links. Sign - % it, ensuring that we can produce signatures and IDs without having the - % data directly in memory. -``` - -### bundled_and_unbundled_ids_differ_test - -```erlang -bundled_and_unbundled_ids_differ_test(Codec = #{ <<"bundle">> := true }, Opts) -> - SignatureType = - case maps:get(<<"device">>, Codec, undefined) of - <<"ans104@1.0">> -> <<"rsa-pss-sha256">>; - _ -> <<"hmac-sha256">> - end, - Msg = #{ - <<"immediate-key">> => <<"immediate-value">>, - <<"nested">> => #{ - <<"immediate-key-2">> => <<"immediate-value-2">> - } - }, - SignedNoBundle = - hb_message:commit( - Msg, - Opts, - maps:without([<<"bundle">>], Codec) - ), - SignedBundled = hb_message:commit(Msg, Opts, Codec), - ?event({signed_no_bundle, SignedNoBundle}), - ?event({signed_bundled, SignedBundled}), - {ok, UnbundledID, _} = - hb_message:commitment( - #{ <<"type">> => SignatureType }, - SignedNoBundle, - Opts - ), - {ok, BundledID, _} = - hb_message:commitment( - #{ <<"type">> => SignatureType }, - SignedBundled, - Opts - ), - ?event({unbundled_id, UnbundledID}), - ?event({bundled_id, BundledID}), - ?assertNotEqual(UnbundledID, BundledID); -``` - -### bundled_and_unbundled_ids_differ_test - -```erlang -bundled_and_unbundled_ids_differ_test(_Codec, _Opts) -> - skip. 
-``` - -### id_of_linked_message_test - -```erlang -id_of_linked_message_test(#{ <<"bundle">> := true }, _Opts) -> - skip; -``` - -### id_of_linked_message_test - -```erlang -id_of_linked_message_test(Codec, Opts) -> - Msg = #{ - <<"immediate-key">> => <<"immediate-value">>, - <<"link-key">> => - {link, hb_util:human_id(crypto:strong_rand_bytes(32)), #{ - <<"type">> => <<"link">>, - <<"lazy">> => false - }} - }, - UnsignedID = hb_message:id(Msg, Opts), - ?event({id, UnsignedID}), - EncMsg = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - DecMsg = hb_message:convert(EncMsg, <<"structured@1.0">>, Codec, Opts), - UnsignedID2 = hb_message:id(DecMsg, Opts), - ?assertEqual(UnsignedID, UnsignedID2). -``` - -### sign_deep_message_from_lazy_cache_read_test - -```erlang -sign_deep_message_from_lazy_cache_read_test(#{ <<"bundle">> := true }, _Opts) -> - skip; -``` - -### sign_deep_message_from_lazy_cache_read_test - -```erlang -sign_deep_message_from_lazy_cache_read_test(Codec, Opts) -> - Msg = #{ - <<"immediate-key">> => <<"immediate-value">>, - <<"link-key">> => #{ - <<"immediate-key-2">> => <<"link-value">>, - <<"link-key-2">> => #{ - <<"immediate-key-3">> => <<"link-value-2">> - } - } - }, - % Write the message to the store to ensure that we get lazy-loadable links. -``` - -### id_of_deep_message_and_link_message_match_test - -```erlang -id_of_deep_message_and_link_message_match_test(_Codec, Opts) -> - Msg = #{ - <<"immediate-key">> => <<"immediate-value">>, - <<"link-key">> => #{ - <<"immediate-key-2">> => <<"immediate-value-2">>, - <<"link-key-2">> => #{ - <<"immediate-key-3">> => <<"immediate-value-3">> - } - } - }, - Linkified = hb_link:normalize(Msg, offload, Opts), - ?event(linkify, {test_recvd_linkified, {msg, Linkified}}), - BaseID = hb_message:id(Msg, Opts), - ?event(linkify, {test_recvd_nonlink_id, {id, BaseID}}), - LinkID = hb_message:id(Linkified, Opts), - ?event(linkify, {test_recvd_link_id, {id, LinkID}}), - ?assertEqual(BaseID, LinkID). 
-``` - -### signed_non_bundle_is_bundlable_test - -```erlang -signed_non_bundle_is_bundlable_test( - Codec = #{ <<"device">> := <<"httpsig@1.0">>, <<"bundle">> := true }, - Opts) -> - Msg = - hb_message:commit( - #{ - <<"target">> => hb_util:human_id(crypto:strong_rand_bytes(32)), - <<"type">> => <<"Message">>, - <<"function">> => <<"fac">>, - <<"parameters">> => [5.0] - }, - Opts, - maps:get(<<"device">>, Codec) - ), - Encoded = - hb_message:convert( - Msg, - Codec, - <<"structured@1.0">>, - Opts - ), - Decoded = - hb_message:convert( - Encoded, - <<"structured@1.0">>, - maps:get(<<"device">>, Codec), - Opts - ), - ?assert(hb_message:match(Msg, Decoded, strict, Opts)), - ?assert(hb_message:verify(Decoded, all, Opts)); -``` - -### signed_non_bundle_is_bundlable_test - -```erlang -signed_non_bundle_is_bundlable_test(_Codec, _Opts) -> - skip. -``` - -### find_multiple_commitments_test_disabled - -```erlang -find_multiple_commitments_test_disabled() -> - Opts = test_opts(normal), - Store = hb_opts:get(store, no_store, Opts), - hb_store:reset(Store), - Msg = #{ - <<"a">> => 1, - <<"b">> => 2, - <<"c">> => 3 - }, - Sig1 = hb_message:commit(Msg, Opts#{ priv_wallet => ar_wallet:new() }), - {ok, _} = hb_cache:write(Sig1, Opts), - Sig2 = hb_message:commit(Msg, Opts#{ priv_wallet => ar_wallet:new() }), - {ok, _} = hb_cache:write(Sig2, Opts), - {ok, ReadMsg} = hb_cache:read(hb_message:id(Msg, none, Opts), Opts), - LoadedCommitments = hb_cache:ensure_all_loaded(ReadMsg, Opts), - ?event(debug_commitments, {read, LoadedCommitments}), - ok. 
-``` - -### bundled_ordering_test - -Ensure that a httpsig@1.0 message which is bundled and requests an - -```erlang -bundled_ordering_test(Codec = #{ <<"bundle">> := true }, Opts) -> - % Opts = (test_opts(normal))#{ - % store => [ - % #{ <<"store-module">> => hb_store_fs, <<"name">> => <<"cache-TEST">> } - % ] - % }, - Msg = - hb_message:commit( - #{ - <<"a">> => <<"1">>, - <<"b">> => <<"2">>, - <<"b-2">> => #{ <<"nested">> => #{ <<"n">> => <<"2">> } }, - <<"c">> => <<"3">>, - <<"c-2">> => #{ <<"nested">> => #{ <<"n">> => <<"3">> } }, - <<"d">> => <<"4">> - }, - Opts, - Codec#{ - <<"committed">> => [ - <<"a">>, - <<"b">>, - <<"b-2">>, - <<"c">>, - <<"c-2">>, - <<"d">> - ] - } - ), - ?event({committed, Msg}), - Encoded = hb_message:convert(Msg, Codec, <<"structured@1.0">>, Opts), - ?event({encoded, Encoded}), - ?event({http, {string, dev_codec_httpsig_conv:encode_http_msg(Msg, Opts)}}), - Decoded = hb_message:convert(Encoded, <<"structured@1.0">>, Codec, Opts), - ?event({matching, {input, Msg}, {output, Decoded}}), - MatchRes = hb_message:match(Msg, Decoded, primary, Opts), - ?event({match_result, MatchRes}), - ?assert(MatchRes), - ?assert(hb_message:verify(Decoded, all, Opts)); -``` - -### bundled_ordering_test - -Ensure that a httpsig@1.0 message which is bundled and requests an - -```erlang -bundled_ordering_test(_Codec, _Opts) -> -``` - ---- - -*Generated from [hb_message_test_vectors.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_message_test_vectors.erl)* diff --git a/docs/book/src/hb_metrics_collector.erl.md b/docs/book/src/hb_metrics_collector.erl.md deleted file mode 100644 index 3ca4073fc..000000000 --- a/docs/book/src/hb_metrics_collector.erl.md +++ /dev/null @@ -1,76 +0,0 @@ -# hb_metrics_collector - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_metrics_collector.erl) - -## Exported Functions - - ---- - -### deregister_cleanup - -```erlang -deregister_cleanup(_) -> ok. 
-``` - -### collect_mf - -```erlang -collect_mf(_Registry, Callback) -> - {Uptime, _} = erlang:statistics(wall_clock), - Callback( - create_gauge( - process_uptime_seconds, - "The number of seconds the Erlang process has been up.", - Uptime - ) - ), - SystemLoad = cpu_sup:avg5(), - Callback( - create_gauge( - system_load, - "The load values are proportional to how long" - " time a runnable Unix process has to spend in the run queue" - " before it is scheduled. Accordingly, higher values mean" - " more system load", - SystemLoad - ) - ), - ok. -``` - -### collect_metrics - -```erlang -collect_metrics(system_load, SystemLoad) -> - %% Return the gauge metric with no labels - prometheus_model_helpers:gauge_metrics( - [ - {[], SystemLoad} - ] - ); -``` - -### collect_metrics - -```erlang -collect_metrics(process_uptime_seconds, Uptime) -> - %% Convert the uptime from milliseconds to seconds - UptimeSeconds = Uptime / 1000, - %% Return the gauge metric with no labels - prometheus_model_helpers:gauge_metrics( - [ - {[], UptimeSeconds} - ] - ). -``` - -### create_gauge - -```erlang -create_gauge(Name, Help, Data) -> -``` - ---- - -*Generated from [hb_metrics_collector.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_metrics_collector.erl)* diff --git a/docs/book/src/hb_name.erl.md b/docs/book/src/hb_name.erl.md deleted file mode 100644 index 11cc02bff..000000000 --- a/docs/book/src/hb_name.erl.md +++ /dev/null @@ -1,292 +0,0 @@ -# hb_name - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_name.erl) - -An abstraction for name registration/deregistration in HyperBEAM. -Its motivation is to provide a way to register names that are not necessarily -atoms, but can be any term (for example: hashpaths or `process@1.0` IDs). -An important characteristic of these functions is that they are atomic: -There can only ever be one registrant for a given name at a time. 
- ---- - -## Exported Functions - -- `all/0` -- `lookup/1` -- `register/1` -- `register/2` -- `start/0` -- `unregister/1` - ---- - -### start - -An abstraction for name registration/deregistration in HyperBEAM. - -```erlang -start() -> - try ets:info(?NAME_TABLE) of - undefined -> start_ets(); - _ -> ok - catch - error:badarg -> start_ets() - end. -``` - -### start_ets - -```erlang -start_ets() -> - ets:new(?NAME_TABLE, [ - named_table, - public, - {keypos, 1}, - {write_concurrency, true}, % Safe as key-writes are atomic. -``` - -### register - -Register a name. If the name is already registered, the registration - -```erlang -register(Name) -> - start(), - ?MODULE:register(Name, self()). -``` - -### register - -```erlang -register(Name, Pid) when is_atom(Name) -> - try erlang:register(Name, Pid) of - true -> ok - catch - error:badarg -> error % Name already registered - end; -``` - -### register - -```erlang -register(Name, Pid) -> - start(), - case ets:insert_new(?NAME_TABLE, {Name, Pid}) of - true -> ok; - false -> error - end. -``` - -### unregister - -Unregister a name. - -```erlang -unregister(Name) when is_atom(Name) -> - catch erlang:unregister(Name), - ets:delete(?NAME_TABLE, Name), % Cleanup if atom was in ETS - ok; -``` - -### unregister - -Unregister a name. - -```erlang -unregister(Name) -> - start(), - ets:delete(?NAME_TABLE, Name), - ok. -``` - -### lookup - -Lookup a name -> PID. - -```erlang -lookup(Name) when is_atom(Name) -> - case whereis(Name) of - undefined -> - % Check ETS for atom-based names - start(), - ets_lookup(Name); - Pid -> Pid - end; -``` - -### lookup - -Lookup a name -> PID. - -```erlang -lookup(Name) -> - start(), - ets_lookup(Name). -``` - -### ets_lookup - -```erlang -ets_lookup(Name) -> - case ets:lookup(?NAME_TABLE, Name) of - [{Name, Pid}] -> - case is_process_alive(Pid) of - true -> Pid; - false -> - ets:delete(?NAME_TABLE, Name), - undefined - end; - [] -> undefined - end. -``` - -### all - -List the names in the registry. 
- -```erlang -all() -> - Registered = - ets:tab2list(?NAME_TABLE) ++ - lists:filtermap( - fun(Name) -> - case whereis(Name) of - undefined -> false; - Pid -> {true, {Name, Pid}} - end - end, - erlang:registered() - ), - lists:filter( - fun({_, Pid}) -> is_process_alive(Pid) end, - Registered - ). -``` - -### basic_test - -```erlang -basic_test(Term) -> - ?assertEqual(ok, hb_name:register(Term)), - ?assertEqual(self(), hb_name:lookup(Term)), - ?assertEqual(error, hb_name:register(Term)), - hb_name:unregister(Term), - ?assertEqual(undefined, hb_name:lookup(Term)). -``` - -### atom_test - -```erlang -atom_test() -> - basic_test(atom). -``` - -### term_test - -```erlang -term_test() -> - basic_test({term, os:timestamp()}). -``` - -### concurrency_test - -```erlang -concurrency_test() -> - Name = {concurrent_test, os:timestamp()}, - SuccessCount = length([R || R <- spawn_test_workers(Name), R =:= ok]), - ?assertEqual(1, SuccessCount), - ?assert(is_pid(hb_name:lookup(Name))), - hb_name:unregister(Name). -``` - -### spawn_test_workers - -```erlang -spawn_test_workers(Name) -> - Self = self(), - Names = - [ - case Name of - random -> {random_name, rand:uniform(1000000)}; - _ -> Name - end - || - _ <- lists:seq(1, ?CONCURRENT_REGISTRATIONS) - ], - Pids = - [ - spawn( - fun() -> - Result = hb_name:register(ProcName), - Self ! {result, self(), Result}, - % Stay alive to prevent cleanup for a period. -``` - -### dead_process_test - -```erlang -dead_process_test() -> - Name = {dead_process_test, os:timestamp()}, - {Pid, Ref} = spawn_monitor(fun() -> hb_name:register(Name), ok end), - receive {'DOWN', Ref, process, Pid, _} -> ok end, - ?assertEqual(undefined, hb_name:lookup(Name)). 
-``` - -### cleanup_test - -```erlang -cleanup_test() -> - {setup, - fun() -> - Name = {cleanup_test, os:timestamp()}, - {Pid, Ref} = spawn_monitor(fun() -> timer:sleep(1000) end), - ?assertEqual(ok, hb_name:register(Name, Pid)), - {Name, Pid, Ref} - end, - fun({Name, _, _}) -> - hb_name:unregister(Name) - end, - fun({Name, Pid, Ref}) -> - {"Auto-cleanup on process death", - fun() -> - exit(Pid, kill), - receive {'DOWN', Ref, process, Pid, _} -> ok end, - ?assertEqual(undefined, wait_for_cleanup(Name, 10)) - end} - end - }. -``` - -### wait_for_cleanup - -```erlang -wait_for_cleanup(Name, Retries) -> - case Retries > 0 of - true -> - case hb_name:lookup(Name) of - undefined -> undefined; - _ -> - timer:sleep(100), - wait_for_cleanup(Name, Retries - 1) - end; - false -> undefined - end. -``` - -### all_test - -```erlang -all_test() -> - hb_name:register(test_name, self()), - ?assert(lists:member({test_name, self()}, hb_name:all())), - BaseRegistered = length(hb_name:all()), - spawn_test_workers(random), - ?assertEqual(BaseRegistered + ?CONCURRENT_REGISTRATIONS, length(hb_name:all())), - timer:sleep(1000), - ?assertEqual(BaseRegistered, length(hb_name:all())). -``` - ---- - -*Generated from [hb_name.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_name.erl)* diff --git a/docs/book/src/hb_opts.erl.md b/docs/book/src/hb_opts.erl.md deleted file mode 100644 index 499686c9b..000000000 --- a/docs/book/src/hb_opts.erl.md +++ /dev/null @@ -1,683 +0,0 @@ -# hb_opts - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_opts.erl) - -A module for interacting with local and global options inside -HyperBEAM. Options are set globally, but can also be overridden using an -an optional local `Opts` map argument. Many functions across the HyperBEAM -environment accept an `Opts` argument, which can be used to customize -behavior. -Options set in an `Opts` map must _never_ change the behavior of a function -that should otherwise be deterministic. 
Doing so may lead to loss of funds -by the HyperBEAM node operator, as the results of their executions will be -different than those of other node operators. If they are economically -staked on the correctness of these results, they may experience punishments -for non-verifiable behavior. Instead, if a local node setting makes -deterministic behavior impossible, the caller should fail the execution -with a refusal to execute. - ---- - -## Exported Functions - -- `as/2` -- `check_required_opts/2` -- `default_message_with_env/0` -- `default_message/0` -- `ensure_node_history/2` -- `get/1` -- `get/2` -- `get/3` -- `identities/1` -- `load_bin/2` -- `load/1` -- `load/2` -- `mimic_default_types/3` - ---- - -### default_message_with_env - -A module for interacting with local and global options inside -Return the default message with all environment variables set. - -```erlang -default_message_with_env() -> - maps:fold( - fun(Key, _Spec, NodeMsg) -> - case global_get(Key, undefined, #{}) of - undefined -> NodeMsg; - Value -> NodeMsg#{ Key => Value } - end - end, - default_message(), - ?ENV_KEYS - ). -``` - -### default_message - -The default configuration options of the hyperbeam node. - -```erlang -default_message() -> - #{ - %%%%%%%% Functional options %%%%%%%% - hb_config_location => <<"config.flat">>, - initialized => true, - %% What HTTP client should the node use? - %% Options: gun, httpc - http_client => gun, - %% Scheduling mode: Determines when the SU should inform the recipient - %% that an assignment has been scheduled for a message. -``` - -### get - -Get an option from the global options, optionally overriding with a - -```erlang -get(Key) -> ?MODULE:get(Key, undefined). -``` - -### get - -Get an option from the global options, optionally overriding with a - -```erlang -get(Key, Default) -> ?MODULE:get(Key, Default, #{}). 
-``` - -### get - -Get an option from the global options, optionally overriding with a - -```erlang -get(Key, Default, Opts) when is_binary(Key) -> - try binary_to_existing_atom(Key, utf8) of - AtomKey -> do_get(AtomKey, Default, Opts) - catch - error:badarg -> do_get(Key, Default, Opts) - end; -``` - -### get - -Get an option from the global options, optionally overriding with a - -```erlang -get(Key, Default, Opts) -> - do_get(Key, Default, Opts). -``` - -### do_get - -```erlang -do_get(Key, Default, Opts = #{ <<"only">> := Only }) -> - do_get(Key, Default, maps:remove(<<"only">>, Opts#{ only => Only })); -``` - -### do_get - -```erlang -do_get(Key, Default, Opts = #{ <<"prefer">> := Prefer }) -> - do_get(Key, Default, maps:remove(<<"prefer">>, Opts#{ prefer => Prefer })); -``` - -### do_get - -```erlang -do_get(Key, Default, Opts = #{ only := local }) -> - case maps:find(Key, Opts) of - {ok, Value} -> Value; - error -> - Default - end; -``` - -### do_get - -```erlang -do_get(Key, Default, Opts = #{ only := global }) -> - case global_get(Key, hb_opts_not_found, Opts) of - hb_opts_not_found -> Default; - Value -> Value - end; -``` - -### do_get - -```erlang -do_get(Key, Default, Opts = #{ prefer := global }) -> - case do_get(Key, hb_opts_not_found, #{ only => global }) of - hb_opts_not_found -> do_get(Key, Default, Opts#{ only => local }); - Value -> Value - end; -``` - -### do_get - -```erlang -do_get(Key, Default, Opts = #{ prefer := local }) -> - case do_get(Key, hb_opts_not_found, Opts#{ only => local }) of - hb_opts_not_found -> - do_get(Key, Default, Opts#{ only => global }); - Value -> Value - end; -``` - -### do_get - -```erlang -do_get(Key, Default, Opts) -> - % No preference was set in Opts, so we default to local. -``` - -### global_get - -Get an environment variable or configuration key. 
Depending on whether - -```erlang -global_get(Key, Default, Opts) -> - case erlang:get({processed_env, Key}) of - {cached, Value} -> Value; - undefined -> - % Thee value is not cached, so we need to process it. -``` - -### cached_os_env - -Cache the result of os:getenv/1 in the process dictionary, as it never - -```erlang -cached_os_env(Key, DefaultValue) -> - case erlang:get({os_env, Key}) of - {cached, false} -> DefaultValue; - {cached, Value} -> Value; - undefined -> - % The process dictionary returns `undefined' for a key that is not - % set, so we need to check the environment and store the result. -``` - -### normalize_default - -Get an option from environment variables, optionally consulting the - -```erlang -normalize_default({conditional, Feature, IfTest, Else}) -> - case hb_features:enabled(Feature) of - true -> IfTest; - false -> Else - end; -``` - -### normalize_default - -Get an option from environment variables, optionally consulting the -An abstraction for looking up configuration variables. In the future, - -```erlang -normalize_default(Default) -> Default. -``` - -### config_lookup - -Get an option from environment variables, optionally consulting the -An abstraction for looking up configuration variables. In the future, -Parse a `flat@1.0` encoded file into a map, matching the types of the - -```erlang -config_lookup(Key, Default, _Opts) -> maps:get(Key, default_message(), Default). -``` - -### load - -Get an option from environment variables, optionally consulting the -An abstraction for looking up configuration variables. In the future, -Parse a `flat@1.0` encoded file into a map, matching the types of the - -```erlang -load(Path) -> load(Path, #{}). -``` - -### load - -Get an option from environment variables, optionally consulting the -An abstraction for looking up configuration variables. 
In the future, -Parse a `flat@1.0` encoded file into a map, matching the types of the - -```erlang -load(Path, Opts) -> - {ok, Device} = path_to_device(Path), - case file:read_file(Path) of - {ok, Bin} -> - load_bin(Device, Bin, Opts); - _ -> {error, not_found} - end. -``` - -### path_to_device - -Convert a path to a device from its file extension. If no extension is - -```erlang -path_to_device(Path) -> - case binary:split(hb_util:bin(Path), <<".">>, []) of - [_, Extension] -> - ?event(debug_node_msg, - {path_to_device, - {path, Path}, - {extension, Extension} - } - ), - extension_to_device(Extension); - _ -> {ok, <<"flat@1.0">>} - end. -``` - -### extension_to_device - -Convert a file extension to a device name. - -```erlang -extension_to_device(Ext) -> - extension_to_device(Ext, maps:get(preloaded_devices, default_message())). -``` - -### extension_to_device - -```erlang -extension_to_device(_, []) -> {error, not_found}; -``` - -### extension_to_device - -```erlang -extension_to_device(Ext, [#{ <<"name">> := Name }|Rest]) -> - case binary:match(Name, Ext) of - nomatch -> extension_to_device(Ext, Rest); - {0, _} -> {ok, Name} - end. -``` - -### load_bin - -Parse a given binary with a device (defaulting to `flat@1.0`) into a - -```erlang -load_bin(Bin, Opts) -> - load_bin(<<"flat@1.0">>, Bin, Opts). -``` - -### load_bin - -Parse a given binary with a device (defaulting to `flat@1.0`) into a - -```erlang -load_bin(<<"flat@1.0">>, Bin, Opts) -> - % Trim trailing whitespace from each line in the file. -``` - -### load_bin - -```erlang -load_bin(Device, Bin, Opts) -> - try - { - ok, - mimic_default_types( - hb_cache:ensure_all_loaded( - hb_message:convert(Bin, <<"structured@1.0">>, Device, Opts), - Opts - ), - new_atoms, - Opts - ) - } - catch error:B -> {error, B} - end. -``` - -### mimic_default_types - -Mimic the types of the default message for a given map. 
- -```erlang -mimic_default_types(Map, Mode, Opts) -> - Default = default_message_with_env(), - hb_maps:from_list(lists:map( - fun({Key, Value}) -> - NewKey = try hb_util:key_to_atom(Key, Mode) catch _:_ -> Key end, - NewValue = - case hb_maps:get(NewKey, Default, not_found, Opts) of - not_found -> Value; - DefaultValue when is_atom(DefaultValue) -> - hb_util:atom(Value); - DefaultValue when is_integer(DefaultValue) -> - hb_util:int(Value); - DefaultValue when is_float(DefaultValue) -> - hb_util:float(Value); - DefaultValue when is_binary(DefaultValue) -> - Value; - _ -> Value - end, - {NewKey, NewValue} - end, - hb_maps:to_list(Map, Opts) - )). -``` - -### as - -Find a given identity from the `identities` map, and return the options - -```erlang -as(Identity, Opts) -> - case identities(Opts) of - #{ Identity := SubOpts } -> - ?event({found_identity_sub_opts_are, SubOpts}), - {ok, maps:merge(Opts, mimic_default_types(SubOpts, new_atoms, Opts))}; - _ -> - {error, not_found} - end. -``` - -### identities - -Find all known IDs and their sub-options from the `priv_ids` map. Allows - -```erlang -identities(Opts) -> - identities(hb:wallet(), Opts). -``` - -### identities - -```erlang -identities(Default, Opts) -> - Named = ?MODULE:get(identities, #{}, Opts), - % Generate an address-based map of identities. -``` - -### check_required_opts - -Utility function to check for required options in a list. - -```erlang --spec check_required_opts(list({binary(), term()}), map()) -> - {ok, map()} | {error, binary()}. -``` - -```erlang -check_required_opts(KeyValuePairs, Opts) -> - MissingOpts = lists:filtermap( - fun({Name, Value}) -> - case Value of - not_found -> {true, Name}; - _ -> false - end - end, - KeyValuePairs - ), - case MissingOpts of - [] -> - {ok, Opts}; - _ -> - MissingOptsStr = binary:list_to_bin( - lists:join(<<", ">>, MissingOpts) - ), - ErrorMsg = <<"Missing required opts: ", MissingOptsStr/binary>>, - {error, ErrorMsg} - end. 
-``` - -### ensure_node_history - -Ensures all items in a node history meet required configuration options. - -```erlang --spec ensure_node_history(NodeHistory :: list() | term(), RequiredOpts :: map()) -> - {ok, binary()} | {error, binary()}. -``` - -```erlang -ensure_node_history(Opts, RequiredOpts) -> - ?event(validate_history_items, {required_opts, RequiredOpts}), - maybe - % Get the node history from the options - NodeHistory = hb_opts:get(node_history, [], Opts), - % Add the Opts to the node history to validate all items - NodeHistoryWithOpts = [ Opts | NodeHistory ], - % Normalize required options - NormalizedRequiredOpts ?= hb_ao:normalize_keys(RequiredOpts), - % Normalize all node history items once - NormalizedNodeHistory ?= lists:map( - fun(Item) -> - hb_ao:normalize_keys(Item) - end, - NodeHistoryWithOpts - ), - % Get the first item (complete opts) and remaining items (differences) - [FirstItem | RemainingItems] = NormalizedNodeHistory, - % Step 2: Validate first item values match requirements - FirstItemValuesMatch = hb_message:match(NormalizedRequiredOpts, FirstItem, primary), - true ?= (FirstItemValuesMatch == true) orelse {error, values_invalid}, - % Step 3: Check that remaining items don't modify required keys - NoRequiredKeysModified = lists:all( - fun(HistoryItem) -> - % For each required key, if it exists in this history item, - % it must match the value from the first item - hb_message:match(RequiredOpts, HistoryItem, only_present) - end, - RemainingItems - ), - true ?= NoRequiredKeysModified orelse {error, required_key_modified}, - % If we've made it this far, everything is valid - ?event({validate_node_history_items, all_items_valid}), - {ok, valid} - else - {error, values_invalid} -> - ?event({validate_node_history_items, validation_failed, invalid_values}), - {error, invalid_values}; - {error, required_key_modified} -> - ?event({validate_node_history_items, validation_failed, required_key_modified}), - {error, modified_required_key}; - _ -> 
- ?event({validate_node_history_items, validation_failed, unknown}), - {error, validation_failed} - end. -``` - -### global_get_test - -```erlang -global_get_test() -> - ?assertEqual(debug, ?MODULE:get(mode)), - ?assertEqual(debug, ?MODULE:get(mode, production)), - ?assertEqual(undefined, ?MODULE:get(unset_global_key)), - ?assertEqual(1234, ?MODULE:get(unset_global_key, 1234)). -``` - -### local_get_test - -```erlang -local_get_test() -> - Local = #{ only => local }, - ?assertEqual(undefined, - ?MODULE:get(test_key, undefined, Local)), - ?assertEqual(correct, - ?MODULE:get(test_key, undefined, Local#{ test_key => correct })). -``` - -### local_preference_test - -```erlang -local_preference_test() -> - Local = #{ prefer => local }, - ?assertEqual(correct, - ?MODULE:get(test_key, undefined, Local#{ test_key => correct })), - ?assertEqual(correct, - ?MODULE:get(mode, undefined, Local#{ mode => correct })), - ?assertNotEqual(undefined, - ?MODULE:get(mode, undefined, Local)). -``` - -### global_preference_test - -```erlang -global_preference_test() -> - Global = #{ prefer => global }, - ?assertEqual(undefined, ?MODULE:get(test_key, undefined, Global)), - ?assertNotEqual(incorrect, - ?MODULE:get(mode, undefined, Global#{ mode => incorrect })), - ?assertNotEqual(undefined, ?MODULE:get(mode, undefined, Global)). -``` - -### load_flat_test - -```erlang -load_flat_test() -> - % File contents: - % port: 1234 - % host: https://ao.computer - % await-inprogress: false - {ok, Conf} = load("test/config.flat", #{}), - ?event({loaded, {explicit, Conf}}), - % Ensure we convert types as expected. 
-``` - -### load_json_test - -```erlang -load_json_test() -> - {ok, Conf} = load("test/config.json", #{}), - ?event(debug_node_msg, {loaded, Conf}), - ?assertEqual(1234, hb_maps:get(port, Conf)), - ?assertEqual(9001, hb_maps:get(example, Conf)), - % A binary - ?assertEqual(<<"https://ao.computer">>, hb_maps:get(host, Conf)), - % An atom, where the key contained a header-key `-' rather than a `_'. -``` - -### as_identity_test - -```erlang -as_identity_test() -> - DefaultWallet = ar_wallet:new(), - TestWallet1 = ar_wallet:new(), - TestWallet2 = ar_wallet:new(), - TestID2 = hb_util:human_id(TestWallet2), - Opts = #{ - test_key => 0, - priv_wallet => DefaultWallet, - identities => #{ - <<"testname-1">> => #{ - priv_wallet => TestWallet1, - test_key => 1 - }, - TestID2 => #{ - priv_wallet => TestWallet2, - test_key => 2 - } - } - }, - ?event({base_opts, Opts}), - Identities = identities(Opts), - ?event({identities, Identities}), - % The number of identities should be 5: `default`, its ID, `testname-1`, - % and its ID, and just the ID of `TestWallet2`. 
-``` - -### ensure_node_history_test - -```erlang -ensure_node_history_test() -> - % Define some test data - RequiredOpts = #{ - key1 => - #{ - <<"type">> => <<"string">>, - <<"value">> => <<"value1">> - }, - key2 => <<"value2">> - }, - % Test case: All items have required options - ValidOpts = - #{ - <<"key1">> => - #{ - <<"type">> => <<"string">>, - <<"value">> => <<"value1">> - }, - <<"key2">> => <<"value2">>, - <<"extra">> => <<"value">>, - node_history => [ - #{ - <<"key1">> => - #{ - <<"type">> => <<"string">>, - <<"value">> => <<"value1">> - }, - <<"key2">> => <<"value2">>, - <<"extra">> => <<"value">> - }, - #{ - <<"key1">> => - #{ - <<"type">> => <<"string">>, - <<"value">> => <<"value1">> - }, - <<"key2">> => <<"value2">> - } - ] - }, - ?assertEqual({ok, valid}, ensure_node_history(ValidOpts, RequiredOpts)), - ?event({valid_items, ValidOpts}), - % Test Missing items - MissingItems = - #{ - <<"key1">> => - #{ - <<"type">> => <<"string">>, - <<"value">> => <<"value1">> - }, - node_history => [ - #{ - <<"key1">> => - #{ - <<"type">> => <<"string">>, - <<"value">> => <<"value1">> - } - % missing key2 - } - ] - }, - ?assertEqual({error, invalid_values}, ensure_node_history(MissingItems, RequiredOpts)), - ?event({missing_items, MissingItems}), - % Test Invalid items - InvalidItems = - #{ - <<"key1">> => - #{ - <<"type">> => <<"string">>, - <<"value">> => <<"value">> - }, - <<"key2">> => <<"value2">>, - node_history => - [ - #{ - <<"key1">> => - #{ - <<"type">> => <<"string">>, - <<"value">> => <<"value2">> - }, - <<"key2">> => <<"value3">> - } - ] - }, - ?assertEqual({error, invalid_values}, ensure_node_history(InvalidItems, RequiredOpts)). 
-``` - ---- - -*Generated from [hb_opts.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_opts.erl)* diff --git a/docs/book/src/hb_path.erl.md b/docs/book/src/hb_path.erl.md deleted file mode 100644 index dfbb24428..000000000 --- a/docs/book/src/hb_path.erl.md +++ /dev/null @@ -1,695 +0,0 @@ -# hb_path - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_path.erl) - -This module provides utilities for manipulating the paths of a -message: Its request path (referred to in messages as just the `Path`), and -its HashPath. -A HashPath is a rolling Merkle list of the messages that have been applied -in order to generate a given message. Because applied messages can -themselves be the result of message applications with the AO-Core protocol, -the HashPath can be thought of as the tree of messages that represent the -history of a given message. The initial message on a HashPath is referred to -by its ID and serves as its user-generated 'root'. -Specifically, the HashPath can be generated by hashing the previous HashPath -and the current message. This means that each message in the HashPath is -dependent on all previous messages. -
-    Msg1.HashPath = Msg1.ID
-    Msg3.HashPath = Msg1.Hash(Msg1.HashPath, Msg2.ID)
-    Msg3.{...} = AO-Core.apply(Msg1, Msg2)
-    ...
-
-A message's ID itself includes its HashPath, leading to the mixing of -a Msg2's merkle list into the resulting Msg3's HashPath. This allows a single -message to represent a history _tree_ of all of the messages that were -applied to generate it -- rather than just a linear history. -A message may also specify its own algorithm for generating its HashPath, -which allows for custom logic to be used for representing the history of a -message. When Msg2's are applied to a Msg1, the resulting Msg3's HashPath -will be generated according to Msg1's algorithm choice. - ---- - -## Exported Functions - -- `from_message/3` -- `hashpath_alg/2` -- `hashpath/2` -- `hashpath/3` -- `hashpath/4` -- `hd/2` -- `matches/2` -- `normalize/1` -- `pop_request/2` -- `priv_remaining/2` -- `priv_store_remaining/2` -- `priv_store_remaining/3` -- `push_request/2` -- `push_request/3` -- `queue_request/2` -- `queue_request/3` -- `regex_matches/2` -- `term_to_path_parts/1` -- `term_to_path_parts/2` -- `tl/2` -- `to_binary/1` -- `verify_hashpath/2` - ---- - -### hd - -This module provides utilities for manipulating the paths of a -Extract the first key from a `Message2`'s `Path` field. - -```erlang -hd(Msg2, Opts) -> - %?event({key_from_path, Msg2, Opts}), - case pop_request(Msg2, Opts) of - undefined -> undefined; - {Head, _} -> - % `term_to_path' returns the full path, so we need to take the - % `hd' of our `Head'. -``` - -### tl - -Return the message without its first path element. Note that this - -```erlang -tl(Msg2, Opts) when is_map(Msg2) -> - case pop_request(Msg2, Opts) of - undefined -> undefined; - {_, Rest} -> Rest - end; -``` - -### tl - -Return the message without its first path element. Note that this - -```erlang -tl(Path, Opts) when is_list(Path) -> - case tl(#{ <<"path">> => Path }, Opts) of - [] -> undefined; - undefined -> undefined; - #{ <<"path">> := Rest } -> Rest - end. 
-``` - -### priv_remaining - -Return the `Remaining-Path` of a message, from its hidden `AO-Core` -Store the remaining path of a message in its hidden `AO-Core` key. - -```erlang -priv_remaining(Msg, Opts) -> - Priv = hb_private:from_message(Msg), - AOCore = hb_maps:get(<<"ao-core">>, Priv, #{}, Opts), - hb_maps:get(<<"remaining">>, AOCore, undefined, Opts). -``` - -### priv_store_remaining - -Return the `Remaining-Path` of a message, from its hidden `AO-Core` -Store the remaining path of a message in its hidden `AO-Core` key. - -```erlang -priv_store_remaining(Msg, RemainingPath) -> - priv_store_remaining(Msg, RemainingPath, #{}). -``` - -### priv_store_remaining - -```erlang -priv_store_remaining(Msg, RemainingPath, Opts) -> - Priv = hb_private:from_message(Msg), - AOCore = hb_maps:get(<<"ao-core">>, Priv, #{}, Opts), - Msg#{ - <<"priv">> => - Priv#{ - <<"ao-core">> => - AOCore#{ - <<"remaining">> => RemainingPath - } - } - }. -``` - -### hashpath - -Add an ID of a Msg2 to the HashPath of another message. - -```erlang -hashpath(Bin, _Opts) when is_binary(Bin) -> - % Default hashpath for a binary message is its SHA2-256 hash. -``` - -### hashpath - -```erlang -hashpath(RawMsg1, Opts) -> - Msg1 = hb_ao:normalize_keys(RawMsg1, Opts), - case hb_private:from_message(Msg1) of - #{ <<"hashpath">> := HP } -> HP; - _ -> - % Note: We do not use `hb_message:id' here because it will call - % hb_ao:resolve, which will call `hashpath' recursively. -``` - -### hashpath - -```erlang -hashpath(Msg1, Msg2, Opts) when is_map(Msg1) -> - Msg1Hashpath = hashpath(Msg1, Opts), - HashpathAlg = hashpath_alg(Msg1, Opts), - hashpath(Msg1Hashpath, Msg2, HashpathAlg, Opts); -``` - -### hashpath - -```erlang -hashpath(Msg1, Msg2, Opts) -> - throw({hashpath_not_viable, Msg1, Msg2, Opts}). 
-``` - -### hashpath - -```erlang -hashpath(Msg1, Msg2, HashpathAlg, Opts) when is_map(Msg2) -> - Msg2WithoutMeta = hb_maps:without(?AO_CORE_KEYS, Msg2, Opts), - ReqPath = from_message(request, Msg2, Opts), - case {map_size(Msg2WithoutMeta), ReqPath} of - {0, _} when ReqPath =/= undefined -> - hashpath(Msg1, to_binary(hd(ReqPath)), HashpathAlg, Opts); - _ -> - {ok, Msg2ID} = - dev_message:id( - Msg2, - #{ <<"commitments">> => <<"all">> }, - Opts - ), - hashpath(Msg1, hb_util:human_id(Msg2ID), HashpathAlg, Opts) - end; -``` - -### hashpath - -```erlang -hashpath(Msg1Hashpath, HumanMsg2ID, HashpathAlg, Opts) -> - ?event({hashpath, {msg1hp, {explicit, Msg1Hashpath}}, {msg2id, {explicit, HumanMsg2ID}}}), - HP = - case term_to_path_parts(Msg1Hashpath, Opts) of - [_] -> - << Msg1Hashpath/binary, "/", HumanMsg2ID/binary >>; - [Prev1, Prev2] -> - % Calculate the new base of the hashpath. We check whether the key is - % a human-readable binary ID, or a path part, and convert or pass - % through accordingly. -``` - -### hashpath_alg - -Get the hashpath function for a message from its HashPath-Alg. - -```erlang -hashpath_alg(Msg, Opts) -> - case dev_message:get(<<"hashpath-alg">>, Msg, Opts) of - {ok, <<"sha-256-chain">>} -> - fun hb_crypto:sha256_chain/2; - {ok, <<"accumulate-256">>} -> - fun hb_crypto:accumulate/2; - {error, not_found} -> - fun hb_crypto:sha256_chain/2 - end. -``` - -### push_request - -Add a message to the head (next to execute) of a request path. - -```erlang -push_request(Msg, Path) -> - push_request(Msg, Path, #{}). -``` - -### push_request - -Pop the next element from a request path or path list. - -```erlang -push_request(Msg, Path, Opts) -> - hb_maps:put(<<"path">>, term_to_path_parts(Path, Opts) ++ from_message(request, Msg, Opts), Msg, Opts). -``` - -### pop_request - -Pop the next element from a request path or path list. 
- -```erlang -pop_request(undefined, _Opts) -> undefined; -``` - -### pop_request - -Pop the next element from a request path or path list. - -```erlang -pop_request(Msg, Opts) when is_map(Msg) -> - %?event({popping_request, {msg, Msg}, {opts, Opts}}), - case pop_request(from_message(request, Msg, Opts), Opts) of - undefined -> undefined; - {undefined, _} -> undefined; - {Head, []} -> {Head, undefined}; - {Head, Rest} -> - ?event({popped_request, Head, Rest}), - {Head, hb_maps:put(<<"path">>, Rest, Msg, Opts)} - end; -``` - -### pop_request - -Pop the next element from a request path or path list. - -```erlang -pop_request([], _Opts) -> undefined; -``` - -### pop_request - -Pop the next element from a request path or path list. - -```erlang -pop_request([Head|Rest], _Opts) -> - {Head, Rest}. -``` - -### queue_request - -Queue a message at the back of a request path. `path` is the only - -```erlang -queue_request(Msg, Path) -> - queue_request(Msg, Path, #{}). -``` - -### queue_request - -Verify the HashPath of a message, given a list of messages that - -```erlang -queue_request(Msg, Path, Opts) -> - hb_maps:put(<<"path">>, from_message(request, Msg, Opts) ++ term_to_path_parts(Path), Msg, Opts). -``` - -### verify_hashpath - -Verify the HashPath of a message, given a list of messages that - -```erlang -verify_hashpath([Msg1, Msg2, Msg3|Rest], Opts) -> - CorrectHashpath = hashpath(Msg1, Msg2, Opts), - FromMsg3 = from_message(hashpath, Msg3, Opts), - CorrectHashpath == FromMsg3 andalso - case Rest of - [] -> true; - _ -> verify_hashpath([Msg2, Msg3|Rest], Opts) - end. -``` - -### from_message - -Extract the request path or hashpath from a message. We do not use - -```erlang -from_message(Type, Link, Opts) when ?IS_LINK(Link) -> - from_message(Type, hb_cache:ensure_loaded(Link, Opts), Opts); -``` - -### from_message - -Extract the request path or hashpath from a message. 
We do not use - -```erlang -from_message(hashpath, Msg, Opts) -> hashpath(Msg, Opts); -``` - -### from_message - -Extract the request path or hashpath from a message. We do not use - -```erlang -from_message(request, #{ path := Path }, Opts) -> term_to_path_parts(Path, Opts); -``` - -### from_message - -Extract the request path or hashpath from a message. We do not use - -```erlang -from_message(request, #{ <<"path">> := Path }, Opts) -> term_to_path_parts(Path, Opts); -``` - -### from_message - -Extract the request path or hashpath from a message. We do not use - -```erlang -from_message(request, #{ <<"Path">> := Path }, Opts) -> term_to_path_parts(Path, Opts); -``` - -### from_message - -Extract the request path or hashpath from a message. We do not use -Convert a term into an executable path. Supports binaries, lists, and - -```erlang -from_message(request, _, _Opts) -> undefined. -``` - -### term_to_path_parts - -Extract the request path or hashpath from a message. We do not use -Convert a term into an executable path. Supports binaries, lists, and - -```erlang -term_to_path_parts(Path) -> - term_to_path_parts(Path, #{ error_strategy => throw }). 
-``` - -### term_to_path_parts - -```erlang -term_to_path_parts(Link, Opts) when ?IS_LINK(Link) -> - term_to_path_parts(hb_cache:ensure_loaded(Link, Opts), Opts); -``` - -### term_to_path_parts - -```erlang -term_to_path_parts([], _Opts) -> undefined; -``` - -### term_to_path_parts - -```erlang -term_to_path_parts(<<>>, _Opts) -> undefined; -``` - -### term_to_path_parts - -```erlang -term_to_path_parts(<<"/">>, _Opts) -> []; -``` - -### term_to_path_parts - -```erlang -term_to_path_parts(Binary, Opts) when is_binary(Binary) -> - case binary:match(Binary, <<"/">>) of - nomatch -> [Binary]; - _ -> - term_to_path_parts( - binary:split(Binary, <<"/">>, [global, trim_all]), - Opts - ) - end; -``` - -### term_to_path_parts - -```erlang -term_to_path_parts(Path = [ASCII | _], _Opts) when is_integer(ASCII) -> - [hb_ao:normalize_key(Path)]; -``` - -### term_to_path_parts - -```erlang -term_to_path_parts(List, Opts) when is_list(List) -> - lists:flatten(lists:map( - fun(Part) -> - term_to_path_parts(Part, Opts) - end, - List - )); -``` - -### term_to_path_parts - -```erlang -term_to_path_parts(Atom, _Opts) when is_atom(Atom) -> [Atom]; -``` - -### term_to_path_parts - -```erlang -term_to_path_parts(Integer, _Opts) when is_integer(Integer) -> - [hb_ao:normalize_key(Integer)]; -``` - -### term_to_path_parts - -```erlang -term_to_path_parts({as, DevName, Msgs}, _Opts) -> - [{as, hb_ao:normalize_key(DevName), Msgs}]. -``` - -### to_binary - -Convert a path of any form to a binary. - -```erlang -to_binary(Path) -> - Parts = binary:split(do_to_binary(Path), <<"/">>, [global, trim_all]), - iolist_to_binary(lists:join(<<"/">>, Parts)). -``` - -### do_to_binary - -Convert a path of any form to a binary. 
- -```erlang -do_to_binary(Path) when is_list(Path) -> - case hb_util:is_string_list(Path) of - false -> - iolist_to_binary( - lists:join( - "/", - lists:filtermap( - fun(Part) -> - case do_to_binary(Part) of - <<>> -> false; - BinPart -> {true, BinPart} - end - end, - Path - ) - ) - ); - true -> - to_binary(list_to_binary(Path)) - end; -``` - -### do_to_binary - -Convert a path of any form to a binary. - -```erlang -do_to_binary(Path) when is_binary(Path) -> - Path; -``` - -### do_to_binary - -Convert a path of any form to a binary. - -```erlang -do_to_binary(Other) -> - hb_ao:normalize_key(Other). -``` - -### matches - -Check if two keys match. - -```erlang -matches(Key1, Key2) -> - hb_util:to_lower(hb_ao:normalize_key(Key1)) == - hb_util:to_lower(hb_ao:normalize_key(Key2)). -``` - -### regex_matches - -Check if two keys match using regex. - -```erlang -regex_matches(Path1, Path2) -> - NormP1 = normalize(hb_ao:normalize_key(Path1)), - NormP2 = - case hb_ao:normalize_key(Path2) of - Normalized = <<"^", _/binary>> -> Normalized; - Normalized -> normalize(Normalized) - end, - try re:run(NormP1, NormP2) =/= nomatch - catch _A:_B:_C -> false - end. -``` - -### normalize - -Normalize a path to a binary, removing the leading slash if present. - -```erlang -normalize(Path) -> - case iolist_to_binary([Path]) of - BinPath = <<"/", _/binary>> -> BinPath; - Binary -> <<"/", Binary/binary>> - end. -``` - -### hashpath_test - -```erlang -hashpath_test() -> - Msg1 = #{ priv => #{<<"empty">> => <<"message">>} }, - Msg2 = #{ priv => #{<<"exciting">> => <<"message2">>} }, - Hashpath = hashpath(Msg1, Msg2, #{}), - ?assert(is_binary(Hashpath) andalso byte_size(Hashpath) == 87). -``` - -### hashpath_direct_msg2_test - -```erlang -hashpath_direct_msg2_test() -> - Msg1 = #{ <<"base">> => <<"message">> }, - Msg2 = #{ <<"path">> => <<"base">> }, - Hashpath = hashpath(Msg1, Msg2, #{}), - [_, KeyName] = term_to_path_parts(Hashpath), - ?assert(matches(KeyName, <<"base">>)). 
-``` - -### multiple_hashpaths_test - -```erlang -multiple_hashpaths_test() -> - Msg1 = #{ <<"empty">> => <<"message">> }, - Msg2 = #{ <<"exciting">> => <<"message2">> }, - Msg3 = #{ priv => #{<<"hashpath">> => hashpath(Msg1, Msg2, #{}) } }, - Msg4 = #{ <<"exciting">> => <<"message4">> }, - Msg5 = hashpath(Msg3, Msg4, #{}), - ?assert(is_binary(Msg5)). -``` - -### verify_hashpath_test - -```erlang -verify_hashpath_test() -> - Msg1 = #{ <<"test">> => <<"initial">> }, - Msg2 = #{ <<"firstapplied">> => <<"msg2">> }, - Msg3 = #{ priv => #{<<"hashpath">> => hashpath(Msg1, Msg2, #{})} }, - Msg4 = #{ priv => #{<<"hashpath">> => hashpath(Msg2, Msg3, #{})} }, - Msg3Fake = #{ priv => #{<<"hashpath">> => hashpath(Msg4, Msg2, #{})} }, - ?assert(verify_hashpath([Msg1, Msg2, Msg3, Msg4], #{})), - ?assertNot(verify_hashpath([Msg1, Msg2, Msg3Fake, Msg4], #{})). -``` - -### validate_path_transitions - -```erlang -validate_path_transitions(X, Opts) -> - {Head, X2} = pop_request(X, Opts), - ?assertEqual(<<"a">>, Head), - {H2, X3} = pop_request(X2, Opts), - ?assertEqual(<<"b">>, H2), - {H3, X4} = pop_request(X3, Opts), - ?assertEqual(<<"c">>, H3), - ?assertEqual(undefined, pop_request(X4, Opts)). -``` - -### pop_from_message_test - -```erlang -pop_from_message_test() -> - validate_path_transitions(#{ <<"path">> => [<<"a">>, <<"b">>, <<"c">>] }, #{}). -``` - -### pop_from_path_list_test - -```erlang -pop_from_path_list_test() -> - validate_path_transitions([<<"a">>, <<"b">>, <<"c">>], #{}). -``` - -### hd_test - -```erlang -hd_test() -> - ?assertEqual(<<"a">>, hd(#{ <<"path">> => [<<"a">>, <<"b">>, <<"c">>] }, #{})), - ?assertEqual(undefined, hd(#{ <<"path">> => undefined }, #{})). 
-``` - -### tl_test - -```erlang -tl_test() -> - ?assertMatch([<<"b">>, <<"c">>], hb_maps:get(<<"path">>, tl(#{ <<"path">> => [<<"a">>, <<"b">>, <<"c">>] }, #{}))), - ?assertEqual(undefined, tl(#{ <<"path">> => [] }, #{})), - ?assertEqual(undefined, tl(#{ <<"path">> => <<"a">> }, #{})), - ?assertEqual(undefined, tl(#{ <<"path">> => undefined }, #{})), - ?assertEqual([<<"b">>, <<"c">>], tl([<<"a">>, <<"b">>, <<"c">>], #{ })), - ?assertEqual(undefined, tl([<<"c">>], #{ })). -``` - -### to_binary_test - -```erlang -to_binary_test() -> - ?assertEqual(<<"a/b/c">>, to_binary([<<"a">>, <<"b">>, <<"c">>])), - ?assertEqual(<<"a/b/c">>, to_binary(<<"a/b/c">>)), - ?assertEqual(<<"a/b/c">>, to_binary([<<"a">>, <<"b">>, <<"c">>])), - ?assertEqual(<<"a/b/c">>, to_binary(["a", <<"b">>, <<"c">>])), - ?assertEqual(<<"a/b/b/c">>, to_binary([<<"a">>, [<<"b">>, <<"//b">>], <<"c">>])). -``` - -### term_to_path_parts_test - -```erlang -term_to_path_parts_test() -> - ?assert(matches([<<"a">>, <<"b">>, <<"c">>], - term_to_path_parts(<<"a/b/c">>))), - ?assert(matches([<<"a">>, <<"b">>, <<"c">>], - term_to_path_parts([<<"a">>, <<"b">>, <<"c">>]))), - ?assert(matches([<<"a">>, <<"b">>, <<"c">>], - term_to_path_parts(["a", <<"b">>, <<"c">>]))), - ?assert(matches([<<"a">>, <<"b">>, <<"b">>, <<"c">>], - term_to_path_parts([[<<"/a">>, [<<"b">>, <<"//b">>], <<"c">>]]))), - ?assertEqual([], term_to_path_parts(<<"/">>)). -% calculate_multistage_hashpath_test() -> -% Msg1 = #{ <<"base">> => <<"message">> }, -% Msg2 = #{ <<"path">> => <<"2">> }, -% Msg3 = #{ <<"path">> => <<"3">> }, -% Msg4 = #{ <<"path">> => <<"4">> }, -% Msg5 = hashpath(Msg1, [Msg2, Msg3, Msg4], #{}), -% ?assert(is_binary(Msg5)), -% Msg3Path = <<"3">>, -% Msg5b = hashpath(Msg1, [Msg2, Msg3Path, Msg4]), -% ?assertEqual(Msg5, Msg5b). 
-``` - -### regex_matches_test - -```erlang -regex_matches_test() -> - ?assert(regex_matches(<<"a/b/c">>, <<"a/.*/c">>)), - ?assertNot(regex_matches(<<"a/b/c">>, <<"a/.*/d">>)), - ?assert(regex_matches(<<"a/abcd/c">>, <<"a/abc.*/c">>)), - ?assertNot(regex_matches(<<"a/bcd/c">>, <<"a/abc.*/c">>)), - ?assert(regex_matches(<<"a/bcd/ignored/c">>, <<"a/.*/c">>)), -``` - ---- - -*Generated from [hb_path.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_path.erl)* diff --git a/docs/book/src/hb_persistent.erl.md b/docs/book/src/hb_persistent.erl.md deleted file mode 100644 index 16e4bcf83..000000000 --- a/docs/book/src/hb_persistent.erl.md +++ /dev/null @@ -1,585 +0,0 @@ -# hb_persistent - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_persistent.erl) - -Creates and manages long-lived AO-Core resolution processes. -These can be useful for situations where a message is large and expensive -to serialize and deserialize, or when executions should be deliberately -serialized to avoid parallel executions of the same computation. This -module is called during the core `hb_ao` execution process, so care -must be taken to avoid recursive spawns/loops. -Built using the `pg` module, which is a distributed Erlang process group -manager. - ---- - -## Exported Functions - -- `await/4` -- `default_await/5` -- `default_grouper/3` -- `default_worker/3` -- `find_or_register/3` -- `forward_work/2` -- `group/3` -- `notify/4` -- `start_monitor/0` -- `start_monitor/1` -- `start_worker/2` -- `start_worker/3` -- `stop_monitor/1` -- `unregister_notify/4` - ---- - -### start - -Creates and manages long-lived AO-Core resolution processes. -Ensure that the `pg` module is started. -Start a monitor that prints the current members of the group every - -```erlang -start() -> hb_name:start(). -``` - -### start_monitor - -Creates and manages long-lived AO-Core resolution processes. -Ensure that the `pg` module is started. 
-Start a monitor that prints the current members of the group every - -```erlang -start_monitor() -> - start_monitor(global). -``` - -### start_monitor - -```erlang -start_monitor(Group) -> - start_monitor(Group, #{}). -``` - -### start_monitor - -```erlang -start_monitor(Group, Opts) -> - start(), - ?event({worker_monitor, {start_monitor, Group, hb_name:all()}}), - spawn(fun() -> do_monitor(Group, #{}, Opts) end). -``` - -### stop_monitor - -```erlang -stop_monitor(PID) -> - PID ! stop. -``` - -### do_monitor - -```erlang -do_monitor(Group, Last, Opts) -> - Groups = lists:map(fun({Name, _}) -> Name end, hb_name:all()), - New = - hb_maps:from_list( - lists:map( - fun(G) -> - Pid = hb_name:lookup(G), - { - G, - #{ - pid => Pid, - messages => - case Pid of - undefined -> 0; - _ -> - length( - element(2, - erlang:process_info(Pid, messages) - ) - ) - end - } - } - end, - case Group of - global -> Groups; - TargetGroup -> - case lists:member(TargetGroup, Groups) of - true -> [TargetGroup]; - false -> [] - end - end - ) - ), - Delta = - hb_maps:filter( - fun(G, NewState) -> - case hb_maps:get(G, Last, []) of - NewState -> false; - _ -> true - end - end, - New, - Opts - ), - case hb_maps:size(Delta, Opts) of - 0 -> ok; - Deltas -> - io:format(standard_error, "== Sitrep ==> ~p named processes. ~p changes. ~n", - [hb_maps:size(New, Opts), Deltas]), - hb_maps:map( - fun(G, #{pid := P, messages := Msgs}) -> - io:format(standard_error, "[~p: ~p] #M: ~p~n", [G, P, Msgs]) - end, - Delta, - Opts - ), - io:format(standard_error, "~n", []) - end, - timer:sleep(1000), - receive stop -> stopped - after 0 -> do_monitor(Group, New, Opts) - end. -``` - -### find_or_register - -Register the process to lead an execution if none is found, otherwise - -```erlang -find_or_register(Msg1, Msg2, Opts) -> - GroupName = group(Msg1, Msg2, Opts), - find_or_register(GroupName, Msg1, Msg2, Opts). 
-``` - -### find_or_register - -```erlang -find_or_register(ungrouped_exec, _Msg1, _Msg2, _Opts) -> - {leader, ungrouped_exec}; -``` - -### find_or_register - -```erlang -find_or_register(GroupName, _Msg1, _Msg2, Opts) -> - case hb_opts:get(await_inprogress, false, Opts) of - false -> {leader, GroupName}; - _ -> - Self = self(), - case find_execution(GroupName, Opts) of - {ok, Leader} when Leader =/= Self -> - ?event({found_leader, GroupName, {leader, Leader}}), - {wait, Leader}; - {ok, Leader} when Leader =:= Self -> - {infinite_recursion, GroupName}; - _ -> - ?event({register_resolver, {group, GroupName}}), - register_groupname(GroupName, Opts), - {leader, GroupName} - end - end. -``` - -### unregister_notify - -Unregister as the leader for an execution and notify waiting processes. - -```erlang -unregister_notify(ungrouped_exec, _Msg2, _Msg3, _Opts) -> ok; -``` - -### unregister_notify - -Unregister as the leader for an execution and notify waiting processes. - -```erlang -unregister_notify(GroupName, Msg2, Msg3, Opts) -> - unregister_groupname(GroupName, Opts), - notify(GroupName, Msg2, Msg3, Opts). -``` - -### find_execution - -Find a group with the given name. - -```erlang -find_execution(Groupname, _Opts) -> - start(), - case hb_name:lookup(Groupname) of - undefined -> not_found; - Pid -> {ok, Pid} - end. -``` - -### group - -Calculate the group name for a Msg1 and Msg2 pair. Uses the Msg1's - -```erlang -group(Msg1, Msg2, Opts) -> - Grouper = - hb_maps:get(grouper, hb_ao:info(Msg1, Opts), fun default_grouper/3, Opts), - apply( - Grouper, - hb_ao:truncate_args(Grouper, [Msg1, Msg2, Opts]) - ). -``` - -### register_groupname - -Register for performing an AO-Core resolution. - -```erlang -register_groupname(Groupname, _Opts) -> - ?event({registering_as, Groupname}), - hb_name:register(Groupname). -``` - -### unregister - -Unregister for being the leader on an AO-Core resolution. 
- -```erlang -unregister(Msg1, Msg2, Opts) -> - start(), - unregister_groupname(group(Msg1, Msg2, Opts), Opts). -``` - -### unregister_groupname - -```erlang -unregister_groupname(Groupname, _Opts) -> - ?event({unregister_resolver, {explicit, Groupname}}), - hb_name:unregister(Groupname). -``` - -### await - -If there was already an Erlang process handling this execution, - -```erlang -await(Worker, Msg1, Msg2, Opts) -> - % Get the device's await function, if it exists. -``` - -### default_await - -Default await function that waits for a resolution from a worker. - -```erlang -default_await(Worker, GroupName, Msg1, Msg2, Opts) -> - % Wait for the result. -``` - -### notify - -Check our inbox for processes that are waiting for the resolution - -```erlang -notify(GroupName, Msg2, Msg3, Opts) -> - case is_binary(GroupName) of - true -> - ?event({notifying_all, {group, GroupName}}); - false -> - ok - end, - receive - {resolve, Listener, GroupName, Msg2, _ListenerOpts} -> - ?event({notifying_listener, {listener, Listener}, {group, GroupName}}), - send_response(Listener, GroupName, Msg2, Msg3), - notify(GroupName, Msg2, Msg3, Opts) - after 0 -> - ?event(finished_notify), - ok - end. -``` - -### forward_work - -Forward requests to a newly delegated execution process. - -```erlang -forward_work(NewPID, Opts) -> - Gather = - fun Gather() -> - receive - Req = {resolve, _, _, _, _} -> [Req | Gather()] - after 0 -> [] - end - end, - ToForward = Gather(), - lists:foreach( - fun(Req) -> - NewPID ! Req - end, - ToForward - ), - case length(ToForward) > 0 of - true -> - ?event({fwded, {reqs, length(ToForward)}, {pid, NewPID}}, Opts); - false -> ok - end, - ok. -``` - -### send_response - -Helper function that wraps responding with a new Msg3. - -```erlang -send_response(Listener, GroupName, Msg2, Msg3) -> - ?event(worker, - {send_response, - {listener, Listener}, - {group, GroupName} - } - ), - Listener ! {resolved, self(), GroupName, Msg2, Msg3}. 
-``` - -### start_worker - -Start a worker process that will hold a message in memory for - -```erlang -start_worker(Msg, Opts) -> - start_worker(group(Msg, undefined, Opts), Msg, Opts). -``` - -### start_worker - -```erlang -start_worker(_, NotMsg, _) when not is_map(NotMsg) -> not_started; -``` - -### start_worker - -```erlang -start_worker(GroupName, Msg, Opts) -> - start(), - ?event(worker_spawns, - {starting_worker, {group, GroupName}, {msg, Msg}, {opts, Opts}} - ), - WorkerPID = spawn( - fun() -> - % If the device's info contains a `worker' function we - % use that instead of the default implementation. -``` - -### default_worker - -A server function for handling persistent executions. - -```erlang -default_worker(GroupName, Msg1, Opts) -> - Timeout = hb_opts:get(worker_timeout, 10000, Opts), - worker_event(GroupName, default_worker_waiting_for_req, Msg1, undefined, Opts), - receive - {resolve, Listener, GroupName, Msg2, ListenerOpts} -> - ?event(worker, - {work_received, - {listener, Listener}, - {group, GroupName} - } - ), - Res = - hb_ao:resolve( - Msg1, - Msg2, - hb_maps:merge(ListenerOpts, Opts, Opts) - ), - send_response(Listener, GroupName, Msg2, Res), - notify(GroupName, Msg2, Res, Opts), - case hb_opts:get(static_worker, false, Opts) of - true -> - % Reregister for the existing group name. -``` - -### default_grouper - -Create a group name from a Msg1 and Msg2 pair as a tuple. - -```erlang -default_grouper(Msg1, Msg2, Opts) -> - %?event({calculating_default_group_name, {msg1, Msg1}, {msg2, Msg2}}), - % Use Erlang's `phash2' to hash the result of the Grouper function. -``` - -### worker_event - -Log an event with the worker process. If we used the default grouper - -```erlang -worker_event(Group, Data, Msg1, Msg2, Opts) when is_integer(Group) -> - ?event(worker, {worker_event, Group, Data, {msg1, Msg1}, {msg2, Msg2}}, Opts); -``` - -### worker_event - -Log an event with the worker process. 
If we used the default grouper - -```erlang -worker_event(Group, Data, _, _, Opts) -> - ?event(worker, {worker_event, Group, Data}, Opts). -``` - -### test_device - -```erlang -test_device() -> test_device(#{}). -``` - -### test_device - -```erlang -test_device(Base) -> - #{ - info => - fun() -> - hb_maps:merge( - #{ - grouper => - fun(M1, _M2, _Opts) -> - erlang:phash2(M1) - end - }, - Base - ) - end, - slow_key => - fun(_, #{ <<"wait">> := Wait }) -> - ?event({slow_key_wait_started, Wait}), - receive after Wait -> - {ok, - #{ - waited => Wait, - pid => self(), - random_bytes => - hb_util:encode(crypto:strong_rand_bytes(4)) - } - } - end - end, - self => - fun(M1, #{ <<"wait">> := Wait }) -> - ?event({self_waiting, {wait, Wait}}), - receive after Wait -> - ?event({self_returning, M1, {wait, Wait}}), - {ok, M1} - end - end - }. -``` - -### spawn_test_client - -```erlang -spawn_test_client(Msg1, Msg2) -> - spawn_test_client(Msg1, Msg2, #{}). -``` - -### spawn_test_client - -```erlang -spawn_test_client(Msg1, Msg2, Opts) -> - Ref = make_ref(), - TestParent = self(), - spawn_link(fun() -> - ?event({new_concurrent_test_resolver, Ref, {executing, Msg2}}), - Res = hb_ao:resolve(Msg1, Msg2, Opts), - ?event({test_worker_got_result, Ref, {result, Res}}), - TestParent ! {result, Ref, Res} - end), - Ref. -``` - -### wait_for_test_result - -```erlang -wait_for_test_result(Ref) -> - receive {result, Ref, Res} -> Res end. -``` - -### deduplicated_execution_test - -Test merging and returning a value with a persistent worker. - -```erlang -deduplicated_execution_test() -> - TestTime = 200, - Msg1 = #{ <<"device">> => test_device() }, - Msg2 = #{ <<"path">> => <<"slow_key">>, <<"wait">> => TestTime }, - T0 = hb:now(), - Ref1 = spawn_test_client(Msg1, Msg2), - receive after 100 -> ok end, - Ref2 = spawn_test_client(Msg1, Msg2), - Res1 = wait_for_test_result(Ref1), - Res2 = wait_for_test_result(Ref2), - T1 = hb:now(), - % Check the result is the same. 
-``` - -### persistent_worker_test - -Test spawning a default persistent worker. - -```erlang -persistent_worker_test() -> - TestTime = 200, - Msg1 = #{ <<"device">> => test_device() }, - link(start_worker(Msg1, #{ static_worker => true })), - receive after 10 -> ok end, - Msg2 = #{ <<"path">> => <<"slow_key">>, <<"wait">> => TestTime }, - Msg3 = #{ <<"path">> => <<"slow_key">>, <<"wait">> => trunc(TestTime*1.1) }, - Msg4 = #{ <<"path">> => <<"slow_key">>, <<"wait">> => trunc(TestTime*1.2) }, - T0 = hb:now(), - Ref1 = spawn_test_client(Msg1, Msg2), - Ref2 = spawn_test_client(Msg1, Msg3), - Ref3 = spawn_test_client(Msg1, Msg4), - Res1 = wait_for_test_result(Ref1), - Res2 = wait_for_test_result(Ref2), - Res3 = wait_for_test_result(Ref3), - T1 = hb:now(), - ?assertNotEqual(Res1, Res2), - ?assertNotEqual(Res2, Res3), - ?assert(T1 - T0 >= (3*TestTime)). -``` - -### spawn_after_execution_test - -```erlang -spawn_after_execution_test() -> - ?event(<<"">>), - TestTime = 500, - Msg1 = #{ <<"device">> => test_device() }, - Msg2 = #{ <<"path">> => <<"self">>, <<"wait">> => TestTime }, - Msg3 = #{ <<"path">> => <<"slow_key">>, <<"wait">> => trunc(TestTime*1.1) }, - Msg4 = #{ <<"path">> => <<"slow_key">>, <<"wait">> => trunc(TestTime*1.2) }, - T0 = hb:now(), - Ref1 = - spawn_test_client( - Msg1, - Msg2, - #{ - spawn_worker => true, - static_worker => true, - hashpath => ignore - } - ), - receive after 10 -> ok end, - Ref2 = spawn_test_client(Msg1, Msg3), - Ref3 = spawn_test_client(Msg1, Msg4), - Res1 = wait_for_test_result(Ref1), - Res2 = wait_for_test_result(Ref2), - Res3 = wait_for_test_result(Ref3), - T1 = hb:now(), - ?assertNotEqual(Res1, Res2), - ?assertNotEqual(Res2, Res3), -``` - ---- - -*Generated from [hb_persistent.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_persistent.erl)* diff --git a/docs/book/src/hb_private.erl.md b/docs/book/src/hb_private.erl.md deleted file mode 100644 index 353a46deb..000000000 --- a/docs/book/src/hb_private.erl.md +++ 
/dev/null @@ -1,279 +0,0 @@ -# hb_private - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_private.erl) - -This module provides basic helper utilities for managing the -private element of a message, which can be used to store state that is -not included in serialized messages, or those granted to users via the -APIs. Private elements of a message can be useful for storing state that -is only relevant temporarily. For example, a device might use the private -element to store a cache of values that are expensive to recompute. They -should _not_ be used for encoding state that makes the execution of a -device non-deterministic (unless you are sure you know what you are doing). -The `set` and `get` functions of this module allow you to run those keys -as AO-Core paths if you would like to have private `devices` in the -messages non-public zone. -See `hb_ao` for more information about the AO-Core protocol -and private elements of messages. - ---- - -## Exported Functions - -- `from_message/1` -- `get/3` -- `get/4` -- `is_private/1` -- `merge/3` -- `opts/1` -- `reset/1` -- `set_priv/2` -- `set/3` -- `set/4` - ---- - -### from_message - -This module provides basic helper utilities for managing the -Return the `private` key from a message. If the key does not exist, an - -```erlang -from_message(Msg) when is_map(Msg) -> - case maps:is_key(<<"priv">>, Msg) of - true -> maps:get(<<"priv">>, Msg, #{}); - false -> maps:get(priv, Msg, #{}) - end; -``` - -### from_message - -This module provides basic helper utilities for managing the -Return the `private` key from a message. If the key does not exist, an -Helper for getting a value from the private element of a message. Uses - -```erlang -from_message(_NonMapMessage) -> #{}. -``` - -### get - -This module provides basic helper utilities for managing the -Return the `private` key from a message. If the key does not exist, an -Helper for getting a value from the private element of a message. 
Uses - -```erlang -get(Key, Msg, Opts) -> - get(Key, Msg, not_found, Opts). -``` - -### get - -```erlang -get(InputPath, Msg, Default, Opts) -> - % Resolve the path against the private element of the message. -``` - -### set - -Helper function for setting a key in the private element of a message. - -```erlang -set(Msg, InputPath, Value, Opts) -> - Path = remove_private_specifier(InputPath, Opts), - Priv = from_message(Msg), - ?event({set_private, {in, Path}, {out, Path}, {value, Value}, {opts, Opts}}), - NewPriv = hb_util:deep_set(Path, Value, Priv, opts(Opts)), - ?event({set_private_res, {out, NewPriv}}), - set_priv(Msg, NewPriv). -``` - -### set - -```erlang -set(Msg, PrivMap, Opts) -> - CurrentPriv = from_message(Msg), - ?event({set_private, {in, PrivMap}, {opts, Opts}}), - NewPriv = hb_util:deep_merge(CurrentPriv, PrivMap, opts(Opts)), - ?event({set_private_res, {out, NewPriv}}), - set_priv(Msg, NewPriv). -``` - -### merge - -Merge the private elements of two messages into one. The keys in the - -```erlang -merge(Msg1, Msg2, Opts) -> - % Merge the private elements of the two messages. -``` - -### set_priv - -Helper function for setting the complete private element of a message. - -```erlang -set_priv(Msg, PrivMap) - when map_size(PrivMap) =:= 0 andalso not is_map_key(<<"priv">>, Msg) -> - Msg; -``` - -### set_priv - -Helper function for setting the complete private element of a message. -Check if a key is private. - -```erlang -set_priv(Msg, PrivMap) -> - Msg#{ <<"priv">> => PrivMap }. -``` - -### is_private - -Helper function for setting the complete private element of a message. -Check if a key is private. - -```erlang -is_private(Key) -> - try hb_util:bin(Key) of - <<"priv", _/binary>> -> true; - _ -> false - catch _:_ -> false - end. -``` - -### remove_private_specifier - -Remove the first key from the path if it is a private specifier. 
- -```erlang -remove_private_specifier(InputPath, Opts) -> - case is_private(hd(Path = hb_path:term_to_path_parts(InputPath, Opts))) of - true -> tl(Path); - false -> Path - end. -``` - -### opts - -The opts map that should be used when resolving paths against the - -```erlang -opts(Opts) -> - PrivStore = - case hb_opts:get(priv_store, undefined, Opts) of - undefined -> []; - PrivateStores when is_list(PrivateStores) -> PrivateStores; - PrivateStore -> [PrivateStore] - end, - BaseStore = - case hb_opts:get(store, [], Opts) of - SingleStore when is_map(SingleStore) -> [SingleStore]; - Stores when is_list(Stores) -> Stores - end, - NormStore = PrivStore ++ BaseStore, - Opts#{ - hashpath => ignore, - cache_control => [<<"no-cache">>, <<"no-store">>], - store => NormStore - }. -``` - -### reset - -Unset all of the private keys in a message or deep Erlang term. - -```erlang -reset(Msg) when is_map(Msg) -> - maps:map( - fun(_Key, Val) -> reset(Val) end, - maps:without( - lists:filter(fun is_private/1, maps:keys(Msg)), - Msg - ) - ); -``` - -### reset - -Unset all of the private keys in a message or deep Erlang term. - -```erlang -reset(List) when is_list(List) -> - % Check if any of the terms in the list are private specifiers, return an - % empty list if so. -``` - -### reset - -```erlang -reset(Tuple) when is_tuple(Tuple) -> - list_to_tuple(reset(tuple_to_list(Tuple))); -``` - -### reset - -```erlang -reset(NonMapMessage) -> - NonMapMessage. -``` - -### set_private_test - -```erlang -set_private_test() -> - ?assertEqual( - #{<<"a">> => 1, <<"priv">> => #{<<"b">> => 2}}, - set(#{<<"a">> => 1}, <<"b">>, 2, #{}) - ), - Res = set(#{<<"a">> => 1}, <<"a">>, 1, #{}), - ?assertEqual(#{<<"a">> => 1, <<"priv">> => #{<<"a">> => 1}}, Res), - ?assertEqual( - #{<<"a">> => 1, <<"priv">> => #{<<"a">> => 1}}, - set(Res, <<"a">>, 1, #{}) - ). 
-``` - -### get_private_key_test - -```erlang -get_private_key_test() -> - M1 = #{<<"a">> => 1, <<"priv">> => #{<<"b">> => 2}}, - ?assertEqual(not_found, get(<<"a">>, M1, #{})), - {ok, [<<"a">>]} = hb_ao:resolve(M1, <<"keys">>, #{}), - ?assertEqual(2, get(<<"b">>, M1, #{})), - {error, _} = hb_ao:resolve(M1, <<"priv/a">>, #{}), - {error, _} = hb_ao:resolve(M1, <<"priv">>, #{}). -``` - -### get_deep_key_test - -```erlang -get_deep_key_test() -> - M1 = #{<<"a">> => 1, <<"priv">> => #{<<"b">> => #{<<"c">> => 3}}}, - ?assertEqual(3, get(<<"b/c">>, M1, #{})). -``` - -### priv_opts_store_read_link_test - -```erlang -priv_opts_store_read_link_test() -> - % Write a message to the public store. -``` - -### priv_opts_cache_read_message_test - -```erlang -priv_opts_cache_read_message_test() -> - hb:init(), - PublicStore = [hb_test_utils:test_store(hb_store_lmdb)], - OnlyPrivStore = [hb_test_utils:test_store(hb_store_fs)], - Opts = #{ store => PublicStore, priv_store => OnlyPrivStore }, - PrivOpts = opts(Opts), - % Use the `~scheduler@1.0' and `~process@1.0' infrastructure to write a - % complex message into the public store. -``` - ---- - -*Generated from [hb_private.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_private.erl)* diff --git a/docs/book/src/hb_process_monitor.erl.md b/docs/book/src/hb_process_monitor.erl.md deleted file mode 100644 index ea96dae16..000000000 --- a/docs/book/src/hb_process_monitor.erl.md +++ /dev/null @@ -1,106 +0,0 @@ -# hb_process_monitor - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_process_monitor.erl) - -## Exported Functions - -- `start/1` -- `start/2` -- `start/3` -- `stop/1` - ---- - -### start - -```erlang -start(ProcID) -> - start(ProcID, hb_opts:get(default_cron_rate)). -``` - -### start - -```erlang -start(ProcID, Rate) -> - start(ProcID, Rate, hb_client:cron_cursor(ProcID)). 
-``` - -### start - -```erlang -start(ProcID, Rate, Cursor) -> - Logger = hb_logger:start(), - Monitor = spawn( - fun() -> - server( - #state{ - proc_id = ProcID, - cursor = Cursor, - logger = Logger - } - ) - end), - Ticker = spawn(fun() -> ticker(Monitor, Rate) end), - hb_logger:register(Monitor), - hb_logger:log(Monitor, {ok, started_monitor, {ProcID, Rate, Cursor}}), - hb_logger:register(Ticker), - {Monitor, Logger}. -``` - -### stop - -```erlang -stop(PID) -> - PID ! stop. -``` - -### server - -```erlang -server(State) -> - receive - stop -> ok; - tick ->server(handle_crons(State)) - end. -``` - -### handle_crons - -```erlang -handle_crons(State) -> - case hb_client:cron(State#state.proc_id, State#state.cursor) of - {ok, HasNextPage, Results, Cursor} -> - lists:map( - fun(Res) -> - % TODO: Validate this - dev_mu:push(#{ message => Res }, State) - end, - Results - ), - NS = State#state{cursor = Cursor}, - case HasNextPage of - true -> NS; - false -> handle_crons(NS) - end; - Error -> - hb_logger:log(State#state.logger, Error), - State - end. -``` - -### ticker - -```erlang -ticker(Monitor, Rate) -> - case erlang:is_process_alive(Monitor) of - true -> - timer:sleep(Rate), - Monitor ! tick, - ticker(Monitor, Rate); - false -> - ok -``` - ---- - -*Generated from [hb_process_monitor.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_process_monitor.erl)* diff --git a/docs/book/src/hb_router.erl.md b/docs/book/src/hb_router.erl.md deleted file mode 100644 index 06c229ae3..000000000 --- a/docs/book/src/hb_router.erl.md +++ /dev/null @@ -1,44 +0,0 @@ -# hb_router - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_router.erl) - -Locate a service in the AO network. This module uses -URLs to locate services, so it can be used to locate -nodes using IP addresses or domain names. This also -allows us to use different protocols later, potentially. 
- ---- - -## Exported Functions - -- `find/2` -- `find/3` - ---- - -### find - -```erlang -find(Type, ID) -> - find(Type, ID, '_'). -``` - -### find - -```erlang -find(Type, ID, Address) -> - find(Type, ID, Address, #{}). -``` - -### find - -```erlang -find(Type, _ID, Address, Opts) -> - case hb_maps:get(Type, hb_opts:get(nodes), undefined, Opts) of - #{ Address := Node } -> {ok, Node}; - undefined -> {error, service_type_not_found} -``` - ---- - -*Generated from [hb_router.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_router.erl)* diff --git a/docs/book/src/hb_singleton.erl.md b/docs/book/src/hb_singleton.erl.md deleted file mode 100644 index 12a3cdbca..000000000 --- a/docs/book/src/hb_singleton.erl.md +++ /dev/null @@ -1,1104 +0,0 @@ -# hb_singleton - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_singleton.erl) - -A parser that translates AO-Core HTTP API requests in TABM format -into an ordered list of messages to evaluate. The details of this format -are described in `docs/ao-core-http-api.md`. -Syntax overview: -
-    Singleton: Message containing keys and a `path` field,
-               which may also contain a query string of key-value pairs.
-    Path:
-        - /Part1/Part2/.../PartN/ => [Part1, Part2, ..., PartN]
-        - /ID/Part2/.../PartN => [ID, Part2, ..., PartN]
-    Part: (Key + Resolution), Device?, #{ K => V}?
-        - Part => #{ path => Part }
-        - `Part&Key=Value => #{ path => Part, Key => Value }`
-        - `Part=Value&... => #{ path => Part, Part => Value, ... }`
-        - `Part&Key => #{ path => Part, Key => true }`
-        - `Part&k1=v1&k2=v2 => #{ path => Part, k1 => `<<"v1">>`, k2 => `<<"v2">>` }'
-        - `Part~Device => {as, Device, #{ path => Part }}`
-        - `Part~D&K1=V1 => {as, D, #{ path => Part, K1 => `<<"v1">>` }}'
-        - `pt&k1+int=1 => #{ path => pt, k1 => 1 }`
-        - `pt~d&k1+int=1 => {as, d, #{ path => pt, k1 => 1 }}`
-        - `(/nested/path) => Resolution of the path /nested/path`
-        - `(/nested/path&k1=v1) => (resolve /nested/path)#{k1 => v1}`
-        - `(/nested/path~D&K1=V1) => (resolve /nested/path)#{K1 => V1}`
-        - `pt&k1+res=(/a/b/c) => #{ path => pt, k1 => (resolve /a/b/c) }`
-    Key:
-        - key: `<<"value">>` => #{ key => `<<"value">>`, ... } for all messages
-        - n.key: `<<"value">>` => #{ key => `<<"value">>`, ... } for Nth message
-        - key+int: 1 => #{ key => 1, ... }
-        - key+res: /nested/path => #{ key => (resolve /nested/path), ... }
-        - N.Key+res=(/a/b/c) => #{ Key => (resolve /a/b/c), ... }
-
- ---- - -## Exported Functions - -- `from_path/1` -- `from/2` -- `to/1` - ---- - -### append_path - -```erlang --spec to(list(ao_message())) -> tabm_message(). -to(Messages) -> - % Iterate through all AO-Core messages folding them into the TABM message - % Scopes contains the following map: #{Key => [StageIndex, StageIndex2...]} - % that allows to scope keys to the given stage. -``` - -```erlang -append_path(PathPart, #{<<"path">> := Path} = Message) -> - hb_maps:put(<<"path">>, <>, Message); -``` - -### append_path - -```erlang --spec to(list(ao_message())) -> tabm_message(). -to(Messages) -> - % Iterate through all AO-Core messages folding them into the TABM message - % Scopes contains the following map: #{Key => [StageIndex, StageIndex2...]} - % that allows to scope keys to the given stage. -``` - -```erlang -append_path(PathPart, Message) -> - hb_maps:put(<<"path">>, <<"/", PathPart/binary>>, Message). -``` - -### type - -```erlang --spec to(list(ao_message())) -> tabm_message(). -to(Messages) -> - % Iterate through all AO-Core messages folding them into the TABM message - % Scopes contains the following map: #{Key => [StageIndex, StageIndex2...]} - % that allows to scope keys to the given stage. -``` - -```erlang -type(Value) when is_binary(Value) -> binary; -``` - -### type - -```erlang --spec to(list(ao_message())) -> tabm_message(). -to(Messages) -> - % Iterate through all AO-Core messages folding them into the TABM message - % Scopes contains the following map: #{Key => [StageIndex, StageIndex2...]} - % that allows to scope keys to the given stage. -``` - -```erlang -type(Value) when is_integer(Value) -> integer; -``` - -### type - -Normalize a singleton TABM message into a list of executable AO-Core - -```erlang --spec to(list(ao_message())) -> tabm_message(). 
-to(Messages) -> - % Iterate through all AO-Core messages folding them into the TABM message - % Scopes contains the following map: #{Key => [StageIndex, StageIndex2...]} - % that allows to scope keys to the given stage. -``` - -```erlang -type(_Value) -> unknown. -``` - -### from - -Normalize a singleton TABM message into a list of executable AO-Core - -```erlang --spec to(list(ao_message())) -> tabm_message(). -to(Messages) -> - % Iterate through all AO-Core messages folding them into the TABM message - % Scopes contains the following map: #{Key => [StageIndex, StageIndex2...]} - % that allows to scope keys to the given stage. -``` - -```erlang -from(RawMsg, Opts) when is_binary(RawMsg) -> - from(#{ <<"path">> => RawMsg }, Opts); -``` - -### from - -Normalize a singleton TABM message into a list of executable AO-Core - -```erlang --spec to(list(ao_message())) -> tabm_message(). -to(Messages) -> - % Iterate through all AO-Core messages folding them into the TABM message - % Scopes contains the following map: #{Key => [StageIndex, StageIndex2...]} - % that allows to scope keys to the given stage. -``` - -```erlang -from(RawMsg, Opts) -> - RawPath = hb_maps:get(<<"path">>, RawMsg, <<>>), - ?event(parsing, {raw_path, RawPath}), - {ok, Path, Query} = from_path(RawPath), - ?event(parsing, {parsed_path, Path, Query}), - MsgWithoutBasePath = - hb_maps:merge( - hb_maps:remove(<<"path">>, RawMsg), - Query - ), - % 2. Decode, split, and sanitize path segments. Each yields one step message. -``` - -### from_path - -Parse the relative reference into path, query, and fragment. 
- -```erlang -from_path(RelativeRef) -> - %?event(parsing, {raw_relative_ref, RawRelativeRef}), - %RelativeRef = hb_escape:decode(RawRelativeRef), - Decoded = decode_string(RelativeRef), - ?event(parsing, {parsed_relative_ref, Decoded}), - {Path, QKVList} = - case hb_util:split_depth_string_aware_single("?", Decoded) of - {_Sep, P, QStr} -> {P, cowboy_req:parse_qs(#{ qs => QStr })}; - {no_match, P, <<>>} -> {P, []} - end, - { - ok, - path_parts($/, Path), - hb_maps:from_list(QKVList) - }. -``` - -### path_messages - -Step 2: Decode, split and sanitize the path. Split by `/` but avoid - -```erlang -path_messages(Bin, Opts) when is_binary(Bin) -> - lists:map(fun(Part) -> parse_part(Part, Opts) end, path_parts([$/], Bin)). -``` - -### normalize_base - -Normalize the base path. - -```erlang -normalize_base([]) -> []; -``` - -### normalize_base - -Normalize the base path. - -```erlang -normalize_base([First|Rest]) when ?IS_ID(First) -> [First|Rest]; -``` - -### normalize_base - -Normalize the base path. - -```erlang -normalize_base([{as, DevID, First}|Rest]) -> [{as, DevID, First}|Rest]; -``` - -### normalize_base - -Normalize the base path. - -```erlang -normalize_base([Subres = {resolve, _}|Rest]) -> [Subres|Rest]; -``` - -### normalize_base - -Normalize the base path. -Split the path into segments, filtering out empty segments and - -```erlang -normalize_base(Rest) -> [#{}|Rest]. -``` - -### path_parts - -Normalize the base path. -Split the path into segments, filtering out empty segments and - -```erlang -path_parts(Sep, PathBin) when is_binary(PathBin) -> - Res = lists:filtermap( - fun(Part) -> - case byte_size(Part) of - 0 -> false; - TooLong when TooLong > ?MAX_SEGMENT_LENGTH -> - throw({error, segment_too_long, Part}); - _ -> {true, Part} - end - end, - all_path_parts(Sep, PathBin) - ), - ?event({path_parts, Res}), - Res. -``` - -### all_path_parts - -Extract all of the parts from the binary, given (a list of) separators. 
- -```erlang -all_path_parts(_Sep, <<>>) -> []; -``` - -### all_path_parts - -Extract all of the parts from the binary, given (a list of) separators. - -```erlang -all_path_parts(Sep, Bin) -> - hb_util:split_depth_string_aware(Sep, Bin). -``` - -### part - -Extract the characters from the binary until a separator is found. - -```erlang -part(Sep, Bin) when not is_list(Sep) -> - part([Sep], Bin); -``` - -### part - -Extract the characters from the binary until a separator is found. - -```erlang -part(Seps, Bin) -> - hb_util:split_depth_string_aware_single(Seps, Bin). -``` - -### apply_types - -Step 3: Apply types to values and remove specifiers. - -```erlang -apply_types(Msg, Opts) -> - hb_maps:fold( - fun(Key, Val, Acc) -> - {_, K, V} = maybe_typed(Key, Val, Opts), - hb_maps:put(K, V, Acc, Opts) - end, - #{}, - Msg, - Opts - ). -``` - -### group_scoped - -Step 4: Group headers/query by N-scope. - -```erlang -group_scoped(Map, Msgs) -> - {NScope, Global} = - hb_maps:fold( - fun(KeyBin, Val, {Ns, Gs}) -> - case parse_scope(KeyBin) of - {OkN, RealKey} when OkN > 0 -> - Curr = hb_maps:get(OkN, Ns, #{}), - Ns2 = hb_maps:put(OkN, hb_maps:put(RealKey, Val, Curr), Ns), - {Ns2, Gs}; - global -> {Ns, hb_maps:put(KeyBin, Val, Gs)} - end - end, - {#{}, #{}}, - Map - ), - [ - hb_maps:merge(Global, hb_maps:get(N, NScope, #{})) - || - N <- lists:seq(1, length(Msgs)) - ]. -``` - -### parse_scope - -Get the scope of a key. Adds 1 to account for the base message. - -```erlang -parse_scope(KeyBin) -> - case binary:split(KeyBin, <<".">>, [global]) of - [Front, Remainder] -> - case catch erlang:binary_to_integer(Front) of - NInt when is_integer(NInt) -> {NInt + 1, Remainder}; - _ -> throw({error, invalid_scope, KeyBin}) - end; - _ -> global - end. -``` - -### build_messages - -Step 5: Merge the base message with the scoped messages. - -```erlang -build_messages(Msgs, ScopedModifications, Opts) -> - do_build(1, Msgs, ScopedModifications, Opts). 
-``` - -### do_build - -```erlang -do_build(_, [], _, _) -> []; -``` - -### do_build - -```erlang -do_build(I, [{as, DevID, RawMsg} | Rest], ScopedKeys, Opts) when is_map(RawMsg) -> - % We are processing an `as' message. If the path is empty, we need to - % remove it from the message and the additional message, such that AO-Core - % returns only the message with the device specifier changed. If the message - % does have a path, AO-Core will subresolve it. -``` - -### do_build - -```erlang -do_build(I, [Msg | Rest], ScopedKeys, Opts) when not is_map(Msg) -> - [Msg | do_build(I + 1, Rest, ScopedKeys, Opts)]; -``` - -### do_build - -```erlang -do_build(I, [Msg | Rest], ScopedKeys, Opts) -> - Additional = lists:nth(I, ScopedKeys), - Merged = hb_maps:merge(Additional, Msg, Opts), - StepMsg = hb_message:convert( - Merged, - <<"structured@1.0">>, - Opts#{ topic => ao_internal } - ), - ?event(parsing, {build_messages, {base, Msg}, {additional, Additional}}), - [StepMsg | do_build(I + 1, Rest, ScopedKeys, Opts)]. -``` - -### parse_part - -Parse a path part into a message or an ID. - -```erlang -parse_part(ID, _Opts) when ?IS_ID(ID) -> ID; -``` - -### parse_part - -Parse a path part into a message or an ID. - -```erlang -parse_part(Part, Opts) -> - case maybe_subpath(Part, Opts) of - {resolve, Subpath} -> {resolve, Subpath}; - Part -> - case part([$&, $~, $+, $ , $=], Part) of - {no_match, PartKey, <<>>} -> - #{ <<"path">> => PartKey }; - {Sep, PartKey, PartModBin} -> - parse_part_mods( - << Sep:8/integer, PartModBin/binary >>, - #{ <<"path">> => PartKey }, - Opts - ) - end - end. 
-``` - -### parse_part_mods - -Parse part modifiers: - -```erlang -parse_part_mods([], Msg, _Opts) -> Msg; -``` - -### parse_part_mods - -Parse part modifiers: - -```erlang -parse_part_mods(<<>>, Msg, _Opts) -> Msg; -``` - -### parse_part_mods - -Parse part modifiers: - -```erlang -parse_part_mods(<<"~", PartMods/binary>>, Msg, Opts) -> - % Get the string until the end of the device specifier or end of string. -``` - -### parse_part_mods - -```erlang -parse_part_mods(<< "&", InlinedMsgBin/binary >>, Msg, Opts) -> - InlinedKeys = path_parts($&, InlinedMsgBin), - MsgWithInlined = - lists:foldl( - fun(InlinedKey, Acc) -> - {Key, Val} = parse_inlined_key_val(InlinedKey, Opts), - ?event({inlined_key, {explicit, Key}, {explicit, Val}}), - hb_maps:put(Key, Val, Acc) - end, - Msg, - InlinedKeys - ), - MsgWithInlined; -``` - -### parse_part_mods - -```erlang -parse_part_mods(<<$=, InlinedMsgBin/binary>>, M = #{ <<"path">> := Path }, Opts) - when map_size(M) =:= 1, is_binary(Path) -> - parse_part_mods(<< "&", Path/binary, "=", InlinedMsgBin/binary >>, M, Opts); -``` - -### parse_part_mods - -Extrapolate the inlined key-value pair from a path segment. If the - -```erlang -parse_part_mods(<<$+, InlinedMsgBin/binary>>, M = #{ <<"path">> := Path }, Opts) - when map_size(M) =:= 1, is_binary(InlinedMsgBin) -> - parse_part_mods(<< "&", Path/binary, "+", InlinedMsgBin/binary >>, M, Opts). -``` - -### parse_inlined_key_val - -Extrapolate the inlined key-value pair from a path segment. If the - -```erlang -parse_inlined_key_val(Bin, Opts) -> - case part([$=, $&], Bin) of - {no_match, K, <<>>} -> {K, true}; - {$=, K, RawV} -> - V = unquote(RawV), - {_, Key, Val} = maybe_typed(K, maybe_subpath(V, Opts), Opts), - {Key, Val} - end. -``` - -### unquote - -Unquote a string. - -```erlang -unquote(<<"\"", Inner/binary>>) -> - case binary:last(Inner) of - $" -> binary:part(Inner, 0, byte_size(Inner) - 1); - _ -> Inner - end; -``` - -### unquote - -Unquote a string. 
-Attempt Cowboy URL decode, then sanitize the result. - -```erlang -unquote(Bin) -> Bin. -``` - -### decode_string - -Unquote a string. -Attempt Cowboy URL decode, then sanitize the result. - -```erlang -decode_string(B) -> - case catch uri_string:unquote(B) of - DecodedBin when is_binary(DecodedBin) -> DecodedBin; - _ -> throw({error, cannot_decode, B}) - end. -``` - -### maybe_subpath - -Check if the string is a subpath, returning it in parsed form, - -```erlang -maybe_subpath(Str, Opts) when byte_size(Str) >= 2 -> - case {binary:first(Str), binary:last(Str)} of - {$(, $)} -> - Inside = binary:part(Str, 1, byte_size(Str) - 2), - {resolve, from(#{ <<"path">> => Inside }, Opts)}; - _ -> Str - end; -``` - -### maybe_subpath - -Check if the string is a subpath, returning it in parsed form, -Parse a key's type (applying it to the value) and device name if present. - -```erlang -maybe_subpath(Other, _Opts) -> Other. -``` - -### maybe_typed - -Check if the string is a subpath, returning it in parsed form, -Parse a key's type (applying it to the value) and device name if present. - -```erlang -maybe_typed(Key, Value, Opts) -> - case part([$+, $ ], Key) of - {no_match, OnlyKey, <<>>} -> {untyped, OnlyKey, Value}; - {_, OnlyKey, Type} -> - case {Type, Value} of - {<<"resolve">>, Subpath} -> - % If the value needs to be resolved before it is converted, - % use the `Codec/1.0' device to resolve it. -``` - -### maybe_join - -Join a list of items with a separator, or return the first item if there - -```erlang -maybe_join(Items, Sep) -> - case length(Items) of - 0 -> <<>>; - 1 -> hd(Items); - _ -> iolist_to_binary(lists:join(Sep, Items)) - end. 
-``` - -### parse_explicit_message_test - -```erlang -parse_explicit_message_test() -> - Singleton1 = #{ - <<"path">> => <<"/a">>, - <<"a">> => <<"b">> - }, - ?assertEqual( - [ - #{ <<"a">> => <<"b">>}, - #{ <<"path">> => <<"a">>, <<"a">> => <<"b">> } - ], - from(Singleton1, #{}) - ), - DummyID = hb_util:human_id(crypto:strong_rand_bytes(32)), - Singleton2 = #{ - <<"path">> => <<"/", DummyID/binary, "/a">> - }, - ?assertEqual([DummyID, #{ <<"path">> => <<"a">> }], from(Singleton2, #{})), - Singleton3 = #{ - <<"path">> => <<"/", DummyID/binary, "/a">>, - <<"a">> => <<"b">> - }, - ?assertEqual( - [DummyID, #{ <<"path">> => <<"a">>, <<"a">> => <<"b">> }], - from(Singleton3, #{}) - ). -``` - -### to_suite_test_ - -```erlang -to_suite_test_() -> - [ - fun simple_to_test/0, - fun multiple_messages_to_test/0, - fun basic_hashpath_to_test/0, - fun scoped_key_to_test/0, - fun typed_key_to_test/0, - fun subpath_in_key_to_test/0, - fun subpath_in_path_to_test/0, - fun inlined_keys_to_test/0, - fun multiple_inlined_keys_to_test/0, - fun subpath_in_inlined_to_test/0 - ]. -``` - -### simple_to_test - -```erlang -simple_to_test() -> - Messages = [ - #{<<"test-key">> => <<"test-value">>}, - #{<<"path">> => <<"a">>, <<"test-key">> => <<"test-value">>} - ], - Expected = #{<<"path">> => <<"/a">>, <<"test-key">> => <<"test-value">>}, - ?assertEqual(Expected, to(Messages)), - ?assertEqual(Messages, from(to(Messages), #{})). -``` - -### multiple_messages_to_test - -```erlang -multiple_messages_to_test() -> - Messages = - [ - #{<<"test-key">> => <<"test-value">>}, - #{<<"path">> => <<"a">>, <<"test-key">> => <<"test-value">>}, - #{<<"path">> => <<"b">>, <<"test-key">> => <<"test-value">>}, - #{<<"path">> => <<"c">>, <<"test-key">> => <<"test-value">>} - ], - Expected = #{ - <<"path">> => <<"/a/b/c">>, - <<"test-key">> => <<"test-value">> - }, - ?assertEqual(Expected, to(Messages)), - ?assertEqual(Messages, from(to(Messages), #{})). 
-``` - -### basic_hashpath_to_test - -```erlang -basic_hashpath_to_test() -> - Messages = [ - <<"e5ohB7TgMYRoc0BLllkmAqkqLy1SrliEkOPJlNPXBQ8">>, - #{<<"method">> => <<"GET">>, <<"path">> => <<"some-other">>} - ], - Expected = #{ - <<"path">> => <<"/e5ohB7TgMYRoc0BLllkmAqkqLy1SrliEkOPJlNPXBQ8/some-other">>, - <<"method">> => <<"GET">> - }, - ?assertEqual(Expected, to(Messages)), - ?assertEqual(Messages, from(to(Messages), #{})). -``` - -### scoped_key_to_test - -```erlang -scoped_key_to_test() -> - Messages = [ - #{}, - #{<<"path">> => <<"a">>}, - #{<<"path">> => <<"b">>, <<"test-key">> => <<"test-value">>}, - #{<<"path">> => <<"c">>} - ], - Expected = #{<<"2.test-key">> => <<"test-value">>, <<"path">> => <<"/a/b/c">>}, - ?assertEqual(Expected, to(Messages)), - ?assertEqual(Messages, from(to(Messages), #{})). -``` - -### typed_key_to_test - -```erlang -typed_key_to_test() -> - Messages = - [ - #{}, - #{<<"path">> => <<"a">>}, - #{<<"path">> => <<"b">>, <<"test-key">> => 123}, - #{<<"path">> => <<"c">>} - ], - Expected = #{<<"2.test-key+integer">> => <<"123">>, <<"path">> => <<"/a/b/c">>}, - ?assertEqual(Expected, to(Messages)), - ?assertEqual(Messages, from(to(Messages), #{})). -``` - -### subpath_in_key_to_test - -```erlang -subpath_in_key_to_test() -> - Messages = [ - #{}, - #{<<"path">> => <<"a">>}, - #{ - <<"path">> => <<"b">>, - <<"test-key">> => - {resolve, - [ - #{}, - #{<<"path">> => <<"x">>}, - #{<<"path">> => <<"y">>}, - #{<<"path">> => <<"z">>} - ] - } - }, - #{<<"path">> => <<"c">>} - ], - Expected = #{<<"2.test-key+resolve">> => <<"/x/y/z">>, <<"path">> => <<"/a/b/c">>}, - ?assertEqual(Expected, to(Messages)), - ?assertEqual(Messages, from(to(Messages), #{})). 
-``` - -### subpath_in_path_to_test - -```erlang -subpath_in_path_to_test() -> - Messages = [ - #{}, - #{<<"path">> => <<"a">>}, - {resolve, - [ - #{}, - #{<<"path">> => <<"x">>}, - #{<<"path">> => <<"y">>}, - #{<<"path">> => <<"z">>} - ] - }, - #{<<"path">> => <<"z">>} - ], - Expected = #{ - <<"path">> => <<"/a/(x/y/z)/z">> - }, - ?assertEqual(Expected, to(Messages)), - ?assertEqual(Messages, from(to(Messages), #{})). -``` - -### inlined_keys_to_test - -```erlang -inlined_keys_to_test() -> - Messages = - [ - #{<<"method">> => <<"POST">>}, - #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"a">> - }, - #{ - <<"k1">> => <<"v1">>, - <<"method">> => <<"POST">>, - <<"path">> => <<"b">> - }, - #{ - <<"k2">> => <<"v2">>, - <<"method">> => <<"POST">>, - <<"path">> => <<"c">> - } - ], - % NOTE: The implementation above does not convert the given list of messages - % into the original format, however it assures that the `to/1' and `from/1' - % operations are idempotent. -``` - -### multiple_inlined_keys_to_test - -```erlang -multiple_inlined_keys_to_test() -> - Messages = [ - #{<<"method">> => <<"POST">>}, - #{<<"method">> => <<"POST">>, <<"path">> => <<"a">>}, - #{ - <<"k1">> => <<"v1">>, - <<"k2">> => <<"v2">>, - <<"method">> => <<"POST">>, - <<"path">> => <<"b">> - } - ], - % NOTE: The implementation above does not convert the given list of messages - % into the original format, however it assures that the `to/1' and `from/1' - % operations are idempotent. -``` - -### subpath_in_inlined_to_test - -```erlang -subpath_in_inlined_to_test() -> - Messages = [ - #{}, - #{<<"path">> => <<"part1">>}, - #{<<"b">> => - {resolve, - [#{}, - #{<<"path">> => <<"x">>}, - #{<<"path">> => <<"y">>}]}, - <<"path">> => <<"part2">>, - <<"test">> => <<"1">>}, - #{<<"path">> => <<"part3">>}], - % NOTE: The implementation above does not convert the given list of messages - % into the original format, however it assures that the `to/1' and `from/1' - % operations are idempotent. 
-``` - -### single_message_test - -```erlang -single_message_test() -> - % This is a singleton TABM message - Req = #{ - <<"path">> => <<"/a">>, - <<"test-key">> => <<"test-value">> - }, - Msgs = from(Req, #{}), - ?assertEqual(2, length(Msgs)), - ?assert(is_map(hd(Msgs))), - ?assertEqual(<<"test-value">>, hb_maps:get(<<"test-key">>, hd(Msgs))). -``` - -### basic_hashpath_test - -```erlang -basic_hashpath_test() -> - Hashpath = hb_util:human_id(crypto:strong_rand_bytes(32)), - Path = <<"/", Hashpath/binary, "/some-other">>, - Req = #{ - <<"path">> => Path, - <<"method">> => <<"GET">> - }, - Msgs = from(Req, #{}), - ?assertEqual(2, length(Msgs)), - [Base, Msg2] = Msgs, - ?assertEqual(Base, Hashpath), - ?assertEqual(<<"GET">>, hb_maps:get(<<"method">>, Msg2)), - ?assertEqual(<<"some-other">>, hb_maps:get(<<"path">>, Msg2)). -``` - -### multiple_messages_test - -```erlang -multiple_messages_test() -> - Req = #{ - <<"path">> => <<"/a/b/c">>, - <<"test-key">> => <<"test-value">> - }, - Msgs = from(Req, #{}), - ?assertEqual(4, length(Msgs)), - [_Base, Msg1, Msg2, Msg3] = Msgs, - ?assert(lists:all(fun is_map/1, Msgs)), - ?assertEqual(<<"test-value">>, hb_maps:get(<<"test-key">>, Msg1)), - ?assertEqual(<<"test-value">>, hb_maps:get(<<"test-key">>, Msg2)), - ?assertEqual(<<"test-value">>, hb_maps:get(<<"test-key">>, Msg3)). -%%% Advanced key syntax tests -``` - -### scoped_key_test - -```erlang -scoped_key_test() -> - Req = #{ - <<"path">> => <<"/a/b/c">>, - <<"2.test-key">> => <<"test-value">> - }, - Msgs = from(Req, #{}), - ?assertEqual(4, length(Msgs)), - [_, Msg1, Msg2, Msg3] = Msgs, - ?assertEqual(not_found, hb_maps:get(<<"test-key">>, Msg1, not_found)), - ?assertEqual(<<"test-value">>, hb_maps:get(<<"test-key">>, Msg2, not_found)), - ?assertEqual(not_found, hb_maps:get(<<"test-key">>, Msg3, not_found)). 
-``` - -### typed_key_test - -```erlang -typed_key_test() -> - Req = #{ - <<"path">> => <<"/a/b/c">>, - <<"2.test-key+integer">> => <<"123">> - }, - Msgs = from(Req, #{}), - ?assertEqual(4, length(Msgs)), - [_, Msg1, Msg2, Msg3] = Msgs, - ?assertEqual(not_found, hb_maps:get(<<"test-key">>, Msg1, not_found)), - ?assertEqual(123, hb_maps:get(<<"test-key">>, Msg2, not_found)), - ?assertEqual(not_found, hb_maps:get(<<"test-key">>, Msg3, not_found)). -``` - -### subpath_in_key_test - -```erlang -subpath_in_key_test() -> - Req = #{ - <<"path">> => <<"/a/b/c">>, - <<"2.test-key+resolve">> => <<"/x/y/z">> - }, - Msgs = from(Req, #{}), - ?assertEqual(4, length(Msgs)), - [_, Msg1, Msg2, Msg3] = Msgs, - ?assertEqual(not_found, hb_maps:get(<<"test-key">>, Msg1, not_found)), - ?assertEqual( - {resolve, - [ - #{}, - #{ <<"path">> => <<"x">> }, - #{ <<"path">> => <<"y">> }, - #{ <<"path">> => <<"z">> } - ] - }, - hb_maps:get(<<"test-key">>, Msg2, not_found) - ), - ?assertEqual(not_found, hb_maps:get(<<"test-key">>, Msg3, not_found)). -%%% Advanced path syntax tests -``` - -### subpath_in_path_test - -```erlang -subpath_in_path_test() -> - Req = #{ - <<"path">> => <<"/a/(x/y/z)/z">> - }, - Msgs = from(Req, #{}), - ?assertEqual(4, length(Msgs)), - [_, Msg1, Msg2, Msg3] = Msgs, - ?assertEqual(<<"a">>, hb_maps:get(<<"path">>, Msg1)), - ?assertEqual( - {resolve, - [ - #{}, - #{ <<"path">> => <<"x">> }, - #{ <<"path">> => <<"y">> }, - #{ <<"path">> => <<"z">> } - ] - }, - Msg2 - ), - ?assertEqual(<<"z">>, hb_maps:get(<<"path">>, Msg3)). 
-``` - -### inlined_keys_test - -```erlang -inlined_keys_test() -> - Req = #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"/a/b&k1=v1/c&k2=v2">> - }, - Msgs = from(Req, #{}), - ?assertEqual(4, length(Msgs)), - [_, Msg1, Msg2, Msg3] = Msgs, - ?assertEqual(<<"v1">>, hb_maps:get(<<"k1">>, Msg2)), - ?assertEqual(<<"v2">>, hb_maps:get(<<"k2">>, Msg3)), - ?assertEqual(not_found, hb_maps:get(<<"k1">>, Msg1, not_found)), - ?assertEqual(not_found, hb_maps:get(<<"k2">>, Msg2, not_found)). -``` - -### inlined_quoted_key_test - -```erlang -inlined_quoted_key_test() -> - Req = #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"/a/b&k1=\"v/1\"/c&k2=v2">> - }, - Msgs = from(Req, #{}), - ?assertEqual(4, length(Msgs)), - [_, Msg1, Msg2, Msg3] = Msgs, - ?assertEqual(<<"v/1">>, hb_maps:get(<<"k1">>, Msg2)), - ?assertEqual(<<"v2">>, hb_maps:get(<<"k2">>, Msg3)), - ?assertEqual(not_found, hb_maps:get(<<"k1">>, Msg1, not_found)), - ?assertEqual(not_found, hb_maps:get(<<"k2">>, Msg2, not_found)), - ReqB = #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"/~profile@1.0/eval=%22~meta@1.0/info%22">> - }, - MsgsB = from(ReqB, #{}), - [_, Msg2b] = MsgsB, - ?assertEqual(<<"~meta@1.0/info">>, hb_maps:get(<<"eval">>, Msg2b)). 
-``` - -### inlined_assumed_key_test - -```erlang -inlined_assumed_key_test() -> - Req = #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"/a/b=4/c&k2=v2">> - }, - Msgs = from(Req, #{}), - ?assertEqual(4, length(Msgs)), - [_, Msg1, Msg2, Msg3] = Msgs, - ?event({parsed, Msgs}), - ?assertEqual(<<"4">>, hb_maps:get(<<"b">>, Msg2)), - ?assertEqual(not_found, hb_maps:get(<<"b">>, Msg1, not_found)), - ?assertEqual(not_found, hb_maps:get(<<"b">>, Msg3, not_found)), - ReqB = #{ - <<"method">> => <<"POST">>, - <<"path">> => <<"/a/b+integer=4/c&k2=v2">> - }, - MsgsB = from(ReqB, #{}), - [_, Msg1b, Msg2b, Msg3b] = MsgsB, - ?event({parsed, MsgsB}), - ?assertEqual(4, hb_maps:get(<<"b">>, Msg2b)), - ?assertEqual(not_found, hb_maps:get(<<"b">>, Msg1b, not_found)), - ?assertEqual(not_found, hb_maps:get(<<"b">>, Msg3b, not_found)). -``` - -### multiple_inlined_keys_test - -```erlang -multiple_inlined_keys_test() -> - Path = <<"/a/b&k1=v1&k2=v2">>, - Req = #{ - <<"method">> => <<"POST">>, - <<"path">> => Path - }, - Msgs = from(Req, #{}), - ?assertEqual(3, length(Msgs)), - [_, Msg1, Msg2] = Msgs, - ?assertEqual(not_found, hb_maps:get(<<"k1">>, Msg1, not_found)), - ?assertEqual(not_found, hb_maps:get(<<"k2">>, Msg1, not_found)), - ?assertEqual(<<"v1">>, hb_maps:get(<<"k1">>, Msg2, not_found)), - ?assertEqual(<<"v2">>, hb_maps:get(<<"k2">>, Msg2, not_found)). -``` - -### subpath_in_inlined_test - -```erlang -subpath_in_inlined_test() -> - Path = <<"/part1/part2&test=1&b=(/x/y)/part3">>, - Req = #{ - <<"path">> => Path - }, - Msgs = from(Req, #{}), - ?assertEqual(4, length(Msgs)), - [_, First, Second, Third] = Msgs, - ?assertEqual(<<"part1">>, hb_maps:get(<<"path">>, First)), - ?assertEqual(<<"part3">>, hb_maps:get(<<"path">>, Third)), - ?assertEqual( - {resolve, [#{}, #{ <<"path">> => <<"x">> }, #{ <<"path">> => <<"y">> }] }, - hb_maps:get(<<"b">>, Second) - ). 
-``` - -### path_parts_test - -```erlang -path_parts_test() -> - ?assertEqual( - [<<"a">>, <<"b&c=(/d/e)">>, <<"f">>], - path_parts($/, <<"/a/b&c=(/d/e)/f">>) - ), - ?assertEqual([<<"a">>], path_parts($/, <<"/a">>)), - ?assertEqual([<<"a">>, <<"b">>, <<"c">>], path_parts($/, <<"/a/b/c">>)), - ?assertEqual( - [ - <<"IYkkrqlZNW_J-4T-5eFApZOMRl5P4VjvrcOXWvIqB1Q">>, - <<"msg2">> - ], - path_parts($/, <<"/IYkkrqlZNW_J-4T-5eFApZOMRl5P4VjvrcOXWvIqB1Q/msg2">>) - ), - ?assertEqual( - [<<"a">>, <<"b&K1=V1">>, <<"c&K2=V2">>], - path_parts($/, <<"/a/b&K1=V1/c&K2=V2">>) - ), - ?assertEqual( - [<<"a">>, <<"(x/y/z)">>, <<"c">>], - path_parts($/, <<"/a/(x/y/z)/c">>) - ), -``` - ---- - -*Generated from [hb_singleton.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_singleton.erl)* diff --git a/docs/book/src/hb_store.erl.md b/docs/book/src/hb_store.erl.md deleted file mode 100644 index 0592fa33e..000000000 --- a/docs/book/src/hb_store.erl.md +++ /dev/null @@ -1,1123 +0,0 @@ -# hb_store - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store.erl) - -A simple abstraction layer for AO key value store operations. -This interface allows us to swap out the underlying store implementation(s) -as desired, without changing the API that `hb_cache` employs. Additionally, -it enables node operators to customize their configuration to maximize -performance, data availability, and other factors. -Stores can be represented in a node's configuration as either a single -message, or a (`structured@1.0`) list of store messages. If a list of stores -is provided, the node will cycle through each until a viable store is found -to execute the given function. -A valid store must implement a _subset_ of the following functions: -``` - start/1: Initialize the store. - stop/1: Stop any processes (etc.) that manage the store. - reset/1: Restore the store to its original, empty state. 
- scope/0: A tag describing the 'scope' of a stores search: `in_memory`, - `local`, `remote`, `arweave`, etc. Used in order to allow - node operators to prioritize their stores for search. - make_group/2: Create a new group of keys in the store with the given ID. - make_link/3: Create a link (implying one key should redirect to another) - from `existing` to `new` (in that order). - type/2: Return whether the value found at the given key is a - `composite` (group) type, or a `simple` direct binary. - read/2: Read the data at the given location, returning a binary - if it is a `simple` value, or a message if it is a complex - term. - write/3: Write the given `key` with the associated `value` (in that - order) to the store. - list/2: For `composite` type keys, return a list of its child keys. - path/2: Optionally transform a list of path parts into the store's - canonical form. -''' -Each function takes a `store` message first, containing an arbitrary set -of its necessary configuration keys, as well as the `store-module` key which -refers to the Erlang module that implements the store. -All functions must return `ok` or `{ok, Result}`, as appropriate. Other -results will lead to the store manager (this module) iterating to the next -store message given by the user. If none of the given store messages are -able to execute a requested service, the store manager will return -`not_found`. - ---- - -## Exported Functions - -- `add_path/2` -- `add_path/3` -- `behavior_info/1` -- `filter/2` -- `find/1` -- `generate_test_suite/1` -- `generate_test_suite/2` -- `join/1` -- `list/2` -- `make_group/2` -- `make_link/3` -- `match/2` -- `path/1` -- `path/2` -- `read/2` -- `reset/1` -- `resolve/2` -- `scope/2` -- `sort/2` -- `start/1` -- `stop/1` -- `test_stores/0` -- `type/2` -- `write/3` - ---- - -### behavior_info - -A simple abstraction layer for AO key value store operations. -The number of write and read operations to perform in the benchmark. 
- -```erlang -behavior_info(callbacks) -> - [ - {start, 1}, {stop, 1}, {reset, 1}, {make_group, 2}, {make_link, 3}, - {type, 2}, {read, 2}, {write, 3}, - {list, 2}, {match, 2}, {path, 2}, {add_path, 3} - ]. -``` - -### set - -Store access policies to function names. -Set the instance options for a given store module and name combination. - -```erlang -set(StoreOpts, InstanceTerm) -> - Mod = maps:get(<<"store-module">>, StoreOpts), - set( - Mod, - maps:get(<<"name">>, StoreOpts, Mod), - InstanceTerm - ). -``` - -### set - -```erlang -set(StoreMod, Name, undefined) -> - StoreRef = {store, StoreMod, Name}, - erlang:erase(StoreRef), - persistent_term:erase(StoreRef); -``` - -### set - -```erlang -set(StoreMod, Name, InstanceTerm) -> - StoreRef = {store, StoreMod, Name}, - put(StoreRef, InstanceTerm), - persistent_term:put(StoreRef, InstanceTerm), - ok. -``` - -### find - -Find or spawn a store instance by its store opts. - -```erlang -find(StoreOpts) -> - {Time, Result} = timer:tc(fun() -> do_find(StoreOpts) end), - hb_event:increment(<<"store_duration">>, <<"find">>, #{}, Time), - hb_event:increment(<<"store">>, <<"find">>, #{}, 1), - Result. -``` - -### find - -```erlang -find(StoreOpts) -> - do_find(StoreOpts). -``` - -### do_find - -```erlang -do_find(StoreOpts = #{ <<"store-module">> := Mod }) -> - Name = maps:get(<<"name">>, StoreOpts, Mod), - LookupName = {store, Mod, Name}, - case get(LookupName) of - undefined -> - try persistent_term:get(LookupName) of - Instance1 -> - EnsuredInstance = ensure_instance_alive(StoreOpts, Instance1), - put(LookupName, EnsuredInstance), - EnsuredInstance - catch - error:badarg -> spawn_instance(StoreOpts) - end; - InstanceMessage -> - ensure_instance_alive(StoreOpts, InstanceMessage) - end. -``` - -### spawn_instance - -Create a new instance of a store and return its term. 
- -```erlang -spawn_instance(StoreOpts = #{ <<"store-module">> := Mod }) -> - Name = maps:get(<<"name">>, StoreOpts, Mod), - try Mod:start(StoreOpts) of - ok -> ok; - {ok, InstanceMessage} -> - set(Mod, Name, InstanceMessage), - InstanceMessage; - {error, Reason} -> - ?event(error, {store_start_failed, {Mod, Name, Reason}}), - throw({store_start_failed, {Mod, Name, Reason}}) - catch error:undef -> - ok - end. -``` - -### ensure_instance_alive - -Handle a found instance message. If it contains a PID, we check if it - -```erlang -ensure_instance_alive(StoreOpts, InstanceMessage = #{ <<"pid">> := Pid }) -> - case is_process_alive(Pid) of - true -> InstanceMessage; - false -> spawn_instance(StoreOpts) - end; -``` - -### ensure_instance_alive - -Handle a found instance message. If it contains a PID, we check if it - -```erlang -ensure_instance_alive(_, InstanceMessage) -> - InstanceMessage. -``` - -### start - -Ensure that a store, or list of stores, have all been started. - -```erlang -start(StoreOpts) when not is_list(StoreOpts) -> start([StoreOpts]); -``` - -### start - -Ensure that a store, or list of stores, have all been started. - -```erlang -start([]) -> ok; -``` - -### start - -Ensure that a store, or list of stores, have all been started. - -```erlang -start([StoreOpts | Rest]) -> - find(StoreOpts), - start(Rest). -``` - -### stop - -```erlang -stop(Modules) -> - call_function(Modules, stop, []). -``` - -### filter - -Takes a store object and a filter function or match spec, returning a - -```erlang -filter(Module, Filter) when not is_list(Module) -> - filter([Module], Filter); -``` - -### filter - -Takes a store object and a filter function or match spec, returning a - -```erlang -filter(Modules, Filter) -> - lists:filter( - fun(Store) -> - try Filter(get_store_scope(Store), Store) - catch _:_ -> false - end - end, - Modules - ). -``` - -### scope - -Limit the store scope to only a specific (set of) option(s). 
- -```erlang -scope(Opts, Scope) when is_map(Opts) -> - case hb_opts:get(store, no_viable_store, Opts) of - no_viable_store -> Opts; - Store when is_list(Store) -> - % Store is already a list, apply scope normally - Opts#{ store => scope(Store, Scope) }; - Store when is_map(Store) -> - % Check if Store already has a nested 'store' key - case maps:find(store, Store) of - {ok, _NestedStores} -> - % Already has nested structure, return as-is - Opts; - error -> - % Single store map, wrap in list before scoping - % This ensures consistent behavior - Opts#{ store => scope([Store], Scope) } - end - end; -``` - -### scope - -Limit the store scope to only a specific (set of) option(s). - -```erlang -scope(Store, Scope) -> - filter( - Store, - fun(StoreScope, _) -> - StoreScope == Scope orelse - (is_list(Scope) andalso lists:member(StoreScope, Scope)) - end - ). -``` - -### get_store_scope - -Ask a store for its own scope. If it doesn't have one, return the - -```erlang -get_store_scope(Store) -> - case call_function(Store, scope, []) of - not_found -> ?DEFAULT_SCOPE; - Scope -> Scope - end. -``` - -### sort - -Order a store by a preference of its scopes. This is useful for making - -```erlang -sort(Stores, PreferenceOrder) when is_list(PreferenceOrder) -> - sort( - Stores, - hb_maps:from_list( - [ - {Scope, -Index} - || - {Scope, Index} <- - lists:zip( - PreferenceOrder, - lists:seq(1, length(PreferenceOrder)) - ) - ] - ) - ); -``` - -### sort - -Order a store by a preference of its scopes. This is useful for making - -```erlang -sort(Stores, ScoreMap) -> - lists:sort( - fun(Store1, Store2) -> - hb_maps:get(get_store_scope(Store1), ScoreMap, 0) > - hb_maps:get(get_store_scope(Store2), ScoreMap, 0) - end, - Stores - ). -``` - -### join - -Join a list of path components together. - -```erlang -join(Path) -> hb_path:to_binary(Path). -%%% The store interface that modules should implement. -``` - -### read - -Read a key from the store. -Write a key with a value to the store. 
- -```erlang -read(Modules, Key) -> call_function(Modules, read, [Key]). -``` - -### write - -Read a key from the store. -Write a key with a value to the store. -Make a group in the store. A group can be seen as a namespace or - -```erlang -write(Modules, Key, Value) -> call_function(Modules, write, [Key, Value]). -``` - -### make_group - -Read a key from the store. -Write a key with a value to the store. -Make a group in the store. A group can be seen as a namespace or -Make a link from one path to another in the store. - -```erlang -make_group(Modules, Path) -> call_function(Modules, make_group, [Path]). -``` - -### make_link - -Read a key from the store. -Write a key with a value to the store. -Make a group in the store. A group can be seen as a namespace or -Make a link from one path to another in the store. - -```erlang -make_link(Modules, Existing, New) -> - call_function(Modules, make_link, [Existing, New]). -``` - -### reset - -Delete all of the keys in a store. Should be used with extreme -Get the type of element of a given path in the store. This can be - -```erlang -reset(Modules) -> call_function(Modules, reset, []). -``` - -### type - -Delete all of the keys in a store. Should be used with extreme -Get the type of element of a given path in the store. This can be -Create a path from a list of path components. If no store implements - -```erlang -type(Modules, Path) -> call_function(Modules, type, [Path]). -``` - -### path - -Delete all of the keys in a store. Should be used with extreme -Get the type of element of a given path in the store. This can be -Create a path from a list of path components. If no store implements - -```erlang -path(Path) -> join(Path). -``` - -### path - -Delete all of the keys in a store. Should be used with extreme -Get the type of element of a given path in the store. This can be -Create a path from a list of path components. If no store implements -Add two path components together. 
If no store implements the add_path - -```erlang -path(_, Path) -> path(Path). -``` - -### add_path - -Delete all of the keys in a store. Should be used with extreme -Get the type of element of a given path in the store. This can be -Create a path from a list of path components. If no store implements -Add two path components together. If no store implements the add_path - -```erlang -add_path(Path1, Path2) -> Path1 ++ Path2. -``` - -### add_path - -Delete all of the keys in a store. Should be used with extreme -Get the type of element of a given path in the store. This can be -Create a path from a list of path components. If no store implements -Add two path components together. If no store implements the add_path - -```erlang -add_path(Store, Path1, Path2) -> - case call_function(Store, add_path, [Path1, Path2]) of - not_found -> add_path(Path1, Path2); - Result -> Result - end. -``` - -### resolve - -Follow links through the store to resolve a path to its ultimate target. -List the keys in a group in the store. Use only in debugging. - -```erlang -resolve(Modules, Path) -> call_function(Modules, resolve, [Path]). -``` - -### list - -Follow links through the store to resolve a path to its ultimate target. -List the keys in a group in the store. Use only in debugging. -Match a series of keys and values against the store. Returns - -```erlang -list(Modules, Path) -> call_function(Modules, list, [Path]). -``` - -### match - -Follow links through the store to resolve a path to its ultimate target. -List the keys in a group in the store. Use only in debugging. -Match a series of keys and values against the store. Returns -Call a function on the first store module that succeeds. Returns its - -```erlang -match(Modules, Match) -> call_function(Modules, match, [Match]). --ifdef(STORE_EVENTS). 
-``` - -### call_function - -```erlang -call_function(X, Function, Args) -> - {Time, Result} = timer:tc(fun() -> do_call_function(X, Function, Args) end), - ?event(store_events, - {store_call, - {function, Function}, - {args, Args}, - {primary_store, - case X of - [PrimaryStore | _] -> PrimaryStore; - _ -> X - end - }, - {time, Time}, - {result, Result} - } - ), - hb_event:increment(<<"store_duration">>, hb_util:bin(Function), #{}, Time), - hb_event:increment(<<"store">>, hb_util:bin(Function), #{}, 1), - Result. -``` - -### call_function - -```erlang -call_function(X, Function, Args) -> - do_call_function(X, Function, Args). -``` - -### do_call_function - -```erlang -do_call_function(X, _Function, _Args) when not is_list(X) -> - do_call_function([X], _Function, _Args); -``` - -### do_call_function - -```erlang -do_call_function([], _Function, _Args) -> - not_found; -``` - -### do_call_function - -```erlang -do_call_function([Store = #{<<"access">> := Access} | Rest], Function, Args) -> - % If the store has an access controls, check if the function is allowed from - % the stated policies. -``` - -### do_call_function - -```erlang -do_call_function([Store = #{<<"store-module">> := Mod} | Rest], Function, Args) -> - % Attempt to apply the function. If it fails, try the next store. -``` - -### apply_store_function - -Apply a store function, checking if the store returns a retry request or - -```erlang -apply_store_function(Mod, Store, Function, Args) -> - MaxAttempts = maps:get(<<"max-retries">>, Store, ?DEFAULT_RETRIES) + 1, - apply_store_function(Mod, Store, Function, Args, MaxAttempts). -``` - -### apply_store_function - -```erlang -apply_store_function(_Mod, _Store, _Function, _Args, 0) -> - % Too many attempts have already failed. Bail. 
-``` - -### apply_store_function - -```erlang -apply_store_function(Mod, Store, Function, Args, AttemptsRemaining) -> - try apply(Mod, Function, [Store | Args]) of - retry -> retry(Mod, Store, Function, Args, AttemptsRemaining); - Other -> Other - catch Class:Reason:Stacktrace -> - ?event(store_error, - {store_call_failed_retrying, - {store, Store}, - {function, Function}, - {args, Args}, - {class, Class}, - {reason, Reason}, - {stacktrace, {trace, Stacktrace}} - } - ), - retry(Mod, Store, Function, Args, AttemptsRemaining) - end. -``` - -### retry - -Stop and start the store, then retry. - -```erlang -retry(Mod, Store, Function, Args, AttemptsRemaining) -> - % Attempt to stop the store and start it again, then retry. -``` - -### call_all - -Call a function on all modules in the store. - -```erlang -call_all(X, _Function, _Args) when not is_list(X) -> - call_all([X], _Function, _Args); -``` - -### call_all - -Call a function on all modules in the store. - -```erlang -call_all([], _Function, _Args) -> - ok; -``` - -### call_all - -Call a function on all modules in the store. - -```erlang -call_all([Store = #{<<"store-module">> := Mod} | Rest], Function, Args) -> - try apply_store_function(Mod, Function, Store, Args) - catch - Class:Reason:Stacktrace -> - ?event(warning, {store_call_failed, {Class, Reason, Stacktrace}}), - ok - end, - call_all(Rest, Function, Args). -``` - -### test_stores - -Return a list of stores for testing. Additional individual functions are - -```erlang -test_stores() -> - [ - (hb_test_utils:test_store(hb_store_fs))#{ - <<"benchmark-scale">> => 0.001 - }, - (hb_test_utils:test_store(hb_store_lmdb))#{ - <<"benchmark-scale">> => 0.5 - }, - (hb_test_utils:test_store(hb_store_lru))#{ - <<"persistent-store">> => [ - #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST/lru">> - } - ] - } - ] ++ rocks_stores(). 
-``` - -### rocks_stores - -```erlang -rocks_stores() -> - [ - #{ - <<"store-module">> => hb_store_rocksdb, - <<"name">> => <<"cache-TEST/rocksdb">> - } - ]. -``` - -### rocks_stores - -```erlang -rocks_stores() -> []. --endif. -``` - -### generate_test_suite - -```erlang -generate_test_suite(Suite) -> - generate_test_suite(Suite, test_stores()). -``` - -### generate_test_suite - -```erlang -generate_test_suite(Suite, Stores) -> - hb:init(), - lists:map( - fun(Store = #{<<"store-module">> := Mod}) -> - {foreach, - fun() -> - hb_store:start(Store) - end, - fun(_) -> - hb_store:reset(Store) - % hb_store:stop(Store) - end, - [ - { - atom_to_list(Mod) ++ ": " ++ Desc, - { - timeout, - 60, - fun() -> - TestResult = Test(Store), - TestResult - end - } - } - || - {Desc, Test} <- Suite - ] - } - end, - Stores - ). -``` - -### simple_path_resolution_test - -Test path resolution dynamics. -Ensure that we can resolve links recursively. - -```erlang -simple_path_resolution_test(Store) -> - ok = hb_store:write(Store, <<"test-file">>, <<"test-data">>), - hb_store:make_link(Store, <<"test-file">>, <<"test-link">>), - ?assertEqual({ok, <<"test-data">>}, hb_store:read(Store, <<"test-link">>)). -``` - -### resursive_path_resolution_test - -Test path resolution dynamics. -Ensure that we can resolve links recursively. -Ensure that we can resolve links through a directory. - -```erlang -resursive_path_resolution_test(Store) -> - hb_store:write(Store, <<"test-file">>, <<"test-data">>), - hb_store:make_link(Store, <<"test-file">>, <<"test-link">>), - hb_store:make_link(Store, <<"test-link">>, <<"test-link2">>), - ?assertEqual({ok, <<"test-data">>}, hb_store:read(Store, <<"test-link2">>)). -``` - -### hierarchical_path_resolution_test - -Test path resolution dynamics. -Ensure that we can resolve links recursively. -Ensure that we can resolve links through a directory. 
- -```erlang -hierarchical_path_resolution_test(Store) -> - hb_store:make_group(Store, <<"test-dir1">>), - hb_store:write(Store, [<<"test-dir1">>, <<"test-file">>], <<"test-data">>), - hb_store:make_link(Store, [<<"test-dir1">>], <<"test-link">>), - ?assertEqual( - {ok, <<"test-data">>}, - hb_store:read(Store, [<<"test-link">>, <<"test-file">>]) - ). -``` - -### store_suite_test_ - -```erlang -store_suite_test_() -> - generate_test_suite([ - {"simple path resolution", fun simple_path_resolution_test/1}, - {"resursive path resolution", fun resursive_path_resolution_test/1}, - {"hierarchical path resolution", fun hierarchical_path_resolution_test/1} - ]). -``` - -### benchmark_suite_test_ - -```erlang -benchmark_suite_test_() -> - generate_test_suite([ - {"benchmark key read write", fun benchmark_key_read_write/1}, - {"benchmark list", fun benchmark_list/1}, - {"benchmark message read write", fun benchmark_message_read_write/1} - ]). -``` - -### benchmark_key_read_write - -Benchmark a store. By default, we write 10,000 keys and read 10,000 - -```erlang -benchmark_key_read_write(Store = #{ <<"benchmark-scale">> := Scale }) -> - benchmark_key_read_write( - Store, - erlang:ceil(Scale * ?STORE_BENCH_WRITE_OPS), - erlang:ceil(Scale * ?STORE_BENCH_READ_OPS) - ); -``` - -### benchmark_key_read_write - -Benchmark a store. By default, we write 10,000 keys and read 10,000 - -```erlang -benchmark_key_read_write(Store) -> - benchmark_key_read_write(Store, ?STORE_BENCH_WRITE_OPS, ?STORE_BENCH_READ_OPS). -``` - -### benchmark_key_read_write - -```erlang -benchmark_key_read_write(Store, WriteOps, ReadOps) -> - start(Store), - timer:sleep(100), - ?event( - {benchmarking, - {store, Store}, - {write_ops, WriteOps}, - {read_ops, ReadOps} - } - ), - % Generate random data to write and the keys to read ahead of time. 
-``` - -### benchmark_list - -```erlang -benchmark_list(Store = #{ <<"benchmark-scale">> := Scale }) -> - benchmark_list( - Store, - erlang:ceil(Scale * ?STORE_BENCH_LIST_KEYS), - erlang:ceil(Scale * ?STORE_BENCH_LIST_OPS), - erlang:ceil(Scale * ?STORE_BENCH_LIST_GROUP_SIZE) - ); -``` - -### benchmark_list - -```erlang -benchmark_list(Store) -> - benchmark_list( - Store, - ?STORE_BENCH_LIST_KEYS, - ?STORE_BENCH_LIST_OPS, - ?STORE_BENCH_LIST_GROUP_SIZE - ). -``` - -### benchmark_list - -```erlang -benchmark_list(Store, WriteOps, ListOps, GroupSize) -> - start(Store), - timer:sleep(100), - ?event( - {benchmarking, - {store, Store}, - {keys, hb_util:human_int(WriteOps)}, - {groups, hb_util:human_int(WriteOps div GroupSize)}, - {lists, hb_util:human_int(ListOps)} - } - ), - % Generate a random message to write and the keys to read ahead of time. -``` - -### benchmark_message_read_write - -```erlang -benchmark_message_read_write(Store = #{ <<"benchmark-scale">> := Scale }) -> - benchmark_message_read_write( - Store, - erlang:ceil(Scale * ?BENCH_MSG_WRITE_OPS), - erlang:ceil(Scale * ?BENCH_MSG_READ_OPS) - ); -``` - -### benchmark_message_read_write - -```erlang -benchmark_message_read_write(Store) -> - benchmark_message_read_write(Store, ?BENCH_MSG_WRITE_OPS, ?BENCH_MSG_READ_OPS). -``` - -### benchmark_message_read_write - -```erlang -benchmark_message_read_write(Store, WriteOps, ReadOps) -> - start(Store), - Opts = #{ store => Store, priv_wallet => hb:wallet() }, - TestDataSize = ?BENCH_MSG_DATA_SIZE * 8, % in _bits_ - timer:sleep(100), - ?event( - {benchmarking, - {store, Store}, - {write_ops, WriteOps}, - {read_ops, ReadOps} - } - ), - % Generate a random message to write and the keys to read ahead of time. 
-``` - -### read_only_access_test - -Test that read-only stores allow read operations but block write operations - -```erlang -read_only_access_test() -> - TestStore = hb_test_utils:test_store(hb_store_fs, <<"access-read-only">>), - ReadOnlyStore = TestStore#{<<"access">> => [<<"read">>]}, - WriteStore = hb_test_utils:test_store(hb_store_fs, <<"access-write">>), - StoreList = [ReadOnlyStore, WriteStore], - TestKey = <<"test-key">>, - TestValue = <<"test-value">>, - start(StoreList), - ?event(testing, {read_only_test_started}), - WriteResponse = write(StoreList, TestKey, TestValue), - ?assertEqual(ok, WriteResponse), - ?event(testing, {write_used_fallback_store, WriteResponse}), - ReadResponse = read(StoreList, TestKey), - ?assertEqual({ok, TestValue}, ReadResponse), - ?event(testing, {read_succeeded, ReadResponse}), - ReadOnlyStoreState = read([ReadOnlyStore], TestKey), - WriteStoreState = read([WriteStore], TestKey), - ?event(testing, { - store_state, {read_only, ReadOnlyStoreState},{ write, WriteStoreState} - }), - ?assertEqual(not_found, ReadOnlyStoreState), - ?assertEqual({ok, TestValue}, WriteStoreState). 
-``` - -### write_only_access_test - -Test that write-only stores allow write operations but block read operations - -```erlang -write_only_access_test() -> - WriteOnlyStore = - (hb_test_utils:test_store(hb_store_fs, <<"access-write-only">>))#{ - <<"access">> => [<<"write">>] - }, - ReadStore = hb_test_utils:test_store(hb_store_fs, <<"access-read-fallback">>), - StoreList = [WriteOnlyStore, ReadStore], - TestKey = <<"write-test-key">>, - TestValue = <<"write-test-value">>, - start(StoreList), - ?event(testing, {write_only_test_started}), - ?assertEqual(ok, write(StoreList, TestKey, TestValue)), - ?event(testing, {write_succeeded_on_write_only}), - ReadStoreState = read(StoreList, TestKey), - ?assertEqual(not_found, ReadStoreState), - ?event(testing, {read_skipped_write_only_store, ReadStoreState}), - WriteOnlyStoreNoAccess = maps:remove(<<"access">>, WriteOnlyStore), - ReadStoreNoAccess = read([WriteOnlyStoreNoAccess], TestKey), - ?event(testing, {store, ReadStoreNoAccess}), - ?assertEqual({ok, TestValue}, ReadStoreNoAccess). -``` - -### admin_only_access_test - -Test admin-only stores for start/stop/reset operations - -```erlang -admin_only_access_test() -> - AdminOnlyStore = - (hb_test_utils:test_store(hb_store_fs, <<"access-admin-only">>))#{ - <<"access">> => [<<"admin">>, <<"read">>, <<"write">>] - }, - StoreList = [AdminOnlyStore], - TestKey = <<"admin-test-key">>, - TestValue = <<"admin-test-value">>, - start(StoreList), - ?assertEqual(ok, write(StoreList, TestKey, TestValue)), - ?assertEqual({ok, TestValue}, read(StoreList, TestKey)), - reset(StoreList), - ?assertEqual(ok, start(StoreList)), - ?assertEqual(not_found, read(StoreList, TestKey)). 
-``` - -### multi_access_permissions_test - -Test multiple access permissions - -```erlang -multi_access_permissions_test() -> - ReadWriteStore = - (hb_test_utils:test_store(hb_store_fs, <<"access-read-write">>))#{ - <<"access">> => [<<"read">>, <<"write">>] - }, - AdminStore = - (hb_test_utils:test_store(hb_store_fs, <<"access-admin-fallback">>))#{ - <<"access">> => [<<"admin">>] - }, - StoreList = [ReadWriteStore, AdminStore], - TestKey = <<"multi-access-key">>, - TestValue = <<"multi-access-value">>, - start(StoreList), - ?event(testing, {multi_access_test_started}), - ?assertEqual(ok, write(StoreList, TestKey, TestValue)), - ?event(testing, {write_succeeded_on_read_write_store}), - ?assertEqual({ok, TestValue}, read(StoreList, TestKey)), - ?event(testing, {read_succeeded_on_read_write_store}), - reset(StoreList), - ?assertEqual(ok, start(StoreList)), - ?assertEqual(not_found, read(StoreList, TestKey)). -``` - -### store_access_list_test - -Test access control with a list of stores. - -```erlang -store_access_list_test() -> - % Chain: Read-only -> Write-only -> Unrestricted - ReadOnlyStore = - (hb_test_utils:test_store(hb_store_fs, <<"chain-read-only">>))#{ - <<"access">> => [<<"read">>] - }, - WriteOnlyStore = - (hb_test_utils:test_store(hb_store_fs, <<"chain-write-only">>))#{ - <<"access">> => [<<"write">>] - }, - UnrestrictedStore = - hb_test_utils:test_store(hb_store_fs, <<"chain-unrestricted">>), - StoreChain = [ReadOnlyStore, WriteOnlyStore, UnrestrictedStore], - TestKey = <<"chain-test-key">>, - TestValue = <<"chain-test-value">>, - start(StoreChain), - ?event(testing, {fallback_chain_test_started, length(StoreChain)}), - ?assertEqual(ok, write(StoreChain, TestKey, TestValue)), - ?event(testing, {write_used_second_store_in_chain}), - ?assertEqual(not_found, read(StoreChain, TestKey)), - ?event(testing, {read_fell_through_entire_chain}), - WriteOnlyNoAccess = maps:remove(<<"access">>, WriteOnlyStore), - ?assertEqual({ok, TestValue}, 
read([WriteOnlyNoAccess], TestKey)). -``` - -### invalid_access_permissions_test - -Test invalid access permissions are ignored - -```erlang -invalid_access_permissions_test() -> - InvalidAccessStore = - (hb_test_utils:test_store(hb_store_fs, <<"access-invalid">>))#{ - <<"access">> => [<<"invalid-policy">>, <<"nonexistent-policy">>] - }, - FallbackStore = hb_test_utils:test_store(hb_store_fs, <<"access-fallback">>), - StoreList = [InvalidAccessStore, FallbackStore], - TestKey = <<"invalid-access-key">>, - TestValue = <<"invalid-access-value">>, - start(StoreList), - ?event(testing, {invalid_access_test_started}), - ?assertEqual(ok, write(StoreList, TestKey, TestValue)), - ?event(testing, {write_used_fallback_store}), - ?assertEqual({ok, TestValue}, read(StoreList, TestKey)), - ?event(testing, {read_used_fallback_store}), - InvalidStoreNoAccess = maps:remove(<<"access">>, InvalidAccessStore), - start([InvalidStoreNoAccess]), - ?assertEqual(not_found, read([InvalidStoreNoAccess], TestKey)). 
-``` - -### list_access_control_test - -Test list operations with access control - -```erlang -list_access_control_test() -> - ReadOnlyStore = - (hb_test_utils:test_store(hb_store_fs, <<"list-read-only">>))#{ - <<"access">> => [<<"read">>] - }, - WriteStore = hb_test_utils:test_store(hb_store_fs, <<"list-write">>), - StoreList = [ReadOnlyStore, WriteStore], - ListGroup = <<"list-test-group">>, - TestKey = <<"list-test-key">>, - TestValue = <<"list-test-value">>, - start(StoreList), - ?event(testing, {list_access_test_started}), - GroupResult = make_group(StoreList, ListGroup), - ?assertEqual(ok, GroupResult), - ?event(testing, {group_created, GroupResult}), - WriteResponse = write(StoreList, [ListGroup, TestKey], TestValue), - ?assertEqual(ok, WriteResponse), - ListResult = list(StoreList, ListGroup), - ListValue = read(StoreList, [ListGroup, TestKey]), - ?event(testing, {list_result, ListResult, ListValue}), - ?assertEqual({ok,[TestKey]}, ListResult), - ?assertEqual({ok,TestValue}, ListValue). 
-``` - -### make_link_access_test - -Test make_link operations with write access - -```erlang -make_link_access_test() -> - WriteOnlyStore = - (hb_test_utils:test_store(hb_store_fs, <<"link-write-only">>))#{ - <<"access">> => [<<"write">>,<<"read">>] - }, - FallbackStore = hb_test_utils:test_store(hb_store_fs, <<"link-fallback">>), - StoreList = [WriteOnlyStore, FallbackStore], - SourceKey = <<"link-source">>, - TargetKey = <<"link-target">>, - TestValue = <<"link-test-value">>, - start(StoreList), - ?event(testing, {make_link_access_test_started}), - ?assertEqual(ok, write(StoreList, TargetKey, TestValue)), - LinkResult = make_link(StoreList, TargetKey, SourceKey), - ?event(testing, {make_link_result, LinkResult}), - ReadResult = read(StoreList, SourceKey), - ?event(testing, {read_linked_value, ReadResult}), - ?assertEqual({ok, TestValue}, ReadResult), -``` - ---- - -*Generated from [hb_store.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store.erl)* diff --git a/docs/book/src/hb_store_fs.erl.md b/docs/book/src/hb_store_fs.erl.md deleted file mode 100644 index 5f0582311..000000000 --- a/docs/book/src/hb_store_fs.erl.md +++ /dev/null @@ -1,305 +0,0 @@ -# hb_store_fs - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_fs.erl) - -A key-value store implementation, following the `hb_store` behavior -and interface. This implementation utilizes the node's local file system as -its storage mechanism, offering an alternative to other store's that require -the compilation of additional libraries in order to function. -As this store implementation operates using Erlang's native `file` and -`filelib` mechanisms, it largely inherits its performance characteristics -from those of the underlying OS/filesystem drivers. 
Certain filesystems can -be quite performant for the types of workload that HyperBEAM AO-Core execution -requires (many reads and writes to explicit keys, few directory 'listing' or -search operations), awhile others perform suboptimally. -Additionally, thisstore implementation offers the ability for simple -integration of HyperBEAM with other non-volatile storage media: `hb_store_fs` -will interact with any service that implements the host operating system's -native filesystem API. By mounting devices via `FUSE` (etc), HyperBEAM is -able to interact with a large number of existing storage systems (for example, -S3-compatible cloud storage APIs, etc). - ---- - -## Exported Functions - -- `list/2` -- `make_group/2` -- `make_link/3` -- `read/2` -- `reset/1` -- `resolve/2` -- `scope/0` -- `scope/1` -- `start/1` -- `stop/1` -- `type/2` -- `write/3` - ---- - -### start - -A key-value store implementation, following the `hb_store` behavior -Initialize the file system store with the given data directory. - -```erlang -start(#{ <<"name">> := DataDir }) -> - ok = filelib:ensure_dir(DataDir). -``` - -### stop - -Stop the file system store. Currently a no-op. - -```erlang -stop(#{ <<"name">> := _DataDir }) -> - ok. -``` - -### scope - -The file-based store is always local, for now. In the future, we may - -```erlang -scope() -> local. -``` - -### scope - -The file-based store is always local, for now. In the future, we may - -```erlang -scope(#{ <<"scope">> := Scope }) -> Scope; -``` - -### scope - -The file-based store is always local, for now. In the future, we may -Reset the store by completely removing its directory and recreating it. - -```erlang -scope(_) -> scope(). -``` - -### reset - -The file-based store is always local, for now. In the future, we may -Reset the store by completely removing its directory and recreating it. 
- -```erlang -reset(#{ <<"name">> := DataDir }) -> - % Use pattern that completely removes directory then recreates it - os:cmd(binary_to_list(<< "rm -Rf ", DataDir/binary >>)), - ?event({reset_store, {path, DataDir}}). -``` - -### read - -Read a key from the store, following symlinks as needed. - -```erlang -read(Opts, Key) -> - read(add_prefix(Opts, resolve(Opts, Key))). -``` - -### read - -```erlang -read(Path) -> - ?event({read, Path}), - case file:read_file_info(Path) of - {ok, #file_info{type = regular}} -> - {ok, _} = file:read_file(Path); - _ -> - case file:read_link(Path) of - {ok, Link} -> - ?event({link_found, Path, Link}), - read(Link); - _ -> - not_found - end - end. -``` - -### write - -Write a value to the specified path in the store. - -```erlang -write(Opts, PathComponents, Value) -> - Path = add_prefix(Opts, PathComponents), - ?event({writing, Path, byte_size(Value)}), - filelib:ensure_dir(Path), - ok = file:write_file(Path, Value). -``` - -### list - -List contents of a directory in the store. - -```erlang -list(Opts, Path) -> - case file:list_dir(add_prefix(Opts, Path)) of - {ok, Files} -> {ok, lists:map(fun hb_util:bin/1, Files)}; - {error, _} -> not_found - end. -``` - -### resolve - -Replace links in a path successively, returning the final path. - -```erlang -resolve(Opts, RawPath) -> - Res = resolve(Opts, "", hb_path:term_to_path_parts(hb_store:join(RawPath), Opts)), - ?event({resolved, RawPath, Res}), - Res. 
-``` - -### resolve - -```erlang -resolve(_, CurrPath, []) -> - hb_store:join(CurrPath); -``` - -### resolve - -```erlang -resolve(Opts, CurrPath, [Next|Rest]) -> - PathPart = hb_store:join([CurrPath, Next]), - ?event( - {resolving, - {accumulated_path, CurrPath}, - {next_segment, Next}, - {generated_partial_path_to_test, PathPart} - } - ), - case file:read_link(add_prefix(Opts, PathPart)) of - {ok, RawLink} -> - Link = remove_prefix(Opts, RawLink), - resolve(Opts, Link, Rest); - {error, enoent} -> - not_found; - _ -> - resolve(Opts, PathPart, Rest) - end. -``` - -### type - -Determine the type of a key in the store. - -```erlang -type(Opts, Key) -> - type(add_prefix(Opts, Key)). -``` - -### type - -```erlang -type(Path) -> - ?event({type, Path}), - case file:read_file_info(Path) of - {ok, #file_info{type = directory}} -> composite; - {ok, #file_info{type = regular}} -> simple; - _ -> - case file:read_link(Path) of - {ok, Link} -> - type(Link); - _ -> - not_found - end - end. -``` - -### make_group - -Create a directory (group) in the store. - -```erlang -make_group(Opts = #{ <<"name">> := _DataDir }, Path) -> - P = add_prefix(Opts, Path), - ?event({making_group, P}), - % We need to ensure that the parent directory exists, so that we can - % make the group. -``` - -### make_link - -Create a symlink, handling the case where the link would point to itself. - -```erlang -make_link(_, Link, Link) -> ok; -``` - -### make_link - -Create a symlink, handling the case where the link would point to itself. - -```erlang -make_link(Opts, Existing, New) -> - ?event({symlink, - add_prefix(Opts, Existing), - P2 = add_prefix(Opts, New)}), - filelib:ensure_dir(P2), - case file:make_symlink(add_prefix(Opts, Existing), N = add_prefix(Opts, New)) of - ok -> ok; - {error, eexist} -> - file:delete(N), - R = file:make_symlink(add_prefix(Opts, Existing), N), - ?event(debug_fs, - {symlink_recreated, - {existing, Existing}, - {new, New}, - {result, R} - } - ), - R - end. 
-``` - -### add_prefix - -Add the directory prefix to a path. - -```erlang -add_prefix(#{ <<"name">> := Prefix }, Path) -> - ?event({add_prefix, Prefix, Path}), - % Check if the prefix is an absolute path - IsAbsolute = is_binary(Prefix) andalso binary:first(Prefix) =:= $/ orelse - is_list(Prefix) andalso hd(Prefix) =:= $/, - % Join the paths - JoinedPath = hb_store:join([Prefix, Path]), - % If the prefix was absolute, ensure the joined path is also absolute - case IsAbsolute of - true -> - case is_binary(JoinedPath) of - true -> - case binary:first(JoinedPath) of - $/ -> JoinedPath; - _ -> <<"/", JoinedPath/binary>> - end; - false -> - case JoinedPath of - [$/ | _] -> JoinedPath; - _ -> [$/ | JoinedPath] - end - end; - false -> - JoinedPath - end. -``` - -### remove_prefix - -Remove the directory prefix from a path. - -```erlang -remove_prefix(#{ <<"name">> := Prefix }, Path) -> -``` - ---- - -*Generated from [hb_store_fs.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_fs.erl)* diff --git a/docs/book/src/hb_store_gateway.erl.md b/docs/book/src/hb_store_gateway.erl.md deleted file mode 100644 index 480ef52a6..000000000 --- a/docs/book/src/hb_store_gateway.erl.md +++ /dev/null @@ -1,388 +0,0 @@ -# hb_store_gateway - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_gateway.erl) - -A store module that reads data from the nodes Arweave gateway and -GraphQL routes, additionally including additional store-specific routes. - ---- - -## Exported Functions - -- `list/2` -- `read/2` -- `resolve/2` -- `scope/1` -- `type/2` - ---- - -### scope - -A store module that reads data from the nodes Arweave gateway and -The scope of a GraphQL store is always remote, due to performance. - -```erlang -scope(_) -> remote. -``` - -### resolve - -A store module that reads data from the nodes Arweave gateway and -The scope of a GraphQL store is always remote, due to performance. - -```erlang -resolve(_, Key) -> Key. 
-``` - -### list - -A store module that reads data from the nodes Arweave gateway and -The scope of a GraphQL store is always remote, due to performance. - -```erlang -list(StoreOpts, Key) -> - ?event(store_gateway, executing_list), - case read(StoreOpts, Key) of - not_found -> not_found; - {ok, Message} -> {ok, hb_maps:keys(Message, StoreOpts)} - end. -``` - -### type - -Get the type of the data at the given key. We potentially cache the - -```erlang -type(StoreOpts, Key) -> - ?event(store_gateway, executing_type), - case read(StoreOpts, Key) of - not_found -> not_found; - {ok, Data} -> - ?event({type, hb_private:reset(hb_message:uncommitted(Data, StoreOpts))}), - IsFlat = lists:all( - fun({_, Value}) -> not is_map(Value) end, - hb_maps:to_list( - hb_private:reset( - hb_message:uncommitted(Data, StoreOpts) - ), - StoreOpts - ) - ), - if - IsFlat -> simple; - true -> composite - end - end. -``` - -### read - -Read the data at the given key from the GraphQL route. Will only attempt - -```erlang -read(BaseStoreOpts, Key) -> - StoreOpts = opts(BaseStoreOpts), - case hb_path:term_to_path_parts(Key, StoreOpts) of - [ID] when ?IS_ID(ID) -> - ?event({read, StoreOpts, Key}), - case hb_gateway_client:read(Key, StoreOpts) of - {error, _} -> - ?event(store_gateway, {read_not_found, {key, ID}}), - not_found; - {ok, Message} -> - ?event(store_gateway, {read_found, {key, ID}}), - try hb_store_remote_node:maybe_cache(StoreOpts, Message) - catch _:_ -> ignored end, - {ok, Message} - end; - _ -> - ?event({ignoring_non_id, Key}), - not_found - end. -``` - -### opts - -Normalize the routes in the given `Opts`. - -```erlang -opts(Opts) -> - case hb_maps:find(<<"node">>, Opts) of - error -> Opts; - {ok, Node} -> - case hb_maps:get(<<"node-type">>, Opts, <<"arweave">>, Opts) of - <<"arweave">> -> - Opts#{ - routes => [ - #{ - % Routes for GraphQL requests to use the remote - % server's GraphQL API. -``` - -### graphql_as_store_test_ - -Store is accessible via the default options. 
- -```erlang -graphql_as_store_test_() -> - hb_http_server:start_node(#{}), - {timeout, 10, fun() -> - hb_http_server:start_node(#{}), - ?assertMatch( - {ok, #{ <<"app-name">> := <<"aos">> }}, - hb_store:read( - [#{ <<"store-module">> => hb_store_gateway }], - <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">> - ) - ) - end}. -``` - -### graphql_from_cache_test - -Stored messages are accessible via `hb_cache` accesses. - -```erlang -graphql_from_cache_test() -> - hb_http_server:start_node(#{}), - Opts = - #{ - store => - [ - #{ - <<"store-module">> => hb_store_gateway - } - ] - }, - ?assertMatch( - {ok, #{ <<"app-name">> := <<"aos">> }}, - hb_cache:read( - <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">>, - Opts - ) - ). -``` - -### manual_local_cache_test - -```erlang -manual_local_cache_test() -> - hb_http_server:start_node(#{}), - Local = #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST/gw-local-cache">> - }, - hb_store:reset(Local), - Gateway = #{ - <<"store-module">> => hb_store_gateway, - <<"local-store">> => Local - }, - {ok, FromRemote} = - hb_cache:read( - <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">>, - #{ store => [Gateway] } - ), - ?event({writing_recvd_to_local, FromRemote}), - {ok, _} = hb_cache:write(FromRemote, #{ store => [Local] }), - {ok, Read} = - hb_cache:read( - <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">>, - #{ store => [Local] } - ), - ?event({read_from_local, Read}), - ?assert(hb_message:match(Read, FromRemote)). -``` - -### cache_read_message_test - -Ensure that saving to the gateway store works. 
- -```erlang -cache_read_message_test() -> - hb_http_server:start_node(#{}), - Local = #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST/1">> - }, - hb_store:reset(Local), - WriteOpts = #{ - store => - [ - #{ <<"store-module">> => hb_store_gateway, - <<"local-store">> => [Local] - } - ] - }, - {ok, Written} = - hb_cache:read( - <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">>, - WriteOpts - ), - {ok, Read} = - hb_cache:read( - <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">>, - #{ store => [Local] } - ), - ?assert(hb_message:match(Read, Written)). -``` - -### specific_route_test - -Routes can be specified in the options, overriding the default routes. - -```erlang -specific_route_test() -> - hb_http_server:start_node(#{}), - Opts = #{ - store => - [ - #{ <<"store-module">> => hb_store_gateway, - <<"routes">> => [], - <<"only">> => local - } - ] - }, - ?assertMatch( - not_found, - hb_cache:read( - <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">>, - Opts - ) - ). -``` - -### external_http_access_test - -Test that the default node config allows for data to be accessed. - -```erlang -external_http_access_test() -> - Node = hb_http_server:start_node( - #{ - cache_control => <<"cache">>, - store => - [ - #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST">> - }, - #{ <<"store-module">> => hb_store_gateway } - ] - } - ), - ?assertMatch( - {ok, #{ <<"data-protocol">> := <<"ao">> }}, - hb_http:get( - Node, - <<"p45HPD-ENkLS7Ykqrx6p_DYGbmeHDeeF8LJ09N2K53g">>, - #{} - ) - ). -``` - -### store_opts_test - -Test to verify store opts is being set for Data-Protocol ao -Test that items retreived from the gateway store are verifiable. 
- -```erlang -store_opts_test() -> - Opts = #{ - cache_control => <<"cache">>, - store => - [ - #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-TEST">> - }, - #{ - <<"store-module">> => hb_store_gateway, - <<"local-store">> => false, - <<"subindex">> => [ - #{ - <<"name">> => <<"Data-Protocol">>, - <<"value">> => <<"ao">> - } - ] - } - ] - }, - Node = hb_http_server:start_node(Opts), - {ok, Res} = - hb_http:get( - Node, - <<"myb2p8_TSM0KSgBMoG-nu6TLuqWwPmdZM5V2QSUeNmM">>, - #{} - ), - ?event(debug_gateway, {res, Res}), - ?assertEqual(<<"Hello World">>, hb_ao:get(<<"data">>, Res)). -``` - -### verifiability_test - -Test to verify store opts is being set for Data-Protocol ao -Test that items retreived from the gateway store are verifiable. - -```erlang -verifiability_test() -> - hb_http_server:start_node(#{}), - {ok, Message} = - hb_cache:read( - <<"BOogk_XAI3bvNWnxNxwxmvOfglZt17o4MOVAdPNZ_ew">>, - #{ - store => - [ - #{ - <<"store-module">> => hb_store_gateway - } - ] - } - ), - % Ensure that the message is verifiable after being converted to - % httpsig@1.0 and back to structured@1.0. 
-``` - -### remote_hyperbeam_node_ans104_test - -Test that another HyperBEAM node offering the `~query@1.0` device can - -```erlang -remote_hyperbeam_node_ans104_test() -> - ServerOpts = - #{ - priv_wallet => ar_wallet:new(), - store => hb_test_utils:test_store() - }, - Server = hb_http_server:start_node(ServerOpts), - Msg = - hb_message:commit( - #{ - <<"hello">> => <<"world">> - }, - ServerOpts, - #{ <<"commitment-device">> => <<"ans104@1.0">> } - ), - {ok, ID} = hb_cache:write(Msg, ServerOpts), - {ok, ReadMsg} = hb_cache:read(ID, ServerOpts), - ?assert(hb_message:verify(ReadMsg)), - LocalStore = hb_test_utils:test_store(), - ClientOpts = - #{ - store => - [ - #{ - <<"store-module">> => hb_store_gateway, - <<"node">> => Server, - <<"node-type">> => <<"ao">>, - <<"local-store">> => [LocalStore] - } - ] - }, - {ok, Msg2} = hb_cache:read(ID, ClientOpts), - ?assert(hb_message:verify(Msg2)), -``` - ---- - -*Generated from [hb_store_gateway.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_gateway.erl)* diff --git a/docs/book/src/hb_store_lmdb.erl.md b/docs/book/src/hb_store_lmdb.erl.md deleted file mode 100644 index 8b287cb5b..000000000 --- a/docs/book/src/hb_store_lmdb.erl.md +++ /dev/null @@ -1,1206 +0,0 @@ -# hb_store_lmdb - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_lmdb.erl) - -## Exported Functions - -- `add_path/3` -- `list/2` -- `make_group/2` -- `make_link/3` -- `match/2` -- `path/2` -- `read/2` -- `reset/1` -- `resolve/2` -- `scope/0` -- `scope/1` -- `start/1` -- `stop/1` -- `type/2` -- `write/3` - ---- - -### start - -An LMDB (Lightning Memory Database) implementation of the HyperBeam store interface. -Start the LMDB storage system for a given database configuration. 
- -```erlang -start(Opts = #{ <<"name">> := DataDir }) -> - % Ensure the directory exists before opening LMDB environment - DataDirPath = hb_util:list(DataDir), - ok = filelib:ensure_dir(filename:join(DataDirPath, "dummy")), - % Create the LMDB environment with specified size limit - {ok, Env} = - elmdb:env_open( - DataDirPath, - [ - {map_size, maps:get(<<"capacity">>, Opts, ?DEFAULT_SIZE)}, - no_mem_init, no_sync - ] - ), - {ok, DBInstance} = elmdb:db_open(Env, [create]), - % Store both environment and DB instance in persistent_term for later cleanup - StoreKey = {lmdb, ?MODULE, DataDir}, - persistent_term:put(StoreKey, {Env, DBInstance, DataDir}), - {ok, #{ <<"env">> => Env, <<"db">> => DBInstance }}; -``` - -### start - -An LMDB (Lightning Memory Database) implementation of the HyperBeam store interface. -Start the LMDB storage system for a given database configuration. -Determine whether a key represents a simple value or composite group. -Write a key-value pair to the database asynchronously. -Read a value from the database by key, with automatic link resolution. -Helper function to check if a value is a link and extract the target. - -```erlang --spec read(map(), binary() | list()) -> {ok, binary()} | {error, term()}. -read(Opts, PathParts) when is_list(PathParts) -> - read(Opts, to_path(PathParts)); -read(Opts, Path) -> - % Try direct read first (fast path for non-link paths) - case read_with_links(Opts, Path) of - {ok, Value} -> - {ok, Value}; - not_found -> - try - PathParts = binary:split(Path, <<"/">>, [global]), - case resolve_path_links(Opts, PathParts) of - {ok, ResolvedPathParts} -> - ResolvedPathBin = to_path(ResolvedPathParts), - read_with_links(Opts, ResolvedPathBin); - {error, _} -> - not_found - end - catch - Class:Reason:Stacktrace -> - ?event(error, - { - resolve_path_links_failed, - {class, Class}, - {reason, Reason}, - {stacktrace, Stacktrace}, - {path, Path} - } - ), - % If link resolution fails, return not_found - not_found - end - end. 
-``` - -```erlang -start(_) -> - {error, {badarg, <<"StoreOpts must be a map">>}}. -%% -%% -%% -%% -%% -%% -%% -%% -%% -%% -%% -``` - -### is_link - -An LMDB (Lightning Memory Database) implementation of the HyperBeam store interface. -Start the LMDB storage system for a given database configuration. -Determine whether a key represents a simple value or composite group. -Write a key-value pair to the database asynchronously. -Read a value from the database by key, with automatic link resolution. -Helper function to check if a value is a link and extract the target. - -```erlang --spec read(map(), binary() | list()) -> {ok, binary()} | {error, term()}. -read(Opts, PathParts) when is_list(PathParts) -> - read(Opts, to_path(PathParts)); -read(Opts, Path) -> - % Try direct read first (fast path for non-link paths) - case read_with_links(Opts, Path) of - {ok, Value} -> - {ok, Value}; - not_found -> - try - PathParts = binary:split(Path, <<"/">>, [global]), - case resolve_path_links(Opts, PathParts) of - {ok, ResolvedPathParts} -> - ResolvedPathBin = to_path(ResolvedPathParts), - read_with_links(Opts, ResolvedPathBin); - {error, _} -> - not_found - end - catch - Class:Reason:Stacktrace -> - ?event(error, - { - resolve_path_links_failed, - {class, Class}, - {reason, Reason}, - {stacktrace, Stacktrace}, - {path, Path} - } - ), - % If link resolution fails, return not_found - not_found - end - end. -``` - -```erlang -is_link(Value) -> - LinkPrefixSize = byte_size(<<"link:">>), - case byte_size(Value) > LinkPrefixSize andalso - binary:part(Value, 0, LinkPrefixSize) =:= <<"link:">> of - true -> - Link = - binary:part( - Value, - LinkPrefixSize, - byte_size(Value) - LinkPrefixSize - ), - {true, Link}; - false -> - false - end. -``` - -### to_path - -Helper function to convert to a path -Unified read function that handles LMDB reads with fallback to the - -```erlang -to_path(PathParts) -> - hb_util:bin(lists:join(<<"/">>, PathParts)). 
-``` - -### read_direct - -Helper function to convert to a path -Unified read function that handles LMDB reads with fallback to the - -```erlang -read_direct(Opts, Path) -> - #{ <<"db">> := DBInstance } = find_env(Opts), - case elmdb:get(DBInstance, Path) of - {ok, Value} -> {ok, Value}; - {error, not_found} -> not_found; % Normalize error format - not_found -> not_found % Handle both old and new format - end. -``` - -### read_with_links - -Read a value directly from the database with link resolution. - -```erlang -read_with_links(Opts, Path) -> - case read_direct(Opts, Path) of - {ok, Value} -> - % Check if this value is actually a link to another key - case is_link(Value) of - {true, Link} -> - % Extract the target key and recursively resolve the link - read_with_links(Opts, Link); - false -> - % Check if this is a group marker - groups should not be - % readable as simple values - case Value of - <<"group">> -> not_found; - _ -> {ok, Value} - end - end; - not_found -> - not_found - end. -``` - -### resolve_path_links - -Resolve links in a path, checking each segment except the last. - -```erlang -resolve_path_links(Opts, Path) -> - resolve_path_links(Opts, Path, 0). -``` - -### resolve_path_links - -```erlang -resolve_path_links(_Opts, _Path, Depth) when Depth > ?MAX_REDIRECTS -> - % Prevent infinite loops with depth limit - {error, too_many_redirects}; -``` - -### resolve_path_links - -```erlang -resolve_path_links(_Opts, [LastSegment], _Depth) -> - % Base case: only one segment left, no link resolution needed - {ok, [LastSegment]}; -``` - -### resolve_path_links - -```erlang -resolve_path_links(Opts, Path, Depth) -> - resolve_path_links_acc(Opts, Path, [], Depth). 
-``` - -### resolve_path_links_acc - -```erlang -resolve_path_links_acc(_Opts, [], AccPath, _Depth) -> - % No more segments to process - {ok, lists:reverse(AccPath)}; -``` - -### resolve_path_links_acc - -```erlang -resolve_path_links_acc(_, FullPath = [<<"data">>|_], [], _Depth) -> - {ok, FullPath}; -``` - -### resolve_path_links_acc - -```erlang -resolve_path_links_acc(Opts, [Head | Tail], AccPath, Depth) -> - % Build the accumulated path so far - CurrentPath = lists:reverse([Head | AccPath]), - CurrentPathBin = to_path(CurrentPath), - % Check if the accumulated path (not just the segment) is a link - case read_direct(Opts, CurrentPathBin) of - {ok, Value} -> - case is_link(Value) of - {true, Link} -> - % The accumulated path is a link! Resolve it - LinkSegments = binary:split(Link, <<"/">>, [global]), - % Replace the accumulated path with the link target and - % continue with remaining segments - NewPath = LinkSegments ++ Tail, - resolve_path_links(Opts, NewPath, Depth + 1); - false -> - % Not a link, continue accumulating - resolve_path_links_acc(Opts, Tail, [Head | AccPath], Depth) - end; - not_found -> - % Path doesn't exist as a complete link, continue accumulating - resolve_path_links_acc(Opts, Tail, [Head | AccPath], Depth) - end. -``` - -### match - -Match a series of keys and values against the database. Returns - -```erlang --spec list(map(), binary()) -> {ok, [binary()]} | {error, term()}. 
-list(Opts, Path) -> - % Check if Path is a link and resolve it if necessary - ResolvedPath = - case read_direct(Opts, Path) of - {ok, Value} -> - case is_link(Value) of - {true, Link} -> - Link; - false -> - % Not a link; use original path - Path - end; - not_found -> - Path - end, - % Ensure path ends with / for elmdb:list API - SearchPath = - case ResolvedPath of - <<>> -> <<>>; % Root path - <<"/">> -> <<>>; % Root path variant - _ -> - case binary:last(ResolvedPath) of - $/ -> ResolvedPath; - _ -> <> - end - end, - % Use native elmdb:list function - #{ <<"db">> := DBInstance } = find_env(Opts), - case elmdb:list(DBInstance, SearchPath) of - {ok, Children} -> {ok, Children}; - {error, not_found} -> {ok, []}; % Normalize new error format - not_found -> {ok, []} % Handle both old and new format - end. -``` - -```erlang -match(Opts, MatchMap) when is_map(MatchMap) -> - match(Opts, maps:to_list(MatchMap)); -``` - -### match - -Match a series of keys and values against the database. Returns - -```erlang --spec list(map(), binary()) -> {ok, [binary()]} | {error, term()}. -list(Opts, Path) -> - % Check if Path is a link and resolve it if necessary - ResolvedPath = - case read_direct(Opts, Path) of - {ok, Value} -> - case is_link(Value) of - {true, Link} -> - Link; - false -> - % Not a link; use original path - Path - end; - not_found -> - Path - end, - % Ensure path ends with / for elmdb:list API - SearchPath = - case ResolvedPath of - <<>> -> <<>>; % Root path - <<"/">> -> <<>>; % Root path variant - _ -> - case binary:last(ResolvedPath) of - $/ -> ResolvedPath; - _ -> <> - end - end, - % Use native elmdb:list function - #{ <<"db">> := DBInstance } = find_env(Opts), - case elmdb:list(DBInstance, SearchPath) of - {ok, Children} -> {ok, Children}; - {error, not_found} -> {ok, []}; % Normalize new error format - not_found -> {ok, []} % Handle both old and new format - end. 
-``` - -```erlang -match(Opts, MatchKVs) -> - #{ <<"db">> := DBInstance } = find_env(Opts), - WithPrefixes = - lists:map( - fun({Key, Path}) -> - {Key, <<"link:", Path/binary>>} - end, - MatchKVs - ), - ?event({elmdb_match, MatchKVs}), - case elmdb:match(DBInstance, WithPrefixes) of - {ok, Matches} -> - ?event({elmdb_matched, Matches}), - {ok, Matches}; - {error, not_found} -> not_found; - not_found -> not_found - end. -``` - -### create_parent_groups - -Helper function to recursively create parent groups. - -```erlang --spec ensure_parent_groups(map(), binary()) -> ok. -ensure_parent_groups(Opts, Path) -> - PathParts = binary:split(Path, <<"/">>, [global]), - case PathParts of - [_] -> - % Single segment, no parents to create - ok; - _ -> - % Multiple segments, create parent groups - ParentParts = lists:droplast(PathParts), - create_parent_groups(Opts, [], ParentParts) - end. -``` - -```erlang -create_parent_groups(_Opts, _Current, []) -> - ok; -``` - -### create_parent_groups - -Helper function to recursively create parent groups. - -```erlang --spec ensure_parent_groups(map(), binary()) -> ok. -ensure_parent_groups(Opts, Path) -> - PathParts = binary:split(Path, <<"/">>, [global]), - case PathParts of - [_] -> - % Single segment, no parents to create - ok; - _ -> - % Multiple segments, create parent groups - ParentParts = lists:droplast(PathParts), - create_parent_groups(Opts, [], ParentParts) - end. -``` - -```erlang -create_parent_groups(Opts, Current, [Next | Rest]) -> - NewCurrent = Current ++ [Next], - GroupPath = to_path(NewCurrent), - % Only create group if it doesn't already exist. -``` - -### path - -Transform a path into the store's canonical form. - -```erlang --spec make_link(map(), binary() | list(), binary()) -> ok. 
-make_link(Opts, Existing, New) when is_list(Existing) -> - ExistingBin = to_path(Existing), - make_link(Opts, ExistingBin, New); -make_link(Opts, Existing, New) -> - ExistingBin = hb_util:bin(Existing), - % Ensure parent groups exist for the new link path (like filesystem ensure_dir) - ensure_parent_groups(Opts, New), - write(Opts, New, <<"link:", ExistingBin/binary>>). -``` - -```erlang -path(_Opts, PathParts) when is_list(PathParts) -> - to_path(PathParts); -``` - -### path - -Transform a path into the store's canonical form. - -```erlang --spec make_link(map(), binary() | list(), binary()) -> ok. -make_link(Opts, Existing, New) when is_list(Existing) -> - ExistingBin = to_path(Existing), - make_link(Opts, ExistingBin, New); -make_link(Opts, Existing, New) -> - ExistingBin = hb_util:bin(Existing), - % Ensure parent groups exist for the new link path (like filesystem ensure_dir) - ensure_parent_groups(Opts, New), - write(Opts, New, <<"link:", ExistingBin/binary>>). -``` - -```erlang -path(_Opts, Path) when is_binary(Path) -> - Path. -``` - -### add_path - -Add two path components together. - -```erlang -add_path(_Opts, Path1, Path2) when is_list(Path1), is_list(Path2) -> - Path1 ++ Path2; -``` - -### add_path - -Add two path components together. - -```erlang -add_path(Opts, Path1, Path2) when is_binary(Path1), is_binary(Path2) -> - % Convert binaries to lists, concatenate, then convert back - Parts1 = binary:split(Path1, <<"/">>, [global]), - Parts2 = binary:split(Path2, <<"/">>, [global]), - path(Opts, Parts1 ++ Parts2); -``` - -### add_path - -Add two path components together. - -```erlang -add_path(Opts, Path1, Path2) when is_list(Path1), is_binary(Path2) -> - Parts2 = binary:split(Path2, <<"/">>, [global]), - path(Opts, Path1 ++ Parts2); -``` - -### add_path - -Add two path components together. - -```erlang -add_path(Opts, Path1, Path2) when is_binary(Path1), is_list(Path2) -> - Parts1 = binary:split(Path1, <<"/">>, [global]), - path(Opts, Parts1 ++ Path2). 
-``` - -### find_env - -Retrieve or create the LMDB environment handle for a database. - -```erlang --spec resolve(map(), binary() | list()) -> binary(). -resolve(Opts, Path) when is_binary(Path) -> - resolve(Opts, binary:split(Path, <<"/">>, [global])); -resolve(Opts, PathParts) when is_list(PathParts) -> - % Handle list paths by resolving directly and converting to binary - case resolve_path_links(Opts, PathParts) of - {ok, ResolvedParts} -> - to_path(ResolvedParts); - {error, _} -> - % If resolution fails, return original path as binary - to_path(PathParts) - end; -resolve(_,_) -> not_found. -``` - -```erlang -find_env(Opts) -> hb_store:find(Opts). -%% Shutdown LMDB environment and cleanup resources -``` - -### stop - -Retrieve or create the LMDB environment handle for a database. - -```erlang --spec resolve(map(), binary() | list()) -> binary(). -resolve(Opts, Path) when is_binary(Path) -> - resolve(Opts, binary:split(Path, <<"/">>, [global])); -resolve(Opts, PathParts) when is_list(PathParts) -> - % Handle list paths by resolving directly and converting to binary - case resolve_path_links(Opts, PathParts) of - {ok, ResolvedParts} -> - to_path(ResolvedParts); - {error, _} -> - % If resolution fails, return original path as binary - to_path(PathParts) - end; -resolve(_,_) -> not_found. -``` - -```erlang -stop(#{ <<"store-module">> := ?MODULE, <<"name">> := DataDir }) -> - StoreKey = {lmdb, ?MODULE, DataDir}, - close_environment(StoreKey, DataDir); -``` - -### stop - -Retrieve or create the LMDB environment handle for a database. - -```erlang --spec resolve(map(), binary() | list()) -> binary(). 
-resolve(Opts, Path) when is_binary(Path) -> - resolve(Opts, binary:split(Path, <<"/">>, [global])); -resolve(Opts, PathParts) when is_list(PathParts) -> - % Handle list paths by resolving directly and converting to binary - case resolve_path_links(Opts, PathParts) of - {ok, ResolvedParts} -> - to_path(ResolvedParts); - {error, _} -> - % If resolution fails, return original path as binary - to_path(PathParts) - end; -resolve(_,_) -> not_found. -``` - -```erlang -stop(_InvalidStoreOpts) -> - ok. -``` - -### close_environment - -```erlang -close_environment(StoreKey, DataDir) -> - case safe_get_persistent_term(StoreKey) of - {ok, {Env, DBInstance}} -> - close_and_cleanup(Env, DBInstance, StoreKey, DataDir); - not_found -> - ?event({lmdb_stop_not_found_in_persistent_term, DataDir}), - safe_close_by_name(DataDir) - end, - ok. -``` - -### safe_get_persistent_term - -```erlang -safe_get_persistent_term(Key) -> - case persistent_term:get(Key, undefined) of - {Env, DBInstance, _DataDir} -> {ok, {Env, DBInstance}}; - {Env, _DataDir} -> {ok, {Env, undefined}}; % Backwards compatibility - _ -> not_found - end. -``` - -### close_and_cleanup - -```erlang -close_and_cleanup(Env, DBInstance, StoreKey, DataDir) -> - % Close DB instance first if it exists - DBCloseResult = safe_close_db(DBInstance), - ?event({db_close_result, DBCloseResult}), - % Then close the environment - EnvCloseResult = safe_close_env(Env), - persistent_term:erase(StoreKey), - case EnvCloseResult of - ok -> ?event({lmdb_stop_success, DataDir}); - {error, Reason} -> ?event({lmdb_stop_error, Reason}) - end. -``` - -### safe_close_db - -```erlang -safe_close_db(undefined) -> - ok; % No DB instance to close -``` - -### safe_close_db - -```erlang -safe_close_db(DBInstance) -> - try - elmdb:db_close(DBInstance) - catch - error:Reason -> {error, Reason} - end. -``` - -### safe_close_env - -```erlang -safe_close_env(Env) -> - try - elmdb:env_close(Env) - catch - error:Reason -> {error, Reason} - end. 
-``` - -### safe_close_by_name - -```erlang -safe_close_by_name(DataDir) -> - try - elmdb:env_close_by_name(binary_to_list(DataDir)) - catch - error:_ -> ok - end. -``` - -### reset - -Completely delete the database directory and all its contents. - -```erlang -reset(Opts) -> - case maps:get(<<"name">>, Opts, undefined) of - undefined -> - % No prefix specified, nothing to reset - ok; - DataDir -> - % Stop the store and remove the database. -``` - -### basic_test - -Test suite demonstrating basic store operations. -Basic store test - verifies fundamental read/write functionality. - -```erlang -basic_test() -> - StoreOpts = #{ - <<"store-module">> => ?MODULE, - <<"name">> => <<"/tmp/store-1">> - }, - reset(StoreOpts), - Res = write(StoreOpts, <<"Hello">>, <<"World2">>), - ?assertEqual(ok, Res), - {ok, Value} = read(StoreOpts, <<"Hello">>), - ?assertEqual(Value, <<"World2">>), - ok = stop(StoreOpts). -``` - -### list_test - -List test - verifies prefix-based key listing functionality. - -```erlang -list_test() -> - StoreOpts = #{ - <<"store-module">> => ?MODULE, - <<"name">> => <<"/tmp/store-2">>, - <<"capacity">> => ?DEFAULT_SIZE - }, - reset(StoreOpts), - ?assertEqual(list(StoreOpts, <<"colors">>), {ok, []}), - % Create immediate children under colors/ - write(StoreOpts, <<"colors/red">>, <<"1">>), - write(StoreOpts, <<"colors/blue">>, <<"2">>), - write(StoreOpts, <<"colors/green">>, <<"3">>), - % Create nested directories under colors/ - these should show up as immediate children - write(StoreOpts, <<"colors/multi/foo">>, <<"4">>), - write(StoreOpts, <<"colors/multi/bar">>, <<"5">>), - write(StoreOpts, <<"colors/primary/red">>, <<"6">>), - write(StoreOpts, <<"colors/primary/blue">>, <<"7">>), - write(StoreOpts, <<"colors/nested/deep/value">>, <<"8">>), - % Create other top-level directories - write(StoreOpts, <<"foo/bar">>, <<"baz">>), - write(StoreOpts, <<"beep/boop">>, <<"bam">>), - read(StoreOpts, <<"colors">>), - % Test listing colors/ - should return 
immediate children only - {ok, ListResult} = list(StoreOpts, <<"colors">>), - ?event({list_result, ListResult}), - % Expected: red, blue, green (files) + multi, primary, nested (directories) - % Should NOT include deeply nested items like foo, bar, deep, value - ExpectedChildren = [<<"blue">>, <<"green">>, <<"multi">>, <<"nested">>, <<"primary">>, <<"red">>], - ?assert(lists:all(fun(Key) -> lists:member(Key, ExpectedChildren) end, ListResult)), - % Test listing a nested directory - should only show immediate children - {ok, NestedListResult} = list(StoreOpts, <<"colors/multi">>), - ?event({nested_list_result, NestedListResult}), - ExpectedNestedChildren = [<<"bar">>, <<"foo">>], - ?assert(lists:all(fun(Key) -> lists:member(Key, ExpectedNestedChildren) end, NestedListResult)), - % Test listing a deeper nested directory - {ok, DeepListResult} = list(StoreOpts, <<"colors/nested">>), - ?event({deep_list_result, DeepListResult}), - ExpectedDeepChildren = [<<"deep">>], - ?assert(lists:all(fun(Key) -> lists:member(Key, ExpectedDeepChildren) end, DeepListResult)), - ok = stop(StoreOpts). -``` - -### group_test - -Group test - verifies group creation and type detection. -Link test - verifies symbolic link creation and resolution. - -```erlang -group_test() -> - StoreOpts = #{ - <<"store-module">> => ?MODULE, - <<"name">> => <<"/tmp/store3">>, - <<"capacity">> => ?DEFAULT_SIZE - }, - reset(StoreOpts), - make_group(StoreOpts, <<"colors">>), - % Groups should be detected as composite types - ?assertEqual(composite, type(StoreOpts, <<"colors">>)), - % Groups should not be readable directly (like directories in filesystem) - ?assertEqual(not_found, read(StoreOpts, <<"colors">>)). -%% -``` - -### link_test - -Group test - verifies group creation and type detection. -Link test - verifies symbolic link creation and resolution. 
- -```erlang -link_test() -> - StoreOpts = #{ - <<"store-module">> => ?MODULE, - <<"name">> => <<"/tmp/store3">>, - <<"capacity">> => ?DEFAULT_SIZE - }, - reset(StoreOpts), - write(StoreOpts, <<"foo/bar/baz">>, <<"Bam">>), - make_link(StoreOpts, <<"foo/bar/baz">>, <<"foo/beep/baz">>), - {ok, Result} = read(StoreOpts, <<"foo/beep/baz">>), - ?event({ result, Result}), - ?assertEqual(<<"Bam">>, Result). -``` - -### link_fragment_test - -Group test - verifies group creation and type detection. -Link test - verifies symbolic link creation and resolution. -Type test - verifies type detection for both simple and composite entries. - -```erlang -link_fragment_test() -> - StoreOpts = #{ - <<"store-module">> => ?MODULE, - <<"name">> => <<"/tmp/store3">>, - <<"capacity">> => ?DEFAULT_SIZE - }, - reset(StoreOpts), - write(StoreOpts, [<<"data">>, <<"bar">>, <<"baz">>], <<"Bam">>), - make_link(StoreOpts, [<<"data">>, <<"bar">>], <<"my-link">>), - {ok, Result} = read(StoreOpts, [<<"my-link">>, <<"baz">>]), - ?event({ result, Result}), - ?assertEqual(<<"Bam">>, Result). -%% -``` - -### type_test - -Group test - verifies group creation and type detection. -Link test - verifies symbolic link creation and resolution. -Type test - verifies type detection for both simple and composite entries. - -```erlang -type_test() -> - StoreOpts = #{ - <<"store-module">> => ?MODULE, - <<"name">> => <<"/tmp/store-6">>, - <<"capacity">> => ?DEFAULT_SIZE - }, - reset(StoreOpts), - make_group(StoreOpts, <<"assets">>), - Type = type(StoreOpts, <<"assets">>), - ?event({type, Type}), - ?assertEqual(composite, Type), - write(StoreOpts, <<"assets/1">>, <<"bam">>), - Type2 = type(StoreOpts, <<"assets/1">>), - ?event({type2, Type2}), - ?assertEqual(simple, Type2). -``` - -### link_key_list_test - -Link key list test - verifies symbolic link creation using structured key paths. -Path traversal link test - verifies link resolution during path traversal. 
- -```erlang -link_key_list_test() -> - StoreOpts = #{ - <<"store-module">> => ?MODULE, - <<"name">> => <<"/tmp/store-7">>, - <<"capacity">> => ?DEFAULT_SIZE - }, - reset(StoreOpts), - write(StoreOpts, [ <<"parent">>, <<"key">> ], <<"value">>), - make_link(StoreOpts, [ <<"parent">>, <<"key">> ], <<"my-link">>), - {ok, Result} = read(StoreOpts, <<"my-link">>), - ?event({result, Result}), - ?assertEqual(<<"value">>, Result). -%% -%% -``` - -### path_traversal_link_test - -Link key list test - verifies symbolic link creation using structured key paths. -Path traversal link test - verifies link resolution during path traversal. - -```erlang -path_traversal_link_test() -> - StoreOpts = #{ - <<"store-module">> => ?MODULE, - <<"name">> => <<"/tmp/store-8">>, - <<"capacity">> => ?DEFAULT_SIZE - }, - reset(StoreOpts), - % Create the actual data at group/key - write(StoreOpts, [<<"group">>, <<"key">>], <<"target-value">>), - % Create a link from "link" to "group" - make_link(StoreOpts, <<"group">>, <<"link">>), - % Reading via the link path should resolve to the target value - {ok, Result} = read(StoreOpts, [<<"link">>, <<"key">>]), - ?event({path_traversal_result, Result}), - ?assertEqual(<<"target-value">>, Result), - ok = stop(StoreOpts). 
-``` - -### exact_hb_store_test - -Test that matches the exact hb_store hierarchical test pattern - -```erlang -exact_hb_store_test() -> - StoreOpts = #{ - <<"store-module">> => ?MODULE, - <<"name">> => <<"/tmp/store-exact">>, - <<"capacity">> => ?DEFAULT_SIZE - }, - % Follow exact same pattern as hb_store test - ?event(step1_make_group), - make_group(StoreOpts, <<"test-dir1">>), - ?event(step2_write_file), - write(StoreOpts, [<<"test-dir1">>, <<"test-file">>], <<"test-data">>), - ?event(step3_make_link), - make_link(StoreOpts, [<<"test-dir1">>], <<"test-link">>), - % Debug: test that the link behaves like the target (groups are unreadable) - ?event(step4_check_link), - LinkResult = read(StoreOpts, <<"test-link">>), - ?event({link_result, LinkResult}), - % Since test-dir1 is a group and groups are unreadable, the link should also be unreadable - ?assertEqual(not_found, LinkResult), - % Debug: test intermediate steps - ?event(step5_test_direct_read), - DirectResult = read(StoreOpts, <<"test-dir1/test-file">>), - ?event({direct_result, DirectResult}), - % This should work: reading via the link path - ?event(step6_test_link_read), - Result = read(StoreOpts, [<<"test-link">>, <<"test-file">>]), - ?event({final_result, Result}), - ?assertEqual({ok, <<"test-data">>}, Result), - ok = stop(StoreOpts). -``` - -### cache_style_test - -Test cache-style usage through hb_store interface - -```erlang -cache_style_test() -> - hb:init(), - StoreOpts = #{ - <<"store-module">> => ?MODULE, - <<"name">> => <<"/tmp/store-cache-style">>, - <<"capacity">> => ?DEFAULT_SIZE - }, - reset(StoreOpts), - % Start the store - hb_store:start(StoreOpts), - % Test writing through hb_store interface - ok = hb_store:write(StoreOpts, <<"test-key">>, <<"test-value">>), - % Test reading through hb_store interface - Result = hb_store:read(StoreOpts, <<"test-key">>), - ?event({cache_style_read_result, Result}), - ?assertEqual({ok, <<"test-value">>}, Result), - hb_store:stop(StoreOpts). 
-``` - -### nested_map_cache_test - -Test nested map storage with cache-like linking behavior - -```erlang -nested_map_cache_test() -> - StoreOpts = #{ - <<"store-module">> => ?MODULE, - <<"name">> => <<"/tmp/store-nested-cache">>, - <<"capacity">> => ?DEFAULT_SIZE - }, - % Clean up any previous test data - reset(StoreOpts), - % Original nested map structure - OriginalMap = #{ - <<"target">> => <<"Foo">>, - <<"commitments">> => #{ - <<"key1">> => #{ - <<"alg">> => <<"rsa-pss-512">>, - <<"committer">> => <<"unique-id">> - }, - <<"key2">> => #{ - <<"alg">> => <<"hmac">>, - <<"commiter">> => <<"unique-id-2">> - } - }, - <<"other-key">> => #{ - <<"other-key-key">> => <<"other-key-value">> - } - }, - ?event({original_map, OriginalMap}), - % Step 1: Store each leaf value at data/{hash} - TargetValue = <<"Foo">>, - TargetHash = base64:encode(crypto:hash(sha256, TargetValue)), - write(StoreOpts, <<"data/", TargetHash/binary>>, TargetValue), - AlgValue1 = <<"rsa-pss-512">>, - AlgHash1 = base64:encode(crypto:hash(sha256, AlgValue1)), - write(StoreOpts, <<"data/", AlgHash1/binary>>, AlgValue1), - CommitterValue1 = <<"unique-id">>, - CommitterHash1 = base64:encode(crypto:hash(sha256, CommitterValue1)), - write(StoreOpts, <<"data/", CommitterHash1/binary>>, CommitterValue1), - AlgValue2 = <<"hmac">>, - AlgHash2 = base64:encode(crypto:hash(sha256, AlgValue2)), - write(StoreOpts, <<"data/", AlgHash2/binary>>, AlgValue2), - CommitterValue2 = <<"unique-id-2">>, - CommitterHash2 = base64:encode(crypto:hash(sha256, CommitterValue2)), - write(StoreOpts, <<"data/", CommitterHash2/binary>>, CommitterValue2), - OtherKeyValue = <<"other-key-value">>, - OtherKeyHash = base64:encode(crypto:hash(sha256, OtherKeyValue)), - write(StoreOpts, <<"data/", OtherKeyHash/binary>>, OtherKeyValue), - % Step 2: Create the nested structure with groups and links - % Create the root group - make_group(StoreOpts, <<"root">>), - % Create links for the root level keys - make_link(StoreOpts, <<"data/", 
TargetHash/binary>>, <<"root/target">>), - % Create the commitments subgroup - make_group(StoreOpts, <<"root/commitments">>), - % Create the key1 subgroup within commitments - make_group(StoreOpts, <<"root/commitments/key1">>), - make_link(StoreOpts, <<"data/", AlgHash1/binary>>, <<"root/commitments/key1/alg">>), - make_link(StoreOpts, <<"data/", CommitterHash1/binary>>, <<"root/commitments/key1/committer">>), - % Create the key2 subgroup within commitments - make_group(StoreOpts, <<"root/commitments/key2">>), - make_link(StoreOpts, <<"data/", AlgHash2/binary>>, <<"root/commitments/key2/alg">>), - make_link(StoreOpts, <<"data/", CommitterHash2/binary>>, <<"root/commitments/key2/commiter">>), - % Create the other-key subgroup - make_group(StoreOpts, <<"root/other-key">>), - make_link(StoreOpts, <<"data/", OtherKeyHash/binary>>, <<"root/other-key/other-key-key">>), - % Step 3: Test reading the structure back - % Verify the root is a composite - ?assertEqual(composite, type(StoreOpts, <<"root">>)), - % List the root contents - {ok, RootKeys} = list(StoreOpts, <<"root">>), - ?event({root_keys, RootKeys}), - ExpectedRootKeys = [<<"commitments">>, <<"other-key">>, <<"target">>], - ?assert(lists:all(fun(Key) -> lists:member(Key, ExpectedRootKeys) end, RootKeys)), - % Read the target directly - {ok, TargetValueRead} = read(StoreOpts, <<"root/target">>), - ?assertEqual(<<"Foo">>, TargetValueRead), - % Verify commitments is a composite - ?assertEqual(composite, type(StoreOpts, <<"root/commitments">>)), - % Verify other-key is a composite - ?assertEqual(composite, type(StoreOpts, <<"root/other-key">>)), - % Step 4: Test programmatic reconstruction of the nested map - ReconstructedMap = reconstruct_map(StoreOpts, <<"root">>), - ?event({reconstructed_map, ReconstructedMap}), - % Verify the reconstructed map matches the original structure - ?assert(hb_message:match(OriginalMap, ReconstructedMap)), - stop(StoreOpts). 
-``` - -### reconstruct_map - -```erlang -reconstruct_map(StoreOpts, Path) -> - case type(StoreOpts, Path) of - composite -> - % This is a group, reconstruct it as a map - {ok, ImmediateChildren} = list(StoreOpts, Path), - % The list function now correctly returns only immediate children - ?event({path, Path, immediate_children, ImmediateChildren}), - maps:from_list([ - {Key, reconstruct_map(StoreOpts, <>)} - || Key <- ImmediateChildren - ]); - simple -> - % This is a simple value, read it directly - {ok, Value} = read(StoreOpts, Path), - Value; - not_found -> - % Path doesn't exist - undefined - end. -``` - -### cache_debug_test - -Debug test to understand cache linking behavior - -```erlang -cache_debug_test() -> - StoreOpts = #{ - <<"store-module">> => ?MODULE, - <<"name">> => <<"/tmp/cache-debug">>, - <<"capacity">> => ?DEFAULT_SIZE - }, - reset(StoreOpts), - % Simulate what the cache does: - % 1. Create a group for message ID - MessageID = <<"test_message_123">>, - make_group(StoreOpts, MessageID), - % 2. Store a value at data/hash - Value = <<"test_value">>, - ValueHash = base64:encode(crypto:hash(sha256, Value)), - DataPath = <<"data/", ValueHash/binary>>, - write(StoreOpts, DataPath, Value), - % 3. Calculate a key hashpath (simplified version) - KeyHashPath = <>, - % 4. Create link from data path to key hash path - make_link(StoreOpts, DataPath, KeyHashPath), - % 5. Test what the cache would see: - ?event(debug_cache_test, {step, check_message_type}), - MsgType = type(StoreOpts, MessageID), - ?event(debug_cache_test, {message_type, MsgType}), - ?event(debug_cache_test, {step, list_message_contents}), - {ok, Subkeys} = list(StoreOpts, MessageID), - ?event(debug_cache_test, {message_subkeys, Subkeys}), - ?event(debug_cache_test, {step, read_key_hashpath}), - KeyHashResult = read(StoreOpts, KeyHashPath), - ?event(debug_cache_test, {key_hash_read_result, KeyHashResult}), - % 6. 
Test with path as list (what cache does): - ?event(debug_cache_test, {step, read_path_as_list}), - PathAsList = [MessageID, <<"key_hash_abc">>], - PathAsListResult = read(StoreOpts, PathAsList), - ?event(debug_cache_test, {path_as_list_result, PathAsListResult}), - stop(StoreOpts). -``` - -### isolated_type_debug_test - -Isolated test focusing on the exact cache issue - -```erlang -isolated_type_debug_test() -> - StoreOpts = #{ - <<"store-module">> => ?MODULE, - <<"name">> => <<"/tmp/isolated-debug">>, - <<"capacity">> => ?DEFAULT_SIZE - }, - reset(StoreOpts), - % Create the exact scenario from user's description: - % 1. A message ID with nested structure - MessageID = <<"message123">>, - make_group(StoreOpts, MessageID), - % 2. Create nested groups for "commitments" and "other-test-key" - CommitmentsPath = <>, - OtherKeyPath = <>, - ?event(isolated_debug, {creating_nested_groups, CommitmentsPath, OtherKeyPath}), - make_group(StoreOpts, CommitmentsPath), - make_group(StoreOpts, OtherKeyPath), - % 3. Add some actual data within those groups - write(StoreOpts, <>, <<"signature_data_1">>), - write(StoreOpts, <>, <<"nested_value">>), - % 4. Test type detection on the nested paths - ?event(isolated_debug, {testing_main_message_type}), - MainType = type(StoreOpts, MessageID), - ?event(isolated_debug, {main_message_type, MainType}), - ?event(isolated_debug, {testing_commitments_type}), - CommitmentsType = type(StoreOpts, CommitmentsPath), - ?event(isolated_debug, {commitments_type, CommitmentsType}), - ?event(isolated_debug, {testing_other_key_type}), - OtherKeyType = type(StoreOpts, OtherKeyPath), - ?event(isolated_debug, {other_key_type, OtherKeyType}), - % 5. 
Test what happens when reading these nested paths - ?event(isolated_debug, {reading_commitments_directly}), - CommitmentsResult = read(StoreOpts, CommitmentsPath), - ?event(isolated_debug, {commitments_read_result, CommitmentsResult}), - ?event(isolated_debug, {reading_other_key_directly}), - OtherKeyResult = read(StoreOpts, OtherKeyPath), - ?event(isolated_debug, {other_key_read_result, OtherKeyResult}), - stop(StoreOpts). -``` - -### list_with_link_test - -Test that list function resolves links correctly - -```erlang -list_with_link_test() -> - StoreOpts = #{ - <<"store-module">> => ?MODULE, - <<"name">> => <<"/tmp/store-list-link">>, - <<"capacity">> => ?DEFAULT_SIZE - }, - reset(StoreOpts), - % Create a group with some children - make_group(StoreOpts, <<"real-group">>), - write(StoreOpts, <<"real-group/child1">>, <<"value1">>), - write(StoreOpts, <<"real-group/child2">>, <<"value2">>), - write(StoreOpts, <<"real-group/child3">>, <<"value3">>), - % Create a link to the group - make_link(StoreOpts, <<"real-group">>, <<"link-to-group">>), - % List the real group to verify expected children - {ok, RealGroupChildren} = list(StoreOpts, <<"real-group">>), - ?event({real_group_children, RealGroupChildren}), - ExpectedChildren = [<<"child1">>, <<"child2">>, <<"child3">>], - ?assertEqual(ExpectedChildren, lists:sort(RealGroupChildren)), - % List via the link - should return the same children - {ok, LinkChildren} = list(StoreOpts, <<"link-to-group">>), - ?event({link_children, LinkChildren}), - ?assertEqual(ExpectedChildren, lists:sort(LinkChildren)), -``` - ---- - -*Generated from [hb_store_lmdb.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_lmdb.erl)* diff --git a/docs/book/src/hb_store_lru.erl.md b/docs/book/src/hb_store_lru.erl.md deleted file mode 100644 index be7bb3405..000000000 --- a/docs/book/src/hb_store_lru.erl.md +++ /dev/null @@ -1,1078 +0,0 @@ -# hb_store_lru - -[View source on 
GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_lru.erl) - -An in-memory store implementation, following the `hb_store` behavior -and interface. This implementation uses a least-recently-used cache first, -and offloads evicted data to a specified non-volatile store over time. -This cache is registered under `{in_memory, HTTPServerID}`, in `hb_name` -so that all processes that are executing using the HTTP server’s Opts -can find it quickly. -The least-recently-used strategy (first is the most recent used, last is the -least recently used) is implemented by keeping track of the order and bytes - on ets tables: -- A cache table containing all the entries along with the value size and - key index. -- A cache indexing table containing all the index pointing to the keys. The - IDs are then sorted to ease the eviction policy. -- A cache statistics table containing all the information about the cache - size, capacity, and indexing. - ---- - -## Exported Functions - -- `list/2` -- `make_group/2` -- `make_link/3` -- `read/2` -- `reset/1` -- `resolve/2` -- `scope/1` -- `start/1` -- `stop/1` -- `type/2` -- `write/3` - ---- - -### start - -An in-memory store implementation, following the `hb_store` behavior -The default capacity is used when no capacity is provided in the store -Maximum number of retries when fetching cache entries that aren't -Start the LRU cache. - -```erlang -start(StoreOpts = #{ <<"name">> := Name }) -> - ?event(cache_lru, {starting_lru_server, Name}), - From = self(), - spawn( - fun() -> - State = init(From, StoreOpts), - server_loop(State, StoreOpts) - end - ), - receive - {ok, InstanceMessage} -> {ok, InstanceMessage} - end. -``` - -### init - -Create the `ets` tables for the LRU cache: - -```erlang -init(From, StoreOpts) -> - % Start the persistent store. 
-``` - -### stop - -Stop the LRU in memory by offloading the keys in the ETS tables - -```erlang -stop(Opts) -> - ?event(cache_lru, {stopping_lru_server, Opts}), - #{ <<"pid">> := CacheServer } = hb_store:find(Opts), - CacheServer ! {stop, self(), Ref = make_ref()}, - receive - {ok, Ref} -> ok - end. -``` - -### scope - -The LRU store is always local, for now. -Reset the store by completely cleaning the ETS tables and - -```erlang -scope(_) -> local. -``` - -### reset - -The LRU store is always local, for now. -Reset the store by completely cleaning the ETS tables and - -```erlang -reset(Opts) -> - #{ <<"pid">> := CacheServer } = hb_store:find(Opts), - CacheServer ! {reset, self(), Ref = make_ref()}, - receive - {ok, Ref} -> - ?event({reset_store, {in_memory, CacheServer}}), - case get_persistent_store(Opts) of - no_store -> - ok; - Store -> - hb_store:reset(Store) - end - end. -``` - -### server_loop - -```erlang -server_loop(State = - #{cache_table := CacheTable, - stats_table := StatsTable, - index_table := IndexTable}, - Opts) -> - receive - {sync, From} -> - From ! {ok, self()}, - server_loop(State, Opts); - {get_cache_table, From} -> - From ! CacheTable; - {put, Key, Value, From, Ref} -> - put_cache_entry(State, Key, Value, Opts), - ?event(debug_lru, {put, {key, Key}, {value, Value}}), - From ! {ok, Ref}; - {link, Existing, New, From, Ref} -> - link_cache_entry(State, Existing, New, Opts), - From ! {ok, Ref}; - {make_group, Key, From, Ref} -> - ?event(debug_lru, {make_group, Key}), - ensure_dir(State, Key), - From ! {ok, Ref}; - {update_recent, Key, Entry, From, Ref} -> - update_recently_used(State, Key, Entry), - From ! {ok, Ref}; - {reset, From, Ref} -> - ets:delete_all_objects(CacheTable), - ets:delete_all_objects(StatsTable), - ets:delete_all_objects(IndexTable), - From ! {ok, Ref}; - {stop, From, Ref} -> - evict_all_entries(State, Opts), - From ! {ok, Ref}, - exit(self(), ok) - end, - server_loop(State, Opts). 
-``` - -### sync - -Force the caller to wait until the server has fully processed all - -```erlang -sync(Server) -> - Server ! {sync, self()}, - receive - {ok, Server} -> ok - end. -``` - -### write - -Write an entry in the cache. - -```erlang -write(Opts, RawKey, Value) -> - Key = hb_store:join(RawKey), - #{ <<"pid">> := CacheServer } = hb_store:find(Opts), - CacheServer ! {put, Key, Value, self(), Ref = make_ref()}, - receive - {ok, Ref} -> ok - end. -``` - -### read - -Retrieve value in the cache from the given key. - -```erlang -read(Opts, RawKey) -> - #{ <<"pid">> := Server } = hb_store:find(Opts), - Key = resolve(Opts, RawKey), - case fetch_cache_with_retry(Opts, Key) of - nil -> - case get_persistent_store(Opts) of - no_store -> - not_found; - PersistentStore -> - % FIXME: It might happens some links can be in LRU while data on - % the permanent store and resolve doesn't produce the same key. -``` - -### resolve - -```erlang -resolve(Opts, Key) -> - Res = resolve(Opts, "", hb_path:term_to_path_parts(hb_store:join(Key), Opts)), - ?event({resolved, Key, Res}), - Res. -``` - -### resolve - -```erlang -resolve(_, CurrPath, []) -> - hb_store:join(CurrPath); -``` - -### resolve - -```erlang -resolve(Opts, CurrPath, [Next|Rest]) -> - PathPart = hb_store:join([CurrPath, Next]), - ?event( - {resolving, - {accumulated_path, CurrPath}, - {next_segment, Next}, - {generated_partial_path_to_test, PathPart} - } - ), - case fetch_cache_with_retry(Opts, PathPart) of - {link, Link} -> - resolve(Opts, Link, Rest); - _ -> - resolve(Opts, PathPart, Rest) - end. -``` - -### make_link - -Make a link from a key to another in the store. - -```erlang -make_link(_, Link, Link) -> - ok; -``` - -### make_link - -Make a link from a key to another in the store. 
- -```erlang -make_link(Opts, RawExisting, New) -> - #{ <<"pid">> := Server } = hb_store:find(Opts), - ExistingKeyBin = convert_if_list(RawExisting), - NewKeyBin = convert_if_list(New), - case fetch_cache_with_retry(Opts, ExistingKeyBin) of - nil -> - case get_persistent_store(Opts) of - no_store -> - not_found; - Store -> - hb_store:make_link(Store, ExistingKeyBin, NewKeyBin) - end; - _ -> - Server ! {link, ExistingKeyBin, NewKeyBin, self(), Ref = make_ref()}, - receive - {ok, Ref} -> - ok - end - end. -``` - -### list - -List all the keys registered. - -```erlang -list(Opts, Path) -> - PersistentKeys = - case get_persistent_store(Opts) of - no_store -> - not_found; - Store -> - ResolvedPath = hb_store:resolve(Store, Path), - case hb_store:list(Store, ResolvedPath) of - {ok, Keys} -> Keys; - not_found -> not_found - end - end, - case {ets_keys(Opts, Path), PersistentKeys} of - {not_found, not_found} -> - not_found; - {InMemoryKeys, not_found} -> - {ok, InMemoryKeys}; - {not_found, PersistentKeys} -> - {ok, PersistentKeys}; - {InMemoryKeys, PersistentKeys} -> - {ok, hb_util:unique(InMemoryKeys ++ PersistentKeys)} - end. -``` - -### ets_keys - -List all of the keys in the store for a given path, supporting a special - -```erlang -ets_keys(Opts, <<"">>) -> ets_keys(Opts, <<"/">>); -``` - -### ets_keys - -List all of the keys in the store for a given path, supporting a special - -```erlang -ets_keys(Opts, <<"/">>) -> - #{ <<"cache-table">> := Table } = hb_store:find(Opts), - table_keys(Table, undefined); -``` - -### ets_keys - -List all of the keys in the store for a given path, supporting a special - -```erlang -ets_keys(Opts, Path) -> - case fetch_cache_with_retry(Opts, Path) of - {group, Set} -> - sets:to_list(Set); - {link, Link} -> - list(Opts, Link); - {raw, #{value := Value}} when is_map(Value) -> - maps:keys(Value); - {raw, #{value := Value}} when is_list(Value) -> - Value; - nil -> - not_found - end. 
-``` - -### type - -Determine the type of a key in the store. - -```erlang -type(Opts, Key) -> - case fetch_cache_with_retry(Opts, Key) of - nil -> - case get_persistent_store(Opts) of - no_store -> - not_found; - Store -> - ResolvedKey = hb_store:resolve(Store, Key), - hb_store:type(Store, ResolvedKey) - end; - {raw, _} -> - simple; - {link, NewKey} -> - type(Opts, NewKey); - {group, _Item} -> - composite - end. -``` - -### make_group - -Create a directory inside the store. - -```erlang -make_group(Opts, Key) -> - #{ <<"pid">> := Server } = hb_store:find(Opts), - Server ! {make_group, Key, self(), Ref = make_ref()}, - receive - {ok, Ref} -> - ok - end. -``` - -### table_keys - -```erlang -table_keys(TableName) -> - table_keys(TableName, undefined). -``` - -### table_keys - -```erlang -table_keys(TableName, Prefix) -> - FirstKey = ets:first(TableName), - table_keys(TableName, FirstKey, Prefix, []). -``` - -### table_keys - -```erlang -table_keys(_TableName, '$end_of_table', _Prefix, Acc) -> - Acc; -``` - -### table_keys - -```erlang -table_keys(TableName, CurrentKey, Prefix, Acc) -> - NextKey = ets:next(TableName, CurrentKey), - case Prefix of - undefined -> - table_keys(TableName, NextKey, Prefix, [CurrentKey | Acc]); - _ -> - PrefixParts = hb_path:term_to_path_parts(Prefix), - Key = hb_path:term_to_path_parts(CurrentKey), - case lists:prefix(PrefixParts, Key) of - true -> - Extracted = lists:nthtail(length(PrefixParts), Key), - table_keys( - TableName, - NextKey, - Prefix, - [hb_path:to_binary(Extracted) | Acc] - ); - false -> - table_keys(TableName, NextKey, Prefix, Acc) - end - end. -``` - -### get_cache_entry - -```erlang -get_cache_entry(#{cache_table := Table}, Key) -> - get_cache_entry(Table, Key); -``` - -### get_cache_entry - -```erlang -get_cache_entry(Table, Key) -> - case ets:lookup(Table, Key) of - [] -> - nil; - [{_, Entry}] -> - Entry - end. 
-``` - -### fetch_cache_with_retry - -```erlang -fetch_cache_with_retry(Opts, Key) -> - fetch_cache_with_retry(Opts, Key, 1). -``` - -### fetch_cache_with_retry - -```erlang -fetch_cache_with_retry(Opts, Key, Retries) -> - #{<<"cache-table">> := Table, <<"pid">> := Server} = hb_store:find(Opts), - case get_cache_entry(Table, Key) of - nil -> - case Retries < ?RETRY_THRESHOLD of - true -> - sync(Server), - fetch_cache_with_retry(Opts, Key, Retries + 1); - false -> - nil - end; - Entry -> - Entry - end. -``` - -### put_cache_entry - -```erlang -put_cache_entry(State, Key, Value, Opts) -> - ValueSize = erlang:external_size(Value), - CacheSize = cache_size(State), - ?event(cache_lru, {putting_entry, {size, ValueSize}, {opts, Opts}, {cache_size, CacheSize}}), - Capacity = hb_maps:get(<<"capacity">>, Opts, ?DEFAULT_LRU_CAPACITY), - case get_cache_entry(State, Key) of - nil -> - % For new entries, we check if the size will the fit the full - % capacity (even by evicting keys). -``` - -### handle_group - -```erlang -handle_group(State, Key, Opts) -> - case filename:dirname(hb_store:join(Key)) of - <<".">> -> undefined ; - BaseDir -> - case maps:get(mode, Opts, undefined) of - offload -> - Store = get_persistent_store(Opts), - ?event(cache_lru, {create_group, BaseDir}), - hb_store:make_group(Store, BaseDir), - BaseDir; - undefined -> - ensure_dir(State, BaseDir), - {group, Entry} = get_cache_entry(State, BaseDir), - BaseName = filename:basename(Key), - NewGroup = append_key_to_group(BaseName, Entry), - add_cache_entry(State, BaseDir, {group, NewGroup}), - BaseDir - end - end. -``` - -### ensure_dir - -```erlang -ensure_dir(State, Path) -> - PathParts = hb_path:term_to_path_parts(Path), - [First | Rest] = PathParts, - Result = ensure_dir(State, First, Rest), - Result. 
-``` - -### ensure_dir - -```erlang -ensure_dir(State, CurrentPath, []) -> - maybe_create_dir(State, CurrentPath, nil); -``` - -### ensure_dir - -```erlang -ensure_dir(State, CurrentPath, [Next]) -> - maybe_create_dir(State, CurrentPath, Next), - ensure_dir(State, hb_store:join([CurrentPath, Next]), []); -``` - -### ensure_dir - -```erlang -ensure_dir(State, CurrentPath, [Next | Rest]) -> - maybe_create_dir(State, CurrentPath, Next), - ensure_dir(State, hb_store:join([CurrentPath, Next]), Rest). -``` - -### maybe_create_dir - -```erlang -maybe_create_dir(State, DirPath, Value) -> - CurrentValueSet = - case get_cache_entry(State, DirPath) of - nil -> - sets:new(); - {group, CurrentValue} -> - CurrentValue - end, - NewValueSet = - case Value of - nil -> - CurrentValueSet; - _ -> - sets:add_element(Value, CurrentValueSet) - end, - ?event(cache_lru, {create_group, DirPath, sets:to_list(NewValueSet)}), - add_cache_entry(State, DirPath, {group, NewValueSet}). -``` - -### append_key_to_group - -```erlang -append_key_to_group(Key, Group) -> - BaseName = filename:basename(Key), - sets:add_element(BaseName, Group). -``` - -### assign_new_entry - -```erlang -assign_new_entry(State, Key, Value, ValueSize, Capacity, Group, Opts) -> - case cache_size(State) + ValueSize >= Capacity of - true -> - ?event(cache_lru, eviction_required), - evict_oldest_entry(State, ValueSize, Opts); - false -> - ok - end, - ID = get_index_id(State), - add_cache_index(State, ID, Key), - add_cache_entry( - State, - Key, - {raw, - #{ - value => Value, - id => ID, - size => ValueSize, - group => Group - } - } - ), - increase_cache_size(State, ValueSize). -``` - -### cache_size - -```erlang -cache_size(#{stats_table := Table}) -> - case ets:lookup(Table, size) of - [{_, Size}] -> - Size; - _ -> - 0 - end. -``` - -### get_index_id - -```erlang -get_index_id(#{stats_table := StatsTable}) -> - ets:update_counter(StatsTable, id, {2, 1}, {0, 0}). 
-``` - -### add_cache_entry - -```erlang -add_cache_entry(#{cache_table := Table}, Key, Value) -> - ets:insert(Table, {Key, Value}). -``` - -### add_cache_index - -```erlang -add_cache_index(#{index_table := Table}, ID, Key) -> - ets:insert(Table, {ID, Key}). -``` - -### link_cache_entry - -```erlang -link_cache_entry(State = #{cache_table := Table}, Existing, New, Opts) -> - ?event(cache_lru, {link, Existing, New}), - % Remove the link from the previous linked entry - clean_old_link(Table, New), - _ = handle_group(State, New, Opts), - ets:insert(Table, {New, {link, Existing}}), - % Add links to the linked entry - case ets:lookup(Table, Existing) of - [{_, {raw, Entry}}] -> - NewLinks = - case Entry of - #{links := ExistingLinks} -> - [New | ExistingLinks]; - _ -> - [New] - end, - ets:insert(Table, {Existing, {raw, Entry#{links => NewLinks}}}); - _ -> - ignore - end. -``` - -### clean_old_link - -Remove the link association for the the old linked data to the given key - -```erlang -clean_old_link(Table, Link) -> - case ets:lookup(Table, Link) of - [{_, {link, PreviousEntry}}] -> - ?event(cache_lru, {removing_previous_link, - {link, Link}, - {previous_entry, PreviousEntry} - }), - case ets:lookup(Table, PreviousEntry) of - [{_, {raw, OldEntry}}] -> - Links = sets:from_list(maps:get(links, OldEntry, [])), - UpdatedLinks = sets:del_element(Link, Links), - UpdatedEntry = maps:put( - links, - sets:to_list(UpdatedLinks), - OldEntry - ), - ets:insert(Table, {PreviousEntry, {raw, UpdatedEntry}}); - _ -> - skip - end; - _ -> skip - end. -``` - -### increase_cache_size - -```erlang -increase_cache_size(#{stats_table := StatsTable}, ValueSize) -> - ets:update_counter(StatsTable, size, {2, ValueSize}, {0, 0}). -``` - -### evict_oldest_entry - -```erlang -evict_oldest_entry(State, ValueSize, Opts) -> - evict_oldest_entry(State, ValueSize, 0, Opts). 
-``` - -### evict_oldest_entry - -```erlang -evict_oldest_entry(_State, ValueSize, FreeSize, _Opts) when FreeSize >= ValueSize -> - ok; -``` - -### evict_oldest_entry - -```erlang -evict_oldest_entry(State, ValueSize, FreeSize, Opts) -> - case cache_tail_key(State) of - nil -> - ok; - TailKey -> - Entry = #{ - size := ReclaimedSize, - id := ID, - value := TailValue, - group := Group - } = case get_cache_entry(State, TailKey) of - nil -> - % Raises a runtime error as this represents - % a non-recoverable error. This would signifies a - % inconsistency between the index and the cache table. -``` - -### evict_all_entries - -```erlang -evict_all_entries(#{cache_table := Table}, Opts) -> - lists:foreach( - fun(Key) -> - [{_, {raw, Entry}}] = ets:lookup(Table, Key), - #{ value := Value, group := Group } = Entry, - Links = maps:get(links, Entry, []), - offload_to_store(Key, Value, Links, Group, Opts) - end, - table_keys(Table) - ). -``` - -### offload_to_store - -```erlang -offload_to_store(TailKey, TailValue, Links, Group, Opts) -> - ?event(lru_offload, {offloading_to_store, Opts}), - FoundStore = get_persistent_store(Opts), - ?event(lru_offload, {found_store, FoundStore}), - case FoundStore of - no_store -> - ok; - Store -> - case Group of - undefined -> - ignore; - _ -> - hb_store:make_group(Store, Group) - end, - case hb_store:write(Store, TailKey, TailValue) of - ok -> - lists:foreach( - fun(Link) -> - ResolvedPath = resolve(Opts, Link), - hb_store:make_link(Store, ResolvedPath, Link) - end, - Links - ), - ?event(cache_lru, {offloaded_key, TailKey}), - ok; - Err -> - ?event(warning, {error_offloading_to_local_cache, Err}), - {error, Err} - end - end. -``` - -### cache_tail_key - -```erlang -cache_tail_key(#{index_table := Table}) -> - case ets:first(Table) of - '$end_of_table' -> - nil; - FirstID -> - [{_, Key}] = ets:lookup(Table, FirstID), - Key - end. 
-``` - -### delete_cache_index - -```erlang -delete_cache_index(#{index_table := IndexTable}, ID) -> - ets:delete(IndexTable, ID). -``` - -### delete_cache_entry - -```erlang -delete_cache_entry(#{cache_table := Table}, Key) -> - ets:delete(Table, Key), - ?event(cache_lru, {deleted, Key}). -``` - -### decrease_cache_size - -```erlang -decrease_cache_size(#{stats_table := Table}, Size) -> - ets:update_counter(Table, size, {2, -Size, 0, 0}). -``` - -### replace_entry - -```erlang -replace_entry(State, Key, Value, ValueSize, {raw, OldEntry = #{ value := OldValue}}) when Value =/= OldValue -> - % Update entry and move the keys in the front of the cache - % as the most used Key - ?event(debug_lru, {replace_entry, - {key, Key}, - {value, Value}, - {explicit, OldEntry} - }), - #{size := PreviousSize} = OldEntry, - NewEntry = OldEntry#{value := Value, size := ValueSize}, - add_cache_entry(State, Key, {raw, NewEntry}), - update_recently_used(State, Key, NewEntry), - update_cache_size(State, PreviousSize, ValueSize); -``` - -### replace_entry - -```erlang -replace_entry(_State, _Key, _Value, _ValueSize, {raw, _}) -> ok; -``` - -### replace_entry - -```erlang -replace_entry(_State, _Key, _Value, _ValueSize, {Type, _}) -> - % Link or group should be handle directly with `make_link` or `make_group` - % This aim of this function is to be used along with direct data insertion. -``` - -### update_recently_used - -```erlang -update_recently_used(State, Key, Entry) -> - % Acquire a new ID - NewID = get_index_id(State), - % Update the entry's ID - add_cache_entry(State, Key, {raw, Entry#{id := NewID}}), - #{id := PreviousID} = Entry, - % Delete previous ID to priorize the new NewID - delete_cache_index(State, PreviousID), - add_cache_index(State, NewID, Key). -``` - -### update_cache_size - -```erlang -update_cache_size(#{stats_table := Table}, PreviousSize, NewSize) -> - ets:update_counter(Table, size, [{2, -PreviousSize}, {2, NewSize}]). 
-``` - -### get_persistent_store - -```erlang -get_persistent_store(Opts) -> - hb_maps:get( - <<"persistent-store">>, - Opts, - no_store - ). -``` - -### convert_if_list - -```erlang -convert_if_list(Value) when is_list(Value) -> - join(Value); % Perform the conversion if it's a list -``` - -### convert_if_list - -```erlang -convert_if_list(Value) -> - Value. -``` - -### join - -```erlang -join(Key) when is_list(Key) -> - KeyList = hb_store:join(Key), - maybe_convert_to_binary(KeyList); -``` - -### join - -```erlang -join(Key) when is_binary(Key) -> Key. -``` - -### maybe_convert_to_binary - -```erlang -maybe_convert_to_binary(Value) when is_list(Value) -> - list_to_binary(Value); -``` - -### maybe_convert_to_binary - -```erlang -maybe_convert_to_binary(Value) when is_binary(Value) -> - Value. -``` - -### test_opts - -Generate a set of options for testing. The default is to use an `fs` - -```erlang -test_opts(PersistentStore) -> - test_opts(PersistentStore, 1000000). -``` - -### test_opts - -```erlang -test_opts(PersistentStore, Capacity) -> - % Set the server ID to a random address. -``` - -### unknown_value_test - -```erlang -unknown_value_test() -> - ?assertEqual(not_found, read(test_opts(default), <<"key1">>)). -``` - -### cache_term_test - -```erlang -cache_term_test() -> - StoreOpts = test_opts(default), - write(StoreOpts, <<"key1">>, <<"Hello">>), - ?assertEqual({ok, <<"Hello">>}, read(StoreOpts, <<"key1">>)). -``` - -### evict_oldest_items_test - -```erlang -evict_oldest_items_test() -> - StoreOpts = test_opts(no_store, 500), - Binary = crypto:strong_rand_bytes(200), - write(StoreOpts, <<"key1">>, Binary), - write(StoreOpts, <<"key2">>, Binary), - read(StoreOpts, <<"key1">>), - write(StoreOpts, <<"key3">>, Binary), - ?assertEqual({ok, Binary}, read(StoreOpts, <<"key1">>)), - ?assertEqual(not_found, read(StoreOpts, <<"key2">>)). 
-``` - -### evict_items_with_insufficient_space_test - -```erlang -evict_items_with_insufficient_space_test() -> - StoreOpts = test_opts(no_store, 500), - Binary = crypto:strong_rand_bytes(200), - write(StoreOpts, <<"key1">>, Binary), - write(StoreOpts, <<"key2">>, Binary), - write(StoreOpts, <<"key3">>, crypto:strong_rand_bytes(400)), - ?assertEqual(not_found, read(StoreOpts, <<"key1">>)), - ?assertEqual(not_found, read(StoreOpts, <<"key2">>)). -``` - -### evict_but_able_to_read_from_fs_store_test - -```erlang -evict_but_able_to_read_from_fs_store_test() -> - StoreOpts = test_opts(default, 500), - Binary = crypto:strong_rand_bytes(200), - write(StoreOpts, <<"key1">>, Binary), - write(StoreOpts, <<"key2">>, Binary), - read(StoreOpts, <<"key1">>), - write(StoreOpts, <<"key3">>, Binary), - ?assertEqual({ok, Binary}, read(StoreOpts, <<"key1">>)), - ?assertEqual({ok, Binary}, read(StoreOpts, <<"key2">>)), - % Directly offloads if the data is more than the LRU capacity - write(StoreOpts, <<"sub/key">>, crypto:strong_rand_bytes(600)), - ?assertMatch({ok, _}, read(StoreOpts, <<"sub/key">>)). -``` - -### stop_test - -```erlang -stop_test() -> - StoreOpts = test_opts(default, 500), - Binary = crypto:strong_rand_bytes(200), - write(StoreOpts, <<"key1">>, Binary), - write(StoreOpts, <<"key2">>, Binary), - #{ <<"pid">> := ServerPID } = hb_store:find(StoreOpts), - ok = stop(StoreOpts), - ?assertEqual(false, is_process_alive(ServerPID)), - PersistentStore = hb_maps:get(<<"persistent-store">>, StoreOpts), - ?assertEqual({ok, Binary}, hb_store:read(PersistentStore, <<"key1">>)), - ?assertEqual({ok, Binary}, hb_store:read(PersistentStore, <<"key2">>)). 
-``` - -### reset_test - -```erlang -reset_test() -> - StoreOpts = test_opts(default), - write(StoreOpts, <<"key1">>, <<"Hello">>), - write(StoreOpts, <<"key2">>, <<"Hi">>), - reset(StoreOpts), - ?assertEqual(not_found, read(StoreOpts, <<"key1">>)), - #{ <<"cache-table">> := Table } = hb_store:find(StoreOpts), - ?assertEqual([], ets:tab2list(Table)). -``` - -### list_test - -```erlang -list_test() -> - StoreOpts = test_opts(default, 500), - Binary = crypto:strong_rand_bytes(200), - make_group(StoreOpts, <<"sub">>), - write(StoreOpts, <<"hello">>, <<"world">>), - write(StoreOpts, <<"sub/key1">>, Binary), - write(StoreOpts, <<"sub/key2">>, Binary), - {ok, Keys1} = list(StoreOpts, <<"sub">>), - ?assertEqual([<<"key1">>, <<"key2">>], lists:sort(Keys1)), - write(StoreOpts, <<"sub/key3">>, Binary), - {ok, Keys2} = list(StoreOpts, <<"sub">>), - ?assertEqual( - [<<"key1">>, <<"key2">>, <<"key3">>], - lists:sort(Keys2) - ), - write(StoreOpts, <<"sub/inner/key1">>, Binary), - {ok, Keys3} = list(StoreOpts, <<"sub">>), - ?assertEqual([<<"inner">>, <<"key1">>, <<"key2">>, <<"key3">>], - lists:sort(Keys3)), - write(StoreOpts, <<"complex">>, #{<<"a">> => 10, <<"b">> => Binary}), - ?assertEqual({ok, [<<"a">>, <<"b">>]}, list(StoreOpts, <<"complex">>)). -``` - -### type_test - -```erlang -type_test() -> - StoreOpts = test_opts(default, 500), - Binary = crypto:strong_rand_bytes(200), - write(StoreOpts, <<"key1">>, Binary), - ?assertEqual(simple, type(StoreOpts, <<"key1">>)), - write(StoreOpts, <<"sub/key1">>, Binary), - ?assertEqual(composite, type(StoreOpts, <<"sub">>)), - make_link(StoreOpts, <<"key1">>, <<"keylink">>), - ?assertEqual(simple, type(StoreOpts, <<"keylink">>)). 
-``` - -### replace_link_test - -```erlang -replace_link_test() -> - StoreOpts = test_opts(default), - write(StoreOpts, <<"key1">>, <<"Hello">>), - make_link(StoreOpts, <<"key1">>, <<"keylink">>), - ?assertEqual({ok, <<"Hello">>}, read(StoreOpts, <<"keylink">>)), - write(StoreOpts, <<"key2">>, <<"Hello2">>), - make_link(StoreOpts, <<"key2">>, <<"keylink">>), - ?assertEqual({ok, <<"Hello2">>}, read(StoreOpts, <<"keylink">>)), - #{ <<"cache-table">> := Table } = hb_store:find(StoreOpts), - {raw, #{links := Links }}= get_cache_entry(Table, <<"key1">>), -``` - ---- - -*Generated from [hb_store_lru.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_lru.erl)* diff --git a/docs/book/src/hb_store_opts.erl.md b/docs/book/src/hb_store_opts.erl.md deleted file mode 100644 index 4a13ee2aa..000000000 --- a/docs/book/src/hb_store_opts.erl.md +++ /dev/null @@ -1,316 +0,0 @@ -# hb_store_opts - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_opts.erl) - -A module responsible for applying default configuration to store options. -This module takes store options and store defaults and returns a new list -of stores with default properties applied based on the store-module type. -Supports recursive application to nested store configurations. - ---- - -## Exported Functions - -- `apply/2` - ---- - -### apply - -A module responsible for applying default configuration to store options. -Apply store defaults to store options. - -```erlang -apply(StoreOpts, Defaults) when is_list(StoreOpts), is_map(Defaults) -> - lists:map( - fun(StoreOpt) -> - apply_defaults_to_store(StoreOpt, Defaults) - end, - StoreOpts - ). -``` - -### apply_defaults_to_store - -Apply defaults to a single store configuration. - -```erlang -apply_defaults_to_store(StoreOpt, Defaults) when is_map(StoreOpt), is_map(Defaults) -> - UpdatedStore = apply_defaults_by_module_type(StoreOpt, Defaults), - apply_defaults_to_substores(UpdatedStore, Defaults). 
-``` - -### apply_defaults_by_module_type - -Apply defaults based on store-module. - -```erlang -apply_defaults_by_module_type(StoreOpt, Defaults) -> - case maps:get(<<"store-module">>, StoreOpt, undefined) of - hb_store_lmdb -> - apply_type_defaults(StoreOpt, <<"lmdb">>, Defaults); - hb_store_fs -> - apply_type_defaults(StoreOpt, <<"fs">>, Defaults); - hb_store_rocksdb -> - apply_type_defaults(StoreOpt, <<"rocksdb">>, Defaults); - hb_store_gateway -> - apply_type_defaults(StoreOpt, <<"gateway">>, Defaults); - _ -> - StoreOpt - end. -``` - -### apply_type_defaults - -Apply type-specific defaults to a store. - -```erlang -apply_type_defaults(StoreOpt, TypeKey, Defaults) -> - case maps:get(TypeKey, Defaults, #{}) of - TypeDefaults when is_map(TypeDefaults) -> - maps:merge(TypeDefaults, StoreOpt); - _ -> - StoreOpt - end. -``` - -### apply_defaults_to_substores - -Apply defaults to sub-stores recursively. - -```erlang -apply_defaults_to_substores(StoreOpt, Defaults) -> - case maps:get(<<"store">>, StoreOpt, undefined) of - SubStores when is_list(SubStores) -> - UpdatedSubStores = - lists:map( - fun(SubStore) -> - apply_defaults_to_store(SubStore, Defaults) - end, - SubStores - ), - maps:put(<<"store">>, UpdatedSubStores, StoreOpt); - _ -> - StoreOpt - end. -``` - -### basic_apply_test - -```erlang -basic_apply_test() -> - StoreOpts = - [ - #{ - <<"name">> => <<"cache-mainnet/lmdb">>, - <<"store-module">> => hb_store_lmdb - } - ], - Defaults = - #{ - <<"lmdb">> => #{ - <<"capacity">> => 1073741824 - } - }, - Expected = - [ - #{ - <<"name">> => <<"cache-mainnet/lmdb">>, - <<"store-module">> => hb_store_lmdb, - <<"capacity">> => 1073741824 - } - ], - Result = apply(StoreOpts, Defaults), - ?assertEqual(Expected, Result). 
-``` - -### empty_defaults_test - -```erlang -empty_defaults_test() -> - StoreOpts = - [ - #{ - <<"name">> => <<"cache-mainnet/lmdb">>, - <<"store-module">> => hb_store_lmdb - } - ], - Defaults = #{}, - Expected = - [ - #{ - <<"name">> => <<"cache-mainnet/lmdb">>, - <<"store-module">> => hb_store_lmdb - } - ], - Result = apply(StoreOpts, Defaults), - ?assertEqual(Expected, Result). -``` - -### empty_store_opts_test - -```erlang -empty_store_opts_test() -> - StoreOpts = [], - Defaults = - #{ - <<"lmdb">> => #{ - <<"capacity">> => 1073741824 - } - }, - Expected = [], - Result = apply(StoreOpts, Defaults), - ?assertEqual(Expected, Result). -``` - -### nested_stores_test - -```erlang -nested_stores_test() -> - StoreOpts = - [ - #{ - <<"store-module">> => hb_store_gateway, - <<"store">> => [ - #{ - <<"name">> => <<"cache-mainnet/lmdb">>, - <<"store-module">> => hb_store_lmdb - } - ] - } - ], - Defaults = - #{ - <<"lmdb">> => #{ - <<"capacity">> => 1073741824 - } - }, - Expected = - [ - #{ - <<"store-module">> => hb_store_gateway, - <<"store">> => [ - #{ - <<"name">> => <<"cache-mainnet/lmdb">>, - <<"store-module">> => hb_store_lmdb, - <<"capacity">> => 1073741824 - } - ] - } - ], - Result = apply(StoreOpts, Defaults), - ?assertEqual(Expected, Result). 
-``` - -### lmdb_capacity_integration_test - -Integration test to verify that capacity is properly set for hb_store_lmdb -Full integration test simulating the hb_http_server flow - -```erlang -lmdb_capacity_integration_test() -> - CustomCapacity = 5000, - StoreOpts = - [ - #{ - <<"name">> => <<"test-lmdb">>, - <<"store-module">> => hb_store_lmdb - } - ], - Defaults = - #{ - <<"lmdb">> => #{ - <<"capacity">> => CustomCapacity - } - }, - [UpdatedStoreOpt] = apply(StoreOpts, Defaults), - ?assertEqual(CustomCapacity, maps:get(<<"capacity">>, UpdatedStoreOpt)), - ?assertEqual(<<"test-lmdb">>, maps:get(<<"name">>, UpdatedStoreOpt)), - ?assertEqual(hb_store_lmdb, maps:get(<<"store-module">>, UpdatedStoreOpt)), - ?assertNotEqual(16 * 1024 * 1024 * 1024, maps:get(<<"capacity">>, UpdatedStoreOpt)), - MultipleStoreOpts = - [ - #{ - <<"name">> => <<"test-lmdb-1">>, - <<"store-module">> => hb_store_lmdb - }, - #{ - <<"name">> => <<"test-lmdb-2">>, - <<"store-module">> => hb_store_lmdb - }, - #{ - <<"name">> => <<"test-fs">>, - <<"store-module">> => hb_store_fs - } - ], - UpdatedMultipleStoreOpts = apply(MultipleStoreOpts, Defaults), - [LmdbStore1, LmdbStore2, FsStore] = UpdatedMultipleStoreOpts, - ?assertEqual(CustomCapacity, maps:get(<<"capacity">>, LmdbStore1)), - ?assertEqual(CustomCapacity, maps:get(<<"capacity">>, LmdbStore2)), - ?assertEqual(false, maps:is_key(<<"capacity">>, FsStore)), - ?event({integration_test_passed, {lmdb_capacity, CustomCapacity}, {note, "correctly applied to store options"}}). 
-``` - -### full_integration_flow_test - -Integration test to verify that capacity is properly set for hb_store_lmdb -Full integration test simulating the hb_http_server flow - -```erlang -full_integration_flow_test() -> - LoadedConfig = #{ - <<"store_defaults">> => #{ - <<"lmdb">> => #{ - <<"capacity">> => 5000 - } - } - }, - DefaultStoreOpts = [ - #{ - <<"name">> => <<"cache-mainnet/lmdb">>, - <<"store-module">> => hb_store_lmdb - }, - #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-mainnet">> - }, - #{ - <<"store-module">> => hb_store_gateway, - <<"subindex">> => [ - #{ - <<"name">> => <<"Data-Protocol">>, - <<"value">> => <<"ao">> - } - ], - <<"store">> => [ - #{ - <<"store-module">> => hb_store_lmdb, - <<"name">> => <<"cache-mainnet/lmdb">> - } - ] - } - ], - MergedConfig = maps:merge( - #{<<"store">> => DefaultStoreOpts}, - LoadedConfig - ), - StoreOpts = maps:get(<<"store">>, MergedConfig), - StoreDefaults = maps:get(<<"store_defaults">>, MergedConfig, #{}), - UpdatedStoreOpts = apply(StoreOpts, StoreDefaults), - [LmdbStore, FsStore, GatewayStore] = UpdatedStoreOpts, - ?assertEqual(5000, maps:get(<<"capacity">>, LmdbStore)), - ?assertEqual(<<"cache-mainnet/lmdb">>, maps:get(<<"name">>, LmdbStore)), - ?assertEqual(hb_store_lmdb, maps:get(<<"store-module">>, LmdbStore)), - ?assertEqual(false, maps:is_key(<<"capacity">>, FsStore)), - ?assertEqual(hb_store_fs, maps:get(<<"store-module">>, FsStore)), - ?assertEqual(hb_store_gateway, maps:get(<<"store-module">>, GatewayStore)), - NestedStores = maps:get(<<"store">>, GatewayStore), - [NestedLmdbStore] = NestedStores, - ?assertEqual(5000, maps:get(<<"capacity">>, NestedLmdbStore)), - ?assertEqual(hb_store_lmdb, maps:get(<<"store-module">>, NestedLmdbStore)), - ?assertEqual(3, length(UpdatedStoreOpts)), -``` - ---- - -*Generated from [hb_store_opts.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_opts.erl)* diff --git a/docs/book/src/hb_store_remote_node.erl.md 
b/docs/book/src/hb_store_remote_node.erl.md deleted file mode 100644 index 3371c8345..000000000 --- a/docs/book/src/hb_store_remote_node.erl.md +++ /dev/null @@ -1,194 +0,0 @@ -# hb_store_remote_node - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_remote_node.erl) - -A store module that reads data from another AO node. -Notably, this store only provides the _read_ side of the store interface. -The write side could be added, returning an commitment that the data has -been written to the remote node. In that case, the node would probably want -to upload it to an Arweave bundler to ensure persistence, too. - ---- - -## Exported Functions - -- `make_link/3` -- `maybe_cache/2` -- `maybe_cache/3` -- `read/2` -- `resolve/2` -- `scope/1` -- `type/2` -- `write/3` - ---- - -### scope - -A store module that reads data from another AO node. -Return the scope of this store. - -```erlang -scope(_StoreOpts) -> - remote. -``` - -### resolve - -Resolve a key path in the remote store. - -```erlang -resolve(#{ <<"node">> := Node }, Key) -> - ?event({remote_resolve, {node, Node}, {key, Key}}), - Key. -``` - -### type - -Determine the type of value at a given key. - -```erlang -type(Opts = #{ <<"node">> := Node }, Key) -> - ?event({remote_type, {node, Node}, {key, Key}}), - case read(Opts, Key) of - not_found -> not_found; - _ -> simple - end. -``` - -### read - -Read a key from the remote node. 
- -```erlang -read(Opts = #{ <<"node">> := Node }, Key) -> - ?event(store_remote_node, {executing_read, {node, Node}, {key, Key}}), - HTTPRes = - hb_http:get( - Node, - #{ <<"path">> => <<"/~cache@1.0/read">>, <<"target">> => Key }, - Opts - ), - case HTTPRes of - {ok, Res} -> - % returning the whole response to get the test-key - {ok, Msg} = hb_message:with_only_committed(Res, Opts), - ?event(store_remote_node, {read_found, {result, Msg, response, Res}}), - maybe_cache(Opts, Msg, [Key]), - {ok, Msg}; - {error, _Err} -> - ?event(store_remote_node, {read_not_found, {key, Key}}), - not_found - end. -``` - -### maybe_cache - -Cache the data if the cache is enabled. The `local-store` option may - -```erlang -maybe_cache(StoreOpts, Data) -> - maybe_cache(StoreOpts, Data, []). -``` - -### maybe_cache - -```erlang -maybe_cache(StoreOpts, Data, Links) -> - ?event({maybe_cache, StoreOpts, Data}), - % Check if the local store is in our store options. -``` - -### write - -Write a key to the remote node. - -```erlang -write(Opts = #{ <<"node">> := Node }, Key, Value) -> - ?event({write, {node, Node}, {key, Key}, {value, Value}}), - WriteMsg = #{ - <<"path">> => <<"/~cache@1.0/write">>, - <<"method">> => <<"POST">>, - <<"body">> => Value - }, - SignedMsg = hb_message:commit(WriteMsg, Opts), - ?event({write, {signed, SignedMsg}}), - case hb_http:post(Node, SignedMsg, Opts) of - {ok, Response} -> - Status = hb_ao:get(<<"status">>, Response, 0, #{}), - ?event(store_remote_node, {write_completed, {response, Response}}), - case Status of - 200 -> ok; - _ -> {error, {unexpected_status, Status}} - end; - {error, Err} -> - ?event({write, {error, Err}}), - {error, Err} - end. -``` - -### make_link - -Link a source to a destination in the remote node. 
- -```erlang -make_link(Opts = #{ <<"node">> := Node }, Source, Destination) -> - ?event({make_remote_link, {node, Node}, {source, Source}, - {destination, Destination}}), - LinkMsg = #{ - <<"path">> => <<"/~cache@1.0/link">>, - <<"method">> => <<"POST">>, - <<"source">> => Source, - <<"destination">> => Destination - }, - SignedMsg = hb_message:commit(LinkMsg, Opts), - ?event({make_remote_link, {signed, SignedMsg}}), - case hb_http:post(Node, SignedMsg, Opts) of - {ok, Response} -> - Status = hb_ao:get(<<"status">>, Response, 0, #{}), - ?event(store_remote_node, {make_link_completed, {response, Response}}), - case Status of - 200 -> ok; - _ -> {error, {unexpected_status, Status}} - end; - {error, Err} -> - ?event(store_remote_node, {make_link_error, {error, Err}}), - {error, Err} - end. -``` - -### read_test - -Test that we can create a store, write a random message to it, then - -```erlang -read_test() -> - rand:seed(default), - LocalStore = #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache-mainnet">> - }, - hb_store:reset(LocalStore), - M = #{ <<"test-key">> => Rand = rand:uniform(1337) }, - ID = hb_message:id(M), - {ok, ID} = - hb_cache:write( - M, - #{ store => LocalStore } - ), - ?event({wrote, ID}), - Node = - hb_http_server:start_node( - #{ - store => LocalStore - } - ), - RemoteStore = [ - #{ <<"store-module">> => hb_store_remote_node, <<"node">> => Node } - ], - {ok, RetrievedMsg} = hb_cache:read(ID, #{ store => RemoteStore }), -``` - ---- - -*Generated from [hb_store_remote_node.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_remote_node.erl)* diff --git a/docs/book/src/hb_store_rocksdb.erl.md b/docs/book/src/hb_store_rocksdb.erl.md deleted file mode 100644 index ce581e1d0..000000000 --- a/docs/book/src/hb_store_rocksdb.erl.md +++ /dev/null @@ -1,884 +0,0 @@ -# hb_store_rocksdb - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_rocksdb.erl) - -A process wrapper over rocksdb storage. 
Replicates functionality of the - hb_fs_store module. - Encodes the item types with the help of prefixes, see `encode_value/2` - and `decode_value/1` - ---- - -## Exported Functions - -- `add_path/3` -- `code_change/3` -- `enabled/0` -- `handle_call/3` -- `handle_cast/2` -- `handle_info/2` -- `init/1` -- `list/0` -- `list/2` -- `make_group/2` -- `make_link/3` -- `path/2` -- `read/2` -- `reset/1` -- `resolve/2` -- `scope/1` -- `start_link/1` -- `start/1` -- `stop/1` -- `terminate/2` -- `type/2` -- `write/3` - ---- - -### enabled - -A process wrapper over rocksdb storage. Replicates functionality of the -Returns whether the RocksDB store is enabled. - -```erlang -enabled() -> true. --else. -``` - -### enabled - -```erlang -enabled() -> false. --endif. -``` - -### start_link - -Start the RocksDB store. - -```erlang -start_link(#{ <<"store-module">> := hb_store_rocksdb, <<"name">> := Dir}) -> - ?event(rocksdb, {starting, Dir}), - application:ensure_all_started(rocksdb), - gen_server:start_link({local, ?MODULE}, ?MODULE, Dir, []); -``` - -### start_link - -Start the RocksDB store. - -```erlang -start_link(Stores) when is_list(Stores) -> - RocksStores = - [ - Store - || - Store = #{ <<"store-module">> := Module } <- Stores, - Module =:= hb_store_rocksdb - ], - case RocksStores of - [Store] -> start_link(Store); - _ -> ignore - end; -``` - -### start_link - -Start the RocksDB store. - -```erlang -start_link(Store) -> - ?event(rocksdb, {invalid_store_config, Store}), - ignore. -``` - -### start_link - -```erlang -start_link(_Opts) -> ignore. --endif. -``` - -### start - -```erlang -start(Opts = #{ <<"store-module">> := hb_store_rocksdb, <<"name">> := _Dir}) -> - start_link(Opts); -``` - -### start - -```erlang -start(Opts) -> - start_link(Opts). -``` - -### scope - -Return scope (local) -Return path - -```erlang --spec reset([]) -> ok | no_return(). -reset(_Opts) -> - gen_server:call(?MODULE, reset, ?TIMEOUT). -``` - -```erlang -scope(_) -> local. 
-``` - -### path - -Return scope (local) -Return path - -```erlang --spec reset([]) -> ok | no_return(). -reset(_Opts) -> - gen_server:call(?MODULE, reset, ?TIMEOUT). -``` - -```erlang -path(_Opts, Path) -> - hb_store:path(Path). -``` - -### read - -Read data by the key. - -```erlang --spec read(Opts, Key) -> Result when - Opts :: map(), - Key :: key() | list(), - Result :: {ok, value()} | not_found | {error, {corruption, string()}} | {error, any()}. -``` - -```erlang -read(Opts, RawPath) -> - ?event({read, RawPath}), - Path = resolve(Opts, RawPath), - case do_read(Opts, Path) of - not_found -> - not_found; - {error, _Reason} = Err -> Err; - {ok, {raw, Result}} -> - {ok, Result}; - {ok, {link, Link}} -> - ?event({link_found, Path, Link}), - read(Opts, Link); - {ok, {group, _Result}} -> - not_found - end. -``` - -### write - -Write given Key and Value to the database - -```erlang --spec write(Opts, Key, Value) -> Result when - Opts :: map(), - Key :: key(), - Value :: value(), - Result :: ok | {error, any()}. -``` - -```erlang -write(Opts, RawKey, Value) -> - Key = hb_store:join(RawKey), - EncodedValue = encode_value(raw, Value), - ?event({writing, Key, byte_size(EncodedValue)}), - do_write(Opts, Key, EncodedValue). -``` - -### list - -```erlang --spec list(Opts, Path) -> Result when - Opts :: any(), - Path :: any(), - Result :: {ok, [string()]} | {error, term()}. -``` - -```erlang -list(Opts, Path) -> - case do_read(Opts, Path) of - not_found -> {error, not_found}; - {error, _Reason} = Err -> - ?event(rocksdb, {could_not_list_folder, Err}), - Err; - {ok, {group, Value}} -> - {ok, sets:to_list(Value)}; - {ok, {link, LinkedPath}} -> - list(Opts, LinkedPath); - Reason -> - ?event(rocksdb, {could_not_list_folder, Reason}), - {ok, []} - end. -``` - -### resolve - -Replace links in a path with the target of the link. - -```erlang --spec resolve(Opts, Path) -> Result when - Opts :: any(), - Path :: binary() | list(), - Result :: not_found | string(). 
-``` - -```erlang -resolve(Opts, Path) -> - PathList = hb_path:term_to_path_parts(hb_store:join(Path)), - ResolvedPath = do_resolve(Opts, "", PathList), - ResolvedPath. -``` - -### do_resolve - -```erlang -do_resolve(_Opts, FinalPath, []) -> - FinalPath; -``` - -### do_resolve - -```erlang -do_resolve(Opts, CurrentPath, [CurrentPath | Rest]) -> - do_resolve(Opts, CurrentPath, Rest); -``` - -### do_resolve - -```erlang -do_resolve(Opts, CurrentPath, [Next | Rest]) -> - PathPart = hb_store:join([CurrentPath, Next]), - case do_read(Opts, PathPart) of - not_found -> do_resolve(Opts, PathPart, Rest); - {error, _Reason} = Err -> Err; - {ok, {link, LinkValue}} -> - do_resolve(Opts, LinkValue, Rest); - {ok, _OtherType} -> do_resolve(Opts, PathPart, Rest) - end. -``` - -### type - -```erlang --spec type(Opts, Key) -> Result when - Opts :: map(), - Key :: binary(), - Result :: composite | simple | not_found. -``` - -```erlang -type(Opts, RawKey) -> - Key = hb_store:join(RawKey), - case do_read(Opts, Key) of - not_found -> not_found; - {ok, {raw, _Item}} -> simple; - {ok, {link, NewKey}} -> type(Opts, NewKey); - {ok, {group, _Item}} -> composite - end. -``` - -### make_group - -Creates group under the given path. - -```erlang --spec make_group(Opts, Key) -> Result when - Opts :: any(), - Key :: binary(), - Result :: ok | {error, already_added}. -``` - -```erlang -make_group(#{ <<"name">> := _DataDir }, Key) -> - gen_server:call(?MODULE, {make_group, Key}, ?TIMEOUT); -``` - -### make_group - -Creates group under the given path. - -```erlang --spec make_group(Opts, Key) -> Result when - Opts :: any(), - Key :: binary(), - Result :: ok | {error, already_added}. -``` - -```erlang -make_group(_Opts, Key) -> - gen_server:call(?MODULE, {make_group, Key}, ?TIMEOUT). -``` - -### add_path - -Add two path components together. // is not used - -```erlang --spec make_link(any(), key(), key()) -> ok. 
-make_link(_, Key1, Key1) -> - ok; -make_link(Opts, Existing, New) -> - ExistingBin = convert_if_list(Existing), - NewBin = convert_if_list(New), - % Create: NewValue -> ExistingBin - case do_read(Opts, NewBin) of - not_found -> - do_write(Opts, NewBin, encode_value(link, ExistingBin)); - _ -> - ok - end. -``` - -```erlang -add_path(_Opts, Path1, Path2) -> - Path1 ++ Path2. -``` - -### list - -List all items registered in rocksdb store. Should be used only - -```erlang -list() -> - gen_server:call(?MODULE, list, ?TIMEOUT). -``` - -### init - -```erlang -init(Dir) -> - filelib:ensure_dir(Dir), - case open_rockdb(Dir) of - {ok, DBHandle} -> - State = #{ - db_handle => DBHandle, - dir => Dir - }, - {ok, State}; - {error, Reason} -> - {stop, Reason} - end. -``` - -### handle_cast - -```erlang -handle_cast(_Request, State) -> - {noreply, State}. -``` - -### handle_info - -```erlang -handle_info(_Info, State) -> - {noreply, State}. -``` - -### handle_call - -```erlang -handle_call(Request, From, #{ db_handle := undefined, dir := Dir } = State) -> - % Re-initialize the DB handle if it's not set. 
-``` - -### handle_call - -```erlang -handle_call({do_write, Key, Value}, _From, #{db_handle := DBHandle} = State) -> - BaseName = filename:basename(Key), - rocksdb:put(DBHandle, Key, Value, #{}), - case filename:dirname(Key) of - <<".">> -> - ignore; - BaseDir -> - ensure_dir(DBHandle, BaseDir), - {ok, RawDirContent} = rocksdb:get(DBHandle, BaseDir, #{}), - NewDirContent = maybe_append_key_to_group(BaseName, RawDirContent), - ok = rocksdb:put(DBHandle, BaseDir, NewDirContent, #{}) - end, - {reply, ok, State}; -``` - -### handle_call - -```erlang -handle_call({do_read, Key}, _From, #{db_handle := DBHandle} = State) -> - Response = - case rocksdb:get(DBHandle, Key, #{}) of - {ok, Result} -> - {Type, Value} = decode_value(Result), - {ok, {Type, Value}}; - not_found -> - not_found; - {error, _Reason} = Err -> - Err - end, - {reply, Response, State}; -``` - -### handle_call - -```erlang -handle_call(reset, _From, State = #{db_handle := DBHandle, dir := Dir}) -> - ok = rocksdb:close(DBHandle), - ok = rocksdb:destroy(DirStr = ensure_list(Dir), []), - os:cmd(binary_to_list(<< "rm -Rf ", (list_to_binary(DirStr))/binary >>)), - {reply, ok, State#{ db_handle := undefined }}; -``` - -### handle_call - -```erlang -handle_call(list, _From, State = #{db_handle := DBHandle}) -> - {ok, Iterator} = rocksdb:iterator(DBHandle, []), - Items = collect(Iterator), - {reply, Items, State}; -``` - -### handle_call - -```erlang -handle_call({make_group, Path}, _From, #{db_handle := DBHandle} = State) -> - Result = ensure_dir(DBHandle, Path), - {reply, Result, State}; -``` - -### handle_call - -```erlang -handle_call(_Request, _From, State) -> - {reply, handle_call_unrecognized_message, State}. -``` - -### terminate - -```erlang -terminate(_Reason, _State) -> - ok. -``` - -### code_change - -```erlang -code_change(_OldVsn, State, _Extra) -> - {ok, State}. 
-``` - -### do_write - -Write given Key and Value to the database - -```erlang --spec do_write(Opts, Key, Value) -> Result when - Opts :: map(), - Key :: key(), - Value :: value(), - Result :: ok | {error, any()}. -``` - -```erlang -do_write(_Opts, Key, Value) -> - gen_server:call(?MODULE, {do_write, Key, Value}, ?TIMEOUT). -``` - -### do_read - -```erlang -do_read(_Opts, Key) -> - gen_server:call(?MODULE, {do_read, Key}, ?TIMEOUT). -``` - -### ensure_dir - -```erlang --spec decode_value(binary()) -> {value_type(), binary()}. -decode_value(<<1, Value/binary>>) -> {link, Value}; -decode_value(<<2, Value/binary>>) -> {raw, Value}; -decode_value(<<3, Value/binary>>) -> {group, binary_to_term(Value)}. -``` - -```erlang -ensure_dir(DBHandle, BaseDir) -> - PathParts = hb_path:term_to_path_parts(BaseDir), - [First | Rest] = PathParts, - Result = ensure_dir(DBHandle, First, Rest), - Result. -``` - -### ensure_dir - -```erlang -ensure_dir(DBHandle, CurrentPath, []) -> - maybe_create_dir(DBHandle, CurrentPath, nil), - ok; -``` - -### ensure_dir - -```erlang -ensure_dir(DBHandle, CurrentPath, [Next]) -> - maybe_create_dir(DBHandle, CurrentPath, Next), - ensure_dir(DBHandle, hb_store:join([CurrentPath, Next]), []); -``` - -### ensure_dir - -```erlang -ensure_dir(DBHandle, CurrentPath, [Next | Rest]) -> - maybe_create_dir(DBHandle, CurrentPath, Next), - ensure_dir(DBHandle, hb_store:join([CurrentPath, Next]), Rest). -``` - -### maybe_create_dir - -```erlang -maybe_create_dir(DBHandle, DirPath, Value) -> - CurrentValueSet = - case rocksdb:get(DBHandle, DirPath, #{}) of - not_found -> sets:new(); - {ok, CurrentValue} -> - {group, DecodedOldValue} = decode_value(CurrentValue), - DecodedOldValue - end, - NewValueSet = - case Value of - nil -> CurrentValueSet; - _ -> sets:add_element(Value, CurrentValueSet) - end, - rocksdb:put(DBHandle, DirPath, encode_value(group, NewValueSet), #{}). 
-``` - -### open_rockdb - -```erlang -open_rockdb(RawDir) -> - filelib:ensure_dir(Dir = ensure_list(RawDir)), - Options = [{create_if_missing, true}], - rocksdb:open(Dir, Options). -``` - -### convert_if_list - -```erlang -convert_if_list(Value) when is_list(Value) -> - join(Value); % Perform the conversion if it's a list -``` - -### convert_if_list - -Ensure that the given filename is a list, not a binary. - -```erlang -convert_if_list(Value) -> - Value. % Leave unchanged if it's not a list -``` - -### ensure_list - -Ensure that the given filename is a list, not a binary. - -```erlang -ensure_list(Value) when is_binary(Value) -> binary_to_list(Value); -``` - -### ensure_list - -Ensure that the given filename is a list, not a binary. - -```erlang -ensure_list(Value) -> Value. -``` - -### maybe_convert_to_binary - -Ensure that the given filename is a list, not a binary. - -```erlang -maybe_convert_to_binary(Value) when is_list(Value) -> - list_to_binary(Value); -``` - -### maybe_convert_to_binary - -Ensure that the given filename is a list, not a binary. - -```erlang -maybe_convert_to_binary(Value) when is_binary(Value) -> - Value. -``` - -### join - -```erlang -join(Key) when is_list(Key) -> - KeyList = hb_store:join(Key), - maybe_convert_to_binary(KeyList); -``` - -### join - -```erlang -join(Key) when is_binary(Key) -> Key. -``` - -### collect - -```erlang -collect(Iterator) -> - case rocksdb:iterator_move(Iterator, <<>>) of - {error, invalid_iterator} -> []; - {ok, Key, Value} -> - DecodedValue = decode_value(Value), - collect(Iterator, [{Key, DecodedValue}]) - end. 
-``` - -### collect - -```erlang -collect(Iterator, Acc) -> - case rocksdb:iterator_move(Iterator, next) of - {ok, Key, Value} -> - % Continue iterating, accumulating the key-value pair in the list - DecodedValue = decode_value(Value), - collect(Iterator, [{Key, DecodedValue} | Acc]); - {error, invalid_iterator} -> - % Reached the end of the iterator, return the accumulated list - lists:reverse(Acc) - end. -``` - -### maybe_append_key_to_group - -```erlang -maybe_append_key_to_group(Key, CurrentDirContents) -> - case decode_value(CurrentDirContents) of - {group, GroupSet} -> - BaseName = filename:basename(Key), - NewGroupSet = sets:add_element(BaseName, GroupSet), - encode_value(group, NewGroupSet); - _ -> - CurrentDirContents - end. -``` - -### get_or_start_server - -```erlang -get_or_start_server() -> - % Store = lists:keyfind(hb_store_rocksdb2, 1, hb_store:test_stores()), - Opts = #{ - <<"store-module">> => hb_store_rocksdb, - <<"name">> => <<"cache-TEST/rocksdb">> - }, - case start_link(Opts) of - {ok, Pid} -> - Pid; - {error, {already_started, Pid}} -> - Pid - end. -``` - -### write_read_test_ - -```erlang -write_read_test_() -> - {foreach, - fun() -> - Pid = get_or_start_server(), - unlink(Pid) - end, - fun(_) -> reset([]) end, - [ - {"can read/write data", fun() -> - ok = write(#{}, <<"test_key">>, <<"test_value">>), - {ok, Value} = read(#{}, <<"test_key">>), - ?assertEqual(<<"test_value">>, Value) - end}, - {"returns not_found for non existing keys", fun() -> - Value = read(#{}, <<"non_existing">>), - ?assertEqual(not_found, Value) - end}, - {"follows links", fun() -> - ok = write(#{}, <<"test_key2">>, <<"value_under_linked_key">>), - ok = make_link(#{}, <<"test_key2">>, <<"test_key">>), - {ok, Value} = read(#{}, <<"test_key">>), - ?assertEqual(<<"value_under_linked_key">>, Value) - end} - ]}. 
-``` - -### api_test_ - -```erlang -api_test_() -> - {foreach, - fun() -> - Pid = get_or_start_server(), - unlink(Pid) - end, - fun(_) -> reset([]) end, [ - {"write/3 can automatically create folders", fun() -> - ok = write(#{}, <<"messages/key1">>, <<"val1">>), - ok = write(#{}, <<"messages/key2">>, <<"val2">>), - {ok, Items} = list(#{}, <<"messages">>), - ?assertEqual( - lists:sort([<<"key1">>, <<"key2">>]), - lists:sort(Items) - ), - {ok, Item} = read(#{}, <<"messages/key1">>), - ?assertEqual(<<"val1">>, Item) - end}, - {"list/2 lists keys under given path", fun() -> - ok = write(#{}, <<"messages/key1">>, <<"val1">>), - ok = write(#{}, <<"messages/key2">>, <<"val2">>), - ok = write(#{}, <<"other_path/key3">>, <<"val3">>), - {ok, Items} = list(#{}, <<"messages">>), - ?assertEqual( - lists:sort([<<"key1">>, <<"key2">>]), lists:sort(Items) - ) - end}, - {"list/2 when database is empty", fun() -> - ?assertEqual({error, not_found}, list(#{}, <<"process/slot">>)) - end}, - {"make_link/3 creates a link to actual data", fun() -> - ok = write(ignored_options, <<"key1">>, <<"test_value">>), - ok = make_link([], <<"key1">>, <<"key2">>), - {ok, Value} = read([], <<"key2">>), - ?assertEqual(<<"test_value">>, Value) - end}, - {"make_link/3 does not create links if keys are same", fun() -> - ok = make_link([], <<"key1">>, <<"key1">>), - ?assertEqual(not_found, read(#{}, <<"key1">>)) - end}, - {"reset cleans up the database", fun() -> - ok = write(ignored_options, <<"test_key">>, <<"test_value">>), - ok = reset([]), - ?assertEqual(not_found, read(ignored_options, <<"test_key">>)) - end}, - { - "type/2 can identify simple items", - fun() -> - ok = write(#{}, <<"simple_item">>, <<"test">>), - ?assertEqual(simple, type(#{}, <<"simple_item">>)) - end - }, - { - "type/2 returns not_found for non existing keys", - fun() -> - ?assertEqual(not_found, type(#{}, <<"random_key">>)) - end - }, - { - "type/2 resolves links before checking real type of the following item", - fun() -> - ok = 
write(#{}, <<"messages/key1">>, <<"val1">>), - ok = write(#{}, <<"messages/key2">>, <<"val2">>), - make_link(#{}, <<"messages">>, <<"CompositeKey">>), - make_link(#{}, <<"messages/key2">>, <<"SimpleKey">>), - ?assertEqual(composite, type(#{}, <<"CompositeKey">>)), - ?assertEqual(simple, type(#{}, <<"SimpleKey">>)) - end - }, - { - "type/2 treats groups as composite items", - fun() -> - make_group(#{}, <<"messages_folder">>), - ?assertEqual(composite, type(#{}, <<"messages_folder">>)) - end - }, - { - "resolve/2 resolves raw/groups items", - fun() -> - write(#{}, <<"top_level/level1/item1">>, <<"1">>), - write(#{}, <<"top_level/level1/item2">>, <<"1">>), - write(#{}, <<"top_level/level1/item3">>, <<"1">>), - ?assertEqual( - <<"top_level/level1/item3">>, - resolve(#{}, <<"top_level/level1/item3">>) - ) - end - }, - { - "resolve/2 follows links", - fun() -> - write(#{}, <<"data/the_data_item">>, <<"the_data">>), - make_link(#{}, <<"data/the_data_item">>, <<"top_level/level1/item">>), - ?assertEqual( - <<"data/the_data_item">>, - resolve(#{}, <<"top_level/level1/item">>) - ) - end - }, - { - "make_group/2 creates a folder", - fun() -> - ?assertEqual(ok, make_group(#{}, <<"messages">>)), - ?assertEqual( - list(#{}, <<"messages">>), - {ok, []} - ) - end - }, - { - "make_group/2 does not override folder contents", - fun() -> - write(#{}, <<"messages/id">>, <<"1">>), - write(#{}, <<"messages/commitments">>, <<"2">>), - ?assertEqual(ok, make_group(#{}, <<"messages">>)), - ?assertEqual( - list(#{}, <<"messages">>), - {ok, [<<"id">>, <<"commitments">>]} - ) - end - }, - { - "make_group/2 making deep nested groups", - fun() -> - make_group(#{}, <<"messages/ids/items">>), - ?assertEqual( - {ok, [<<"ids">>]}, - list(#{}, <<"messages">>) - ), - ?assertEqual( - {ok, [<<"items">>]}, - list(#{}, <<"messages/ids">>) - ), - ?assertEqual( - {ok, []}, - list(#{}, <<"messages/ids/items">>) - ) - end - }, - { - "write/3 automatically does deep groups", - fun() -> - write(#{}, 
<<"messages/ids/item1">>, <<"1">>), - write(#{}, <<"messages/ids/item2">>, <<"2">>), - ?assertEqual( - {ok, [<<"ids">>]}, - list(#{}, <<"messages">>) - ), - ?assertEqual( - {ok, [<<"item2">>, <<"item1">>]}, - list(#{}, <<"messages/ids">>) - ), - ?assertEqual(read(#{}, <<"messages/ids/item1">>),{ok, <<"1">>}), - ?assertEqual(read(#{}, <<"messages/ids/item2">>), {ok, <<"2">>}) - end - } - ]}. -``` - ---- - -*Generated from [hb_store_rocksdb.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_store_rocksdb.erl)* diff --git a/docs/book/src/hb_structured_fields.erl.md b/docs/book/src/hb_structured_fields.erl.md deleted file mode 100644 index 62a699964..000000000 --- a/docs/book/src/hb_structured_fields.erl.md +++ /dev/null @@ -1,1426 +0,0 @@ -# hb_structured_fields - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_structured_fields.erl) - -A module for parsing and converting between Erlang and HTTP Structured -Fields, as described in RFC-9651. -The mapping between Erlang and structured headers types is as follow: -List: list() -Inner list: {list, [item()], params()} -Dictionary: [{binary(), item()}] - There is no distinction between empty list and empty dictionary. -Item with parameters: {item, bare_item(), params()} -Parameters: [{binary(), bare_item()}] -Bare item: one bare_item() that can be of type: -Integer: integer() -Decimal: {decimal, {integer(), integer()}} -String: {string, binary()} -Token: {token, binary()} -Byte sequence: {binary, binary()} -Boolean: boolean() - ---- - -## Exported Functions - -- `bare_item/1` -- `dictionary/1` -- `from_bare_item/1` -- `item/1` -- `list/1` -- `parse_bare_item/1` -- `parse_binary/1` -- `parse_dictionary/1` -- `parse_item/1` -- `parse_list/1` -- `to_dictionary/1` -- `to_item/1` -- `to_item/2` -- `to_list/1` - ---- - -### to_dictionary - -A module for parsing and converting between Erlang and HTTP Structured -Convert a map to a dictionary. 
- -```erlang -to_dictionary(Map) when is_map(Map) -> - to_dictionary(maps:to_list(Map)); -``` - -### to_dictionary - -A module for parsing and converting between Erlang and HTTP Structured -Convert a map to a dictionary. - -```erlang -to_dictionary(Pairs) when is_list(Pairs) -> - to_dictionary([], Pairs). -``` - -### to_dictionary - -```erlang -to_dictionary(Dict, []) -> - {ok, Dict}; -``` - -### to_dictionary - -```erlang -to_dictionary(_Dict, [{ Name, Value } | _Rest]) when is_map(Value) -> - {too_deep, Name}; -``` - -### to_dictionary - -```erlang -to_dictionary(Dict, [{Name, Value} | Rest]) -> - case to_item_or_inner_list(Value) of - {ok, ItemOrInner} -> to_dictionary([{key_to_binary(Name), ItemOrInner} | Dict], Rest); - E -> E - end. -``` - -### to_item - -Convert an item to a dictionary. - -```erlang -to_item({item, Kind, Params}) when is_list(Params) -> - {ok, {item, to_bare_item(Kind), [to_param(Pair) || Pair <- Params] }}; -``` - -### to_item - -Convert an item to a dictionary. - -```erlang -to_item(Item) -> - to_item(Item, []). -``` - -### to_item - -```erlang -to_item(Item, Params) when is_list(Params) -> - to_item({ item, to_bare_item(Item), Params}). -``` - -### to_list - -Convert a list to an SF term. - -```erlang -to_list(List) when is_list(List) -> - to_list([], List). -``` - -### to_list - -```erlang -to_list(Acc, []) -> - {ok, lists:reverse(Acc)}; -``` - -### to_list - -```erlang -to_list(Acc, [ItemOrInner | Rest]) -> - Res = to_item_or_inner_list(ItemOrInner), - case Res of - {ok, Elem} -> to_list([Elem | Acc], Rest); - E -> E - end. -``` - -### to_inner_list - -Convert an inner list to an SF term. - -```erlang -to_inner_list({list, Inner, Params}) when is_list(Inner) andalso is_list(Params) -> - {ok, {list, [to_inner_item(I) || I <-- Inner], [to_param(Pair) || Pair <- Params]}}; -``` - -### to_inner_list - -Convert an inner list to an SF term. - -```erlang -to_inner_list(Inner) -> - to_inner_list(Inner, []). 
-``` - -### to_inner_list - -```erlang -to_inner_list(Inner, Params) when is_list(Inner) andalso is_list(Params) -> - to_inner_list([], Inner, Params). -``` - -### to_inner_list - -```erlang -to_inner_list(Inner, [], Params) when is_list(Params) -> - {ok, {list, lists:reverse(Inner), [to_param(Param) || Param <- Params]}}; -``` - -### to_inner_list - -```erlang -to_inner_list(_List, [Item | _Rest], _Params) when is_list(Item) orelse is_map(Item) -> - {too_deep, Item}; -``` - -### to_inner_list - -```erlang -to_inner_list(Inner, [Item | Rest], Params) -> - case to_item(Item) of - {ok, I} -> to_inner_list([I | Inner], Rest, Params); - E -> E - end. -``` - -### to_item_or_inner_list - -Convert an Erlang term to an SF `item` or `inner_list`. - -```erlang -to_item_or_inner_list(ItemOrInner) -> - case ItemOrInner of - Map when is_map(Map) -> {too_deep, Map}; - % pre-parsed inner list - {list, Inner, Params} -> to_inner_list(Inner, Params); - Item when not is_list(Item) -> to_item(Item); - Inner when is_list(Inner) -> to_inner_list(Inner) - end. -``` - -### to_inner_item - -Convert an Erlang term to an SF `item`. - -```erlang -to_inner_item(Item) when is_list(Item) -> - {too_deep, Item}; -``` - -### to_inner_item - -Convert an Erlang term to an SF `item`. - -```erlang -to_inner_item(Item) -> - case to_item(Item) of - {ok, I} -> I; - E -> E - end. -``` - -### to_param - -Convert an Erlang term to an SF `parameter`. - -```erlang -to_param({Name, Value}) -> - NormalizedName = key_to_binary(Name), - {NormalizedName, to_bare_item(Value)}. -``` - -### to_bare_item - -Convert an Erlang term to an SF `bare_item`. 
- -```erlang -to_bare_item(BareItem) -> - case BareItem of - % Assume tuple is already parsed - BI when is_tuple(BI) -> BI; - % Serialize -> Parse numbers in order to ensure their lengths adhere to structured fields - B when is_boolean(B) -> B; - I when is_integer(I) -> - {Int, _} = parse_bare_item(bare_item(I)), - Int; - F when is_float(F) -> - {Dec, _} = parse_bare_item(bare_item({decimal, {F, 0}})), - Dec; - A when is_atom(A) -> {token, atom_to_binary(A)}; - S when is_binary(S) or is_list(S) -> {string, iolist_to_binary(S)} - end. -``` - -### from_bare_item - -Convert an SF `bare_item` to an Erlang term. - -```erlang -from_bare_item(BareItem) -> - case BareItem of - I when is_integer(I) -> I; - B when is_boolean(B) -> B; - D = {decimal, _} -> - list_to_float( - binary_to_list( - iolist_to_binary( - bare_item(D) - ) - ) - ); - {string, S} -> S; - {token, T} -> - try binary_to_existing_atom(T) of - Atom -> Atom - catch - error:badarg -> T - end; - {binary, B} -> B - end. -``` - -### key_to_binary - -Convert an Erlang term to a binary key. - -```erlang -key_to_binary(Key) when is_atom(Key) -> atom_to_binary(Key); -``` - -### key_to_binary - -Convert an Erlang term to a binary key. -Parse a binary SF dictionary. - -```erlang --spec parse_dictionary(binary()) -> sh_dictionary(). -parse_dictionary(<<>>) -> - []; -parse_dictionary(<>) when ?IS_ALPHA(C) - or ?IS_DIGIT(C) or (C =:= $*) or (C =:= $%) or (C =:= $_) or (C =:= $-) - or (C =:= $.) -> - parse_dict_key(R, [], <>). -``` - -```erlang -key_to_binary(Key) -> iolist_to_binary(Key). -``` - -### parse_dict_key - -Convert an Erlang term to a binary key. -Parse a binary SF dictionary. - -```erlang --spec parse_dictionary(binary()) -> sh_dictionary(). -parse_dictionary(<<>>) -> - []; -parse_dictionary(<>) when ?IS_ALPHA(C) - or ?IS_DIGIT(C) or (C =:= $*) or (C =:= $%) or (C =:= $_) or (C =:= $-) - or (C =:= $.) -> - parse_dict_key(R, [], <>). 
-``` - -```erlang -parse_dict_key(<<$=, $(, R0/bits>>, Acc, K) -> - {Item, R} = parse_inner_list(R0, []), - parse_dict_before_sep(R, lists:keystore(K, 1, Acc, {K, Item})); -``` - -### parse_dict_key - -Convert an Erlang term to a binary key. -Parse a binary SF dictionary. - -```erlang --spec parse_dictionary(binary()) -> sh_dictionary(). -parse_dictionary(<<>>) -> - []; -parse_dictionary(<>) when ?IS_ALPHA(C) - or ?IS_DIGIT(C) or (C =:= $*) or (C =:= $%) or (C =:= $_) or (C =:= $-) - or (C =:= $.) -> - parse_dict_key(R, [], <>). -``` - -```erlang -parse_dict_key(<<$=, R0/bits>>, Acc, K) -> - {Item, R} = parse_item1(R0), - parse_dict_before_sep(R, lists:keystore(K, 1, Acc, {K, Item})); -``` - -### parse_dict_key - -Convert an Erlang term to a binary key. -Parse a binary SF dictionary. - -```erlang --spec parse_dictionary(binary()) -> sh_dictionary(). -parse_dictionary(<<>>) -> - []; -parse_dictionary(<>) when ?IS_ALPHA(C) - or ?IS_DIGIT(C) or (C =:= $*) or (C =:= $%) or (C =:= $_) or (C =:= $-) - or (C =:= $.) -> - parse_dict_key(R, [], <>). -``` - -```erlang -parse_dict_key(<>, Acc, K) when - ?IS_ALPHA(C) or ?IS_DIGIT(C) or - (C =:= $_) or (C =:= $-) or (C =:= $.) or (C =:= $*) or (C =:= $%) -> - parse_dict_key(R, Acc, <>); -``` - -### parse_dict_key - -Convert an Erlang term to a binary key. -Parse a binary SF dictionary. - -```erlang --spec parse_dictionary(binary()) -> sh_dictionary(). -parse_dictionary(<<>>) -> - []; -parse_dictionary(<>) when ?IS_ALPHA(C) - or ?IS_DIGIT(C) or (C =:= $*) or (C =:= $%) or (C =:= $_) or (C =:= $-) - or (C =:= $.) -> - parse_dict_key(R, [], <>). -``` - -```erlang -parse_dict_key(<<$;, R0/bits>>, Acc, K) -> - {Params, R} = parse_before_param(R0, []), - parse_dict_before_sep(R, lists:keystore(K, 1, Acc, {K, {item, true, Params}})); -``` - -### parse_dict_key - -Convert an Erlang term to a binary key. -Parse a binary SF dictionary. - -```erlang --spec parse_dictionary(binary()) -> sh_dictionary(). 
-parse_dictionary(<<>>) -> - []; -parse_dictionary(<>) when ?IS_ALPHA(C) - or ?IS_DIGIT(C) or (C =:= $*) or (C =:= $%) or (C =:= $_) or (C =:= $-) - or (C =:= $.) -> - parse_dict_key(R, [], <>). -``` - -```erlang -parse_dict_key(R, Acc, K) -> - parse_dict_before_sep(R, lists:keystore(K, 1, Acc, {K, {item, true, []}})). -``` - -### parse_dict_before_sep - -Parse a binary SF dictionary before a separator. - -```erlang -parse_dict_before_sep(<<$\s, R/bits>>, Acc) -> - parse_dict_before_sep(R, Acc); -``` - -### parse_dict_before_sep - -Parse a binary SF dictionary before a separator. - -```erlang -parse_dict_before_sep(<<$\t, R/bits>>, Acc) -> - parse_dict_before_sep(R, Acc); -``` - -### parse_dict_before_sep - -Parse a binary SF dictionary before a separator. - -```erlang -parse_dict_before_sep(<>, Acc) when C =:= $, -> - parse_dict_before_member(R, Acc); -``` - -### parse_dict_before_sep - -Parse a binary SF dictionary before a separator. - -```erlang -parse_dict_before_sep(<<>>, Acc) -> - Acc. -``` - -### parse_dict_before_member - -Parse a binary SF dictionary before a member. - -```erlang -parse_dict_before_member(<<$\s, R/bits>>, Acc) -> - parse_dict_before_member(R, Acc); -``` - -### parse_dict_before_member - -Parse a binary SF dictionary before a member. - -```erlang -parse_dict_before_member(<<$\t, R/bits>>, Acc) -> - parse_dict_before_member(R, Acc); -``` - -### parse_dict_before_member - -Parse a binary SF dictionary before a member. - -```erlang -parse_dict_before_member(<>, Acc) - when ?IS_ALPHA(C) or ?IS_DIGIT(C) or (C =:= $*) - or (C =:= $%) or (C =:= $_) or (C =:= $-) -> - parse_dict_key(R, Acc, <>). -``` - -### parse_item1 - -```erlang --spec parse_item(binary()) -> sh_item(). -parse_item(Bin) -> - {Item, <<>>} = parse_item1(Bin), - Item. 
-``` - -```erlang -parse_item1(Bin) -> - case parse_bare_item(Bin) of - {Item, <<$;, R/bits>>} -> - {Params, Rest} = parse_before_param(R, []), - {{item, Item, Params}, Rest}; - {Item, Rest} -> - {{item, Item, []}, Rest} - end. -``` - -### parse_list_member - -Parse a binary SF list before a member. - -```erlang --spec parse_list(binary()) -> sh_list(). -parse_list(<<>>) -> - []; -parse_list(Bin) -> - parse_list_before_member(Bin, []). -``` - -```erlang -parse_list_member(<<$(, R0/bits>>, Acc) -> - {Item, R} = parse_inner_list(R0, []), - parse_list_before_sep(R, [Item | Acc]); -``` - -### parse_list_member - -Parse a binary SF list before a member. - -```erlang --spec parse_list(binary()) -> sh_list(). -parse_list(<<>>) -> - []; -parse_list(Bin) -> - parse_list_before_member(Bin, []). -``` - -```erlang -parse_list_member(R0, Acc) -> - {Item, R} = parse_item1(R0), - parse_list_before_sep(R, [Item | Acc]). -``` - -### parse_list_before_sep - -Parse a binary SF list before a separator. - -```erlang -parse_list_before_sep(<<$\s, R/bits>>, Acc) -> - parse_list_before_sep(R, Acc); -``` - -### parse_list_before_sep - -Parse a binary SF list before a separator. - -```erlang -parse_list_before_sep(<<$\t, R/bits>>, Acc) -> - parse_list_before_sep(R, Acc); -``` - -### parse_list_before_sep - -Parse a binary SF list before a separator. - -```erlang -parse_list_before_sep(<<$,, R/bits>>, Acc) -> - parse_list_before_member(R, Acc); -``` - -### parse_list_before_sep - -Parse a binary SF list before a separator. - -```erlang -parse_list_before_sep(<<>>, Acc) -> - lists:reverse(Acc). -``` - -### parse_list_before_member - -Parse a binary SF list before a member. - -```erlang -parse_list_before_member(<<$\s, R/bits>>, Acc) -> - parse_list_before_member(R, Acc); -``` - -### parse_list_before_member - -Parse a binary SF list before a member. 
- -```erlang -parse_list_before_member(<<$\t, R/bits>>, Acc) -> - parse_list_before_member(R, Acc); -``` - -### parse_list_before_member - -Parse a binary SF list before a member. - -```erlang -parse_list_before_member(R, Acc) -> - parse_list_member(R, Acc). -``` - -### parse_inner_list - -```erlang -parse_inner_list(<<$\s, R/bits>>, Acc) -> - parse_inner_list(R, Acc); -``` - -### parse_inner_list - -```erlang -parse_inner_list(<<$), $;, R0/bits>>, Acc) -> - {Params, R} = parse_before_param(R0, []), - {{list, lists:reverse(Acc), Params}, R}; -``` - -### parse_inner_list - -```erlang -parse_inner_list(<<$), R/bits>>, Acc) -> - {{list, lists:reverse(Acc), []}, R}; -``` - -### parse_inner_list - -```erlang -parse_inner_list(R0, Acc) -> - {Item, R = <>} = parse_item1(R0), - true = (C =:= $\s) orelse (C =:= $)), - parse_inner_list(R, [Item | Acc]). -``` - -### parse_before_param - -```erlang -parse_before_param(<<$\s, R/bits>>, Acc) -> - parse_before_param(R, Acc); -``` - -### parse_before_param - -```erlang -parse_before_param(<>, Acc) when ?IS_LC_ALPHA(C) or (C =:= $*) -> - parse_param(R, Acc, <>). -``` - -### parse_param - -```erlang -parse_param(<<$;, R/bits>>, Acc, K) -> - parse_before_param(R, lists:keystore(K, 1, Acc, {K, true})); -``` - -### parse_param - -```erlang -parse_param(<<$=, R0/bits>>, Acc, K) -> - case parse_bare_item(R0) of - {Item, <<$;, R/bits>>} -> - parse_before_param(R, lists:keystore(K, 1, Acc, {K, Item})); - {Item, R} -> - {lists:keystore(K, 1, Acc, {K, Item}), R} - end; -``` - -### parse_param - -```erlang -parse_param(<>, Acc, K) when - ?IS_LC_ALPHA(C) or ?IS_DIGIT(C) or - (C =:= $_) or (C =:= $-) or (C =:= $.) or (C =:= $*) -> - parse_param(R, Acc, <>); -``` - -### parse_param - -```erlang -parse_param(R, Acc, K) -> - {lists:keystore(K, 1, Acc, {K, true}), R}. -``` - -### parse_bare_item - -Parse an integer or decimal. 
- -```erlang -parse_bare_item(<<$-, R/bits>>) -> parse_number(R, 0, <<$->>); -``` - -### parse_bare_item - -Parse an integer or decimal. - -```erlang -parse_bare_item(<>) when ?IS_DIGIT(C) -> parse_number(R, 1, <>); -``` - -### parse_bare_item - -Parse an integer or decimal. - -```erlang -parse_bare_item(<<$", R/bits>>) -> - % Parse a string. -``` - -### parse_bare_item - -```erlang -parse_bare_item(<>) when ?IS_ALPHA(C) or (C =:= $*) -> - % Parse a token. -``` - -### parse_bare_item - -```erlang -parse_bare_item(<<$:, R/bits>>) -> - % Parse a byte sequence. -``` - -### parse_bare_item - -```erlang -parse_bare_item(<<"?0", R/bits>>) -> - % Parse a boolean false. -``` - -### parse_bare_item - -```erlang -parse_bare_item(<<"?1", R/bits>>) -> - % Parse a boolean true. -``` - -### parse_number - -Parse an integer or decimal binary. - -```erlang -parse_number(<>, L, Acc) when ?IS_DIGIT(C) -> - parse_number(R, L + 1, <>); -``` - -### parse_number - -Parse an integer or decimal binary. - -```erlang -parse_number(<<$., R/bits>>, L, Acc) -> - parse_decimal(R, L, 0, Acc, <<>>); -``` - -### parse_number - -Parse an integer or decimal binary. - -```erlang -parse_number(R, L, Acc) when L =< 15 -> - {binary_to_integer(Acc), R}. -``` - -### parse_decimal - -Parse a decimal binary. - -```erlang -parse_decimal(<>, L1, L2, IntAcc, FracAcc) when ?IS_DIGIT(C) -> - parse_decimal(R, L1, L2 + 1, IntAcc, <>); -``` - -### parse_decimal - -Parse a decimal binary. - -```erlang -parse_decimal(R, L1, L2, IntAcc, FracAcc0) when L1 =< 12, L2 >= 1, L2 =< 3 -> - %% While not strictly required this gives a more consistent representation. -``` - -### parse_string - -Parse a string binary. - -```erlang -parse_string(<<$\\, $", R/bits>>, Acc) -> - parse_string(R, <>); -``` - -### parse_string - -Parse a string binary. - -```erlang -parse_string(<<$\\, $\\, R/bits>>, Acc) -> - parse_string(R, <>); -``` - -### parse_string - -Parse a string binary. 
- -```erlang -parse_string(<<$", R/bits>>, Acc) -> - {{string, Acc}, R}; -``` - -### parse_string - -Parse a string binary. - -```erlang -parse_string(<>, Acc) when - C >= 16#20, C =< 16#21; - C >= 16#23, C =< 16#5b; - C >= 16#5d, C =< 16#7e -> - parse_string(R, <>). -``` - -### parse_token - -Parse a token binary. - -```erlang -parse_token(<>, Acc) when ?IS_TOKEN(C) or (C =:= $:) or (C =:= $/) -> - parse_token(R, <>); -``` - -### parse_token - -Parse a token binary. - -```erlang -parse_token(R, Acc) -> - {{token, Acc}, R}. -``` - -### parse_binary - -Parse a byte sequence binary. - -```erlang -parse_binary(Bin) when is_binary(Bin) -> - parse_binary(Bin, <<>>). -``` - -### parse_binary - -```erlang -parse_binary(<<$:, R/bits>>, Acc) -> - {{binary, base64:decode(Acc)}, R}; -``` - -### parse_binary - -```erlang -parse_binary(<>, Acc) when ?IS_ALPHANUM(C) or (C =:= $+) or (C =:= $/) or (C =:= $=) -> - parse_binary(R, <>). -``` - -### parse_struct_hd_test_ - -```erlang -parse_struct_hd_test_() -> - Files = filelib:wildcard("deps/structured-header-tests/*.json"), - lists:flatten([ - begin - {ok, JSON} = file:read_file(File), - Tests = json:decode(JSON), - [ - {iolist_to_binary(io_lib:format("~s: ~s", [filename:basename(File), Name])), fun() -> - %% The implementation is strict. We fail whenever we can. -``` - -### expected_to_term - -```erlang -expected_to_term([Bare, []]) when - is_boolean(Bare); is_number(Bare); is_binary(Bare); is_map(Bare) -> - {item, e2tb(Bare), []}; -``` - -### expected_to_term - -```erlang -expected_to_term([Bare, Params = [[<<_/bits>>, _] | _]]) when - is_boolean(Bare); is_number(Bare); is_binary(Bare); is_map(Bare) -> - {item, e2tb(Bare), e2tp(Params)}; -%% Empty list or dictionary. -``` - -### expected_to_term - -```erlang -expected_to_term([]) -> - []; -%% Dictionary. -``` - -### expected_to_term - -```erlang -expected_to_term(Dict = [[<<_/bits>>, V] | _]) when V =/= [] -> - e2t(Dict); -%% Outer list. 
-``` - -### expected_to_term - -```erlang -expected_to_term(List) when is_list(List) -> - [ e2t(E) || E <- List ]. -``` - -### e2t - -```erlang -e2t(Dict = [[<<_/bits>>, _] | _]) -> - [{K, e2t(V)} || [K, V] <- Dict]; -%% Inner list. -``` - -### e2t - -```erlang -e2t([List, Params]) when is_list(List) -> - {list, [ e2t(E) || E <- List ], e2tp(Params)}; -%% Item. -``` - -### e2t - -```erlang -e2t([Bare, Params]) -> - {item, e2tb(Bare), e2tp(Params)}. -``` - -### e2tb - -```erlang -e2tb(#{<<"__type">> := <<"token">>, <<"value">> := V}) -> - {token, V}; -``` - -### e2tb - -```erlang -e2tb(#{<<"__type">> := <<"binary">>, <<"value">> := V}) -> - {binary, base32:decode(V)}; -``` - -### e2tb - -```erlang -e2tb(V) when is_binary(V) -> - {string, V}; -``` - -### e2tb - -```erlang -e2tb(V) when is_float(V) -> - %% There should be no rounding needed for the test cases. -``` - -### e2tb - -```erlang -e2tb(V) -> - V. -``` - -### e2tp - -```erlang -e2tp([]) -> - []; -``` - -### e2tp - -```erlang -e2tp(Params) -> - [{K, e2tb(V)} || [K, V] <- Params]. -``` - -### raw_to_binary - -```erlang -raw_to_binary(RawList) -> - trim_ws(iolist_to_binary(lists:join(<<", ">>, RawList))). -``` - -### trim_ws - -```erlang -trim_ws(<<$\s, R/bits>>) -> trim_ws(R); -``` - -### trim_ws - -```erlang -trim_ws(R) -> trim_ws_end(R, byte_size(R) - 1). -``` - -### trim_ws_end - -```erlang -trim_ws_end(_, -1) -> - <<>>; -``` - -### trim_ws_end - -```erlang -trim_ws_end(Value, N) -> - case binary:at(Value, N) of - $\s -> - trim_ws_end(Value, N - 1); - _ -> - S = N + 1, - <> = Value, - Value2 - end. -``` - -### dictionary - -```erlang --spec dictionary(#{binary() => sh_item() | sh_inner_list()} | sh_dictionary()) -> - iolist(). -``` - -```erlang -dictionary(Map) when is_map(Map) -> - dictionary(maps:to_list(Map)); -``` - -### dictionary - -```erlang --spec dictionary(#{binary() => sh_item() | sh_inner_list()} | sh_dictionary()) -> - iolist(). 
-``` - -```erlang -dictionary(KVList) when is_list(KVList) -> - lists:join( - <<", ">>, - [ - case Value of - true -> Key; - _ -> [Key, $=, item_or_inner_list(Value)] - end - || - {Key, Value} <- KVList - ] - ). -``` - -### item_or_inner_list - -```erlang --spec list(sh_list()) -> iolist(). -list(List) -> - lists:join(<<", ">>, [item_or_inner_list(Value) || Value <- List]). -``` - -```erlang -item_or_inner_list(Value = {list, _, _}) -> - inner_list(Value); -``` - -### item_or_inner_list - -```erlang --spec list(sh_list()) -> iolist(). -list(List) -> - lists:join(<<", ">>, [item_or_inner_list(Value) || Value <- List]). -``` - -```erlang -item_or_inner_list(Value) -> - item(Value). -``` - -### inner_list - -```erlang -inner_list({list, List, Params}) -> - [$(, lists:join($\s, [item(Value) || Value <- List]), $), params(Params)]. -``` - -### bare_item - -```erlang -bare_item({string, String}) -> - [$", escape_string(String, <<>>), $"]; -%% @todo Must fail if Token has invalid characters. -``` - -### bare_item - -```erlang -bare_item({token, Token}) -> - Token; -``` - -### bare_item - -```erlang -bare_item({binary, Binary}) -> - [$:, base64:encode(Binary), $:]; -``` - -### bare_item - -```erlang -bare_item({decimal, {Base, Exp}}) when Exp >= 0 -> - Mul = - case Exp of - 0 -> 1; - 1 -> 10; - 2 -> 100; - 3 -> 1000; - 4 -> 10000; - 5 -> 100000; - 6 -> 1000000; - 7 -> 10000000; - 8 -> 100000000; - 9 -> 1000000000; - 10 -> 10000000000; - 11 -> 100000000000; - 12 -> 1000000000000 - end, - MaxLenWithSign = - if - Base < 0 -> 13; - true -> 12 - end, - Bin = integer_to_binary(Base * Mul), - true = byte_size(Bin) =< MaxLenWithSign, - [Bin, <<".0">>]; -``` - -### bare_item - -```erlang -bare_item({decimal, {Base, -1}}) -> - Int = Base div 10, - Frac = abs(Base) rem 10, - [integer_to_binary(Int), $., integer_to_binary(Frac)]; -``` - -### bare_item - -```erlang -bare_item({decimal, {Base, -2}}) -> - Int = Base div 100, - Frac = abs(Base) rem 100, - [integer_to_binary(Int), $., 
integer_to_binary(Frac)]; -``` - -### bare_item - -```erlang -bare_item({decimal, {Base, -3}}) -> - Int = Base div 1000, - Frac = abs(Base) rem 1000, - [integer_to_binary(Int), $., integer_to_binary(Frac)]; -``` - -### bare_item - -```erlang -bare_item({decimal, {Base, Exp}}) -> - Div = exp_div(Exp), - Int0 = Base div Div, - true = abs(Int0) < 1000000000000, - Frac0 = abs(Base) rem Div, - DivFrac = Div div 1000, - Frac1 = Frac0 div DivFrac, - {Int, Frac} = - if - (Frac0 rem DivFrac) > (DivFrac div 2) -> - case Frac1 of - 999 when Int0 < 0 -> {Int0 - 1, 0}; - 999 -> {Int0 + 1, 0}; - _ -> {Int0, Frac1 + 1} - end; - true -> - {Int0, Frac1} - end, - [ - integer_to_binary(Int), - $., - if - Frac < 10 -> [$0, $0, integer_to_binary(Frac)]; - Frac < 100 -> [$0, integer_to_binary(Frac)]; - true -> integer_to_binary(Frac) - end - ]; -``` - -### bare_item - -```erlang -bare_item(Integer) when is_integer(Integer) -> - integer_to_binary(Integer); -``` - -### bare_item - -```erlang -bare_item(true) -> - <<"?1">>; -``` - -### bare_item - -```erlang -bare_item(false) -> - <<"?0">>. -``` - -### exp_div - -```erlang -exp_div(0) -> 1; -``` - -### exp_div - -```erlang -exp_div(N) -> 10 * exp_div(N + 1). -``` - -### escape_string - -```erlang -escape_string(<<>>, Acc) -> Acc; -``` - -### escape_string - -```erlang -escape_string(<<$\\, R/bits>>, Acc) -> escape_string(R, <>); -``` - -### escape_string - -```erlang -escape_string(<<$", R/bits>>, Acc) -> escape_string(R, <>); -``` - -### escape_string - -```erlang -escape_string(<>, Acc) -> escape_string(R, <>). -``` - -### params - -```erlang -params(Params) -> - [ - case Param of - {Key, true} -> [$;, Key]; - {Key, Value} -> [$;, Key, $=, bare_item(Value)] - end - || - Param <- Params - ]. 
-``` - -### to_dictionary_test - -```erlang -to_dictionary_test() -> - {ok, SfDictionary} = to_dictionary(#{ - foo => bar, - <<"fizz">> => <<"buzz">>, - <<"item-with">> => { item, <<"params">>, [{first, param}, {another, true}] }, - <<"int-item">> => 1, - <<"int-item-with-params">> => { item, 1, [{int, <<"param">>}] }, - <<"no">> => <<"params">>, - <<"empty">> => {item, params, []}, - inner => [<<"a">>, b, true, 3], - inner_with_params => {list, [{item, 1, []}, 2], [{first, param}]}, - inner_inner_params => [{item, 1, [{heres, <<"one">>}]}, 2] - }), - ?assertEqual( - {<<"foo">>, {item, {token,<<"bar">>}, []}}, - lists:keyfind(<<"foo">>, 1, SfDictionary) - ), - ?assertEqual( - {<<"fizz">>, {item, {string,<<"buzz">>}, []}}, - lists:keyfind(<<"fizz">>, 1, SfDictionary) - ), - ?assertEqual( - {<<"item-with">>, - {item, - {string,<<"params">>}, - [{<<"first">>, {token,<<"param">>}}, {<<"another">>, true}] - } - }, - lists:keyfind(<<"item-with">>, 1, SfDictionary) - ), - ?assertEqual( - {<<"int-item">>, {item, 1, []}}, - lists:keyfind(<<"int-item">>, 1, SfDictionary) - ), - ?assertEqual( - {<<"int-item-with-params">>, {item, 1, [{<<"int">>, {string, <<"param">>}}]}}, - lists:keyfind(<<"int-item-with-params">>, 1, SfDictionary) - ), - ?assertEqual( - {<<"no">>, {item, {string, <<"params">>}, []}}, - lists:keyfind(<<"no">>, 1, SfDictionary) - ), - ?assertEqual( - {<<"empty">>, {item, {token, <<"params">>}, []}}, - lists:keyfind(<<"empty">>, 1, SfDictionary) - ), - ?assertEqual( - { - <<"inner">>, - { - list, - [ - {item, {string, <<"a">>}, []}, - {item, {token, <<"b">>}, []}, - {item, true, []}, - {item, 3, []} - ], - [] - } - }, - lists:keyfind(<<"inner">>, 1, SfDictionary) - ), - ?assertEqual( - {<<"inner_with_params">>, - {list, - [{item, 1, []}, {item, 2, []}], - [{<<"first">>, {token, <<"param">>}}] - } - }, - lists:keyfind(<<"inner_with_params">>, 1, SfDictionary) - ), - ?assertEqual( - {<<"inner_inner_params">>, - {list, - [{item, 1, [{<<"heres">>, {string, 
<<"one">>}}]}, {item, 2, []}], - [] - } - }, - lists:keyfind(<<"inner_inner_params">>, 1, SfDictionary) - ), - dictionary(SfDictionary). -``` - -### to_dictionary_depth_test - -```erlang -to_dictionary_depth_test() -> - {too_deep, _} = to_dictionary(#{ - foo => #{ bar => buzz } - }), - {too_deep, _} = to_dictionary(#{ - foo => [1, 2, [3]] - }), - ok. -``` - -### to_item_test - -```erlang -to_item_test() -> - ?assertEqual(to_item(1), {ok, {item, 1, []}}), - ?assertEqual(to_item(true), {ok, {item, true, []}}), - ?assertEqual(to_item(<<"foobar">>), {ok, {item, {string, <<"foobar">>}, []}}), - ?assertEqual(to_item("foobar"), {ok, {item, {string, <<"foobar">>}, []}}), - ?assertEqual(to_item(foobar), {ok, {item, {token, <<"foobar">>}, []}}), - ?assertEqual( - to_item({item, "foobar", [{first, param}]}), - {ok, {item, {string, <<"foobar">>}, [{<<"first">>, {token, <<"param">>}}]}} - ), - ok. -``` - -### to_list_test - -```erlang -to_list_test() -> - ?assertEqual( - to_list( - [1, 2, <<"three">>, [4, <<"five">>], - {list, [6, <<"seven">>], - [{<<"first">>, {token, <<"param">>}}] - } - ] - ), - {ok, [ - {item, 1, []}, - {item, 2, []}, - {item, {string, <<"three">>}, []}, - {list, [{ item, 4, []}, {item, {string, <<"five">>}, []}], []}, - {list, - [{ item, 6, []}, {item, {string, <<"seven">>}, []}], - [{<<"first">>, {token, <<"param">>}}] - } - ]} - ), - ok. -``` - -### to_list_depth_test - -```erlang -to_list_depth_test() -> - {too_deep, _} = to_list([1,2,3, [4, [5]]]), - {too_deep, _} = to_list([1,2,3, #{ foo => bar } ]), - {too_deep, _} = to_list([1,2,3, [#{ foo => bar }] ]), - ok. 
-``` - ---- - -*Generated from [hb_structured_fields.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_structured_fields.erl)* diff --git a/docs/book/src/hb_sup.erl.md b/docs/book/src/hb_sup.erl.md deleted file mode 100644 index 5d115e181..000000000 --- a/docs/book/src/hb_sup.erl.md +++ /dev/null @@ -1,89 +0,0 @@ -# hb_sup - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_sup.erl) - -## Exported Functions - -- `init/1` -- `start_link/0` -- `start_link/1` - ---- - -### start_link - -```erlang -start_link() -> - start_link(#{}). -``` - -### start_link - -```erlang -start_link(Opts) -> - supervisor:start_link({local, ?SERVER}, ?MODULE, Opts). -``` - -### init - -```erlang -init(Opts) -> - SupFlags = #{strategy => one_for_all, - intensity => 0, - period => 1}, - StoreChildren = store_children(hb_opts:get(store, [], Opts)), - GunChild = - #{ - id => hb_http_client, - start => {hb_http_client, start_link, [Opts]}, - restart => permanent, - shutdown => 5000, - type => worker, - modules => [hb_http_client] - }, - {ok, {SupFlags, [GunChild | StoreChildren]}}. -``` - -### store_children - -Generate a child spec for stores in the given Opts. - -```erlang -store_children(Store) when not is_list(Store) -> - store_children([Store]); -``` - -### store_children - -Generate a child spec for stores in the given Opts. - -```erlang -store_children([]) -> []; -``` - -### store_children - -Generate a child spec for stores in the given Opts. - -```erlang -store_children([RocksDBOpts = #{ <<"store-module">> := hb_store_rocksdb } | Rest]) -> - [ - #{ - id => hb_store_rocksdb, - start => {hb_store_rocksdb, start_link, [RocksDBOpts]} - } - ] ++ store_children(Rest); -``` - -### store_children - -Generate a child spec for stores in the given Opts. - -```erlang -store_children([_ | Rest]) -> - store_children(Rest). 
-``` - ---- - -*Generated from [hb_sup.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_sup.erl)* diff --git a/docs/book/src/hb_test_utils.erl.md b/docs/book/src/hb_test_utils.erl.md deleted file mode 100644 index f5a86ce3c..000000000 --- a/docs/book/src/hb_test_utils.erl.md +++ /dev/null @@ -1,351 +0,0 @@ -# hb_test_utils - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_test_utils.erl) - -Simple utilities for testing HyperBEAM. Includes functions for -generating isolated (fresh) test stores, running suites of tests with -differing options, as well as executing and reporting benchmarks. - ---- - -## Exported Functions - -- `assert_throws/4` -- `benchmark_iterations/2` -- `benchmark_print/2` -- `benchmark_print/3` -- `benchmark_print/4` -- `benchmark/1` -- `benchmark/2` -- `benchmark/3` -- `compare_events/3` -- `compare_events/4` -- `compare_events/5` -- `run/4` -- `suite_with_opts/2` -- `test_store/0` -- `test_store/1` -- `test_store/2` - ---- - -### test_store - -Simple utilities for testing HyperBEAM. Includes functions for -Generate a new, unique test store as an isolated context for an execution. - -```erlang -test_store() -> - test_store(maps:get(<<"store-module">>, hd(hb_opts:get(store)))). -``` - -### test_store - -Simple utilities for testing HyperBEAM. Includes functions for -Generate a new, unique test store as an isolated context for an execution. - -```erlang -test_store(Mod) -> - test_store(Mod, <<"default">>). -``` - -### test_store - -Simple utilities for testing HyperBEAM. Includes functions for -Generate a new, unique test store as an isolated context for an execution. - -```erlang -test_store(Mod, Tag) -> - TestDir = - << - "cache-TEST/run-", - Tag/binary, "-", - (integer_to_binary(erlang:system_time(millisecond)))/binary - >>, - % Wait a tiny interval to ensure that any further tests will get their own - % directory. -``` - -### suite_with_opts - -Run each test in a suite with each set of options. 
Start and reset - -```erlang -suite_with_opts(Suite, OptsList) -> - lists:filtermap( - fun(OptSpec = #{ name := _Name, opts := Opts, desc := ODesc}) -> - Store = hb_opts:get(store, hb_opts:get(store), Opts), - Skip = hb_maps:get(skip, OptSpec, [], Opts), - case satisfies_requirements(OptSpec) of - true -> - {true, {foreach, - fun() -> - ?event({starting, Store}), - % Create and set a random server ID for the test - % process. -``` - -### satisfies_requirements - -Determine if the environment satisfies the given test requirements. - -```erlang -satisfies_requirements(Requirements) when is_map(Requirements) -> - satisfies_requirements(hb_maps:get(requires, Requirements, [])); -``` - -### satisfies_requirements - -Determine if the environment satisfies the given test requirements. - -```erlang -satisfies_requirements(Requirements) -> - lists:all( - fun(Req) -> - case hb_features:enabled(Req) of - true -> true; - false -> - case code:is_loaded(Req) of - false -> false; - {file, _} -> - case erlang:function_exported(Req, enabled, 0) of - true -> Req:enabled(); - false -> true - end - end - end - end, - Requirements - ). -``` - -### opts_from_list - -Find the options from a list of options by name. - -```erlang -opts_from_list(OptsName, OptsList) -> - hd([ O || #{ name := OName, opts := O } <- OptsList, OName == OptsName ]). -``` - -### run - -```erlang -run(Name, OptsName, Suite, OptsList) -> - {_, _, Test} = lists:keyfind(Name, 1, Suite), - Test(opts_from_list(OptsName, OptsList)). 
-``` - -### compare_events - -Compares the events generated by executing a test/function with two - -```erlang -compare_events(Fun, Opts1, Opts2) -> - hb_store:reset(hb_opts:get(store, hb_opts:get(store), Opts1)), - hb_store:write( - hb_opts:get(store, hb_opts:get(store), Opts1), - <<"test">>, - <<"test">> - ), - {EventsSample1, _Res2} = hb_event:diff( - fun() -> - Fun(Opts1) - end - ), - hb_store:reset(hb_opts:get(store, hb_opts:get(store), Opts1)), - hb_store:reset(hb_opts:get(store, hb_opts:get(store), Opts2)), - {EventsSample2, _Res} = hb_event:diff( - fun() -> - Fun(Opts2) - end - ), - hb_store:reset(hb_opts:get(store, hb_opts:get(store), Opts2)), - EventsDiff = hb_message:diff(EventsSample1, EventsSample2, #{}), - ?event( - debug_perf, - {events, - {sample1, EventsSample1}, - {sample2, EventsSample2}, - {events_diff, EventsDiff} - } - ), - EventsDiff. -``` - -### compare_events - -```erlang -compare_events(Fun, OptsName1, OptsName2, OptsList) -> - compare_events( - Fun, - opts_from_list(OptsName1, OptsList), - opts_from_list(OptsName2, OptsList) - ). -``` - -### compare_events - -```erlang -compare_events(Name, OptsName1, OptsName2, Suite, OptsList) -> - {_, _, Test} = lists:keyfind(Name, 1, Suite), - compare_events( - Test, - opts_from_list(OptsName1, OptsList), - opts_from_list(OptsName2, OptsList) - ). -``` - -### assert_throws - -Assert that a function throws an expected exception. Needed to work around some - -```erlang -assert_throws(Fun, Args, ExpectedException, Label) -> - Error = try - apply(Fun, Args), - failed_to_throw - catch - error:ExpectedException -> expected_exception; - ExpectedException -> expected_exception; - error:Other -> {wrong_exception, Other}; - Other -> {wrong_exception, Other} - end, - ?assertEqual(expected_exception, Error, Label). -``` - -### benchmark - -Run a function as many times as possible in a given amount of time. - -```erlang -benchmark(Fun) -> - benchmark(Fun, ?DEFAULT_BENCHMARK_TIME). 
-``` - -### benchmark - -```erlang -benchmark(Fun, TLen) -> - T0 = erlang:system_time(millisecond), - hb_util:until( - fun() -> erlang:system_time(millisecond) - T0 > (TLen * 1000) end, - Fun, - 0 - ). -``` - -### benchmark_iterations - -Return the amount of time required to execute N iterations of a function - -```erlang -benchmark_iterations(Fun, N) -> - {Time, _} = timer:tc( - fun() -> - lists:foreach( - fun(I) -> Fun(I) end, - lists:seq(1, N) - ) - end - ), - Time / 1_000_000. -``` - -### benchmark - -Run multiple instances of a function in parallel for a given amount of time. - -```erlang -benchmark(Fun, TLen, Procs) -> - Parent = self(), - receive _ -> worker_synchronized end, - StartWorker = - fun(_) -> - Ref = make_ref(), - spawn_link(fun() -> - Count = benchmark(Fun, TLen), - Parent ! {work_complete, Ref, Count} - end), - Ref - end, - CollectRes = - fun(R) -> - receive - {work_complete, R, Count} -> - %?event(benchmark, {work_complete, R, Count}), - Count - end - end, - Refs = lists:map(StartWorker, lists:seq(1, Procs)), - lists:sum(lists:map(CollectRes, Refs)). -``` - -### benchmark_print - -Print benchmark results in a human-readable format that EUnit writes to - -```erlang -benchmark_print(Verb, Iterations) -> - benchmark_print(Verb, Iterations, ?DEFAULT_BENCHMARK_TIME). -``` - -### benchmark_print - -```erlang -benchmark_print(Verb, Iterations, Time) when is_integer(Iterations) -> - hb_format:eunit_print( - "~s ~s in ~s (~s/s)", - [ - Verb, - hb_util:human_int(Iterations), - format_time(Time), - hb_util:human_int(Iterations / Time) - ] - ); -``` - -### benchmark_print - -```erlang -benchmark_print(Verb, Noun, Iterations) -> - benchmark_print(Verb, Noun, Iterations, ?DEFAULT_BENCHMARK_TIME). 
-``` - -### benchmark_print - -```erlang -benchmark_print(Verb, Noun, Iterations, Time) -> - hb_format:eunit_print( - "~s ~s ~s in ~s (~s ~s/s)", - [ - Verb, - hb_util:human_int(Iterations), - Noun, - format_time(Time), - hb_util:human_int(Iterations / Time), - Noun - ] - ). -``` - -### format_time - -Format a time in human-readable format. Takes arguments in seconds. - -```erlang -format_time(Time) when is_integer(Time) -> - hb_util:human_int(Time) ++ "s"; -``` - -### format_time - -Format a time in human-readable format. Takes arguments in seconds. - -```erlang -format_time(Time) -> -``` - ---- - -*Generated from [hb_test_utils.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_test_utils.erl)* diff --git a/docs/book/src/hb_tracer.erl.md b/docs/book/src/hb_tracer.erl.md deleted file mode 100644 index 001567bb3..000000000 --- a/docs/book/src/hb_tracer.erl.md +++ /dev/null @@ -1,186 +0,0 @@ -# hb_tracer - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_tracer.erl) - -A module for tracing the flow of requests through the system. -This allows for tracking the lifecycle of a request from HTTP receipt through processing and response. - ---- - -## Exported Functions - -- `format_error_trace/1` -- `get_trace/1` -- `record_step/2` -- `start_trace/0` - ---- - -### start_trace - -A module for tracing the flow of requests through the system. -Start a new tracer acting as queue of events registered. - -```erlang -start_trace() -> - Trace = #{steps => queue:new()}, - TracePID = spawn(fun() -> trace_loop(Trace) end), - ?event(trace, {trace_started, TracePID}), - TracePID. 
-``` - -### trace_loop - -```erlang -trace_loop(Trace) -> - receive - {record_step, Step} -> - Steps = maps:get(steps, Trace), - NewTrace = Trace#{steps => queue:in(Step, Steps)}, - ?event(trace, {step_recorded, Step}), - trace_loop(NewTrace); - {get_trace, From} -> - % Convert queue to list for the response - TraceWithList = - Trace#{steps => - queue:to_list( - maps:get(steps, Trace))}, - From ! {trace, TraceWithList}, - trace_loop(Trace) - end. -``` - -### record_step - -Register a new step into a tracer - -```erlang -record_step(TracePID, Step) -> - TracePID ! {record_step, Step}. -``` - -### get_trace - -Exports the complete queue of events - -```erlang -get_trace(TracePID) -> - TracePID ! {get_trace, self()}, - receive - {trace, Trace} -> - Trace - after 5000 -> - ?event(trace, {trace_timeout, TracePID}), - {trace, #{}} - end. -``` - -### format_error_trace - -Format a trace for error in a user-friendly emoji oriented output - -```erlang -format_error_trace(Trace) -> - Steps = maps:get(steps, Trace, []), - TraceMap = - lists:foldl(fun(TraceItem, Acc) -> - case TraceItem of - {http, {parsed_singleton, _ReqSingleton, _}} -> - maps:put(request_parsing, true, Acc); - {ao_core, {stage, Stage, _Task}} -> - maps:put(resolve_stage, Stage, Acc); - {ao_result, - {load_device_failed, _, _, _, _, {exec_exception, Exception}, _, _}} -> - maps:put(error, Exception, Acc); - {ao_result, - {exec_failed, - _, - _, - _, - {func, Fun}, - _, - {exec_exception, Error}, - _, - _}} -> - maps:put(error, {Fun, Error}, Acc); - _ -> Acc - end - end, - #{}, - Steps), - % Build the trace message - TraceStrings = <<"Oops! Something went wrong. 
Here's the rundown:">>, - % Add parsing status - ParsingTrace = - case maps:get(request_parsing, TraceMap, false) of - false -> - Emoji = failure_emoji(), - <>; - true -> - Emoji = checkmark_emoji(), - <> - end, - % Add stage information - StageTrace = - case maps:get(resolve_stage, TraceMap, undefined) of - undefined -> - ParsingTrace; - Stage -> - StageEmoji = stage_to_emoji(Stage), - try << ParsingTrace/binary, "\n", StageEmoji/binary, - " Resolved steps of your execution" >> - catch - error:badarg -> - iolist_to_binary(io_lib:format("~p", [ParsingTrace])) - end - end, - % Add error information - case maps:get(error, TraceMap, undefined) of - undefined -> - StageTrace; - {Fun, Reason} -> - FailureEmoji = failure_emoji(), - ErrMsg = list_to_binary(io_lib:format("~p -> ~p", [Fun, Reason])), - <>; - Error -> - FailureEmoji = failure_emoji(), - <> - end. -``` - -### checkmark_emoji - -```erlang -checkmark_emoji() -> - % Unicode for checkmark - <<"\xE2\x9C\x85">>. % \xE2\x9C\x85 is the checkmark emoji in UTF-8 -``` - -### failure_emoji - -```erlang -failure_emoji() -> - % Unicode for failure emoji - <<"\xE2\x9D\x8C">>. % \xE2\x9D\x8C is the failure emoji in UTF-8 -% Helper function to convert stage number to emoji -``` - -### stage_to_emoji - -```erlang -stage_to_emoji(Stage) when Stage >= 1, Stage =< 9 -> - % Unicode for circled numbers 1-9 - StageEmoji = Stage + 48, - <>; -``` - -### stage_to_emoji - -```erlang -stage_to_emoji(_) -> - "". 
-``` - ---- - -*Generated from [hb_tracer.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_tracer.erl)* diff --git a/docs/book/src/hb_util.erl.md b/docs/book/src/hb_util.erl.md deleted file mode 100644 index 52245617c..000000000 --- a/docs/book/src/hb_util.erl.md +++ /dev/null @@ -1,1666 +0,0 @@ -# hb_util - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_util.erl) - -Simple type coercion functions, useful for quickly turning inputs from the -HTTP API into the correct types for the HyperBEAM runtime, if they are not -annotated by the user. - ---- - -## Exported Functions - -- `all_atoms/0` -- `all_hb_modules/0` -- `atom/1` -- `bin/1` -- `binary_is_atom/1` -- `binary_to_addresses/1` -- `ceil_int/2` -- `check_size/2` -- `check_type/2` -- `check_value/2` -- `count/2` -- `decode/1` -- `deep_get/3` -- `deep_get/4` -- `deep_merge/3` -- `deep_set/4` -- `encode/1` -- `find_target_path/2` -- `find_value/2` -- `find_value/3` -- `float/1` -- `floor_int/2` -- `hd/1` -- `hd/2` -- `hd/3` -- `human_id/1` -- `human_int/1` -- `id/1` -- `id/2` -- `int/1` -- `is_hb_module/1` -- `is_hb_module/2` -- `is_ordered_list/2` -- `is_string_list/1` -- `key_to_atom/1` -- `key_to_atom/2` -- `list_replace/3` -- `list_to_numbered_message/1` -- `list_with/2` -- `list_without/2` -- `list/1` -- `lower_case_key_map/2` -- `map/1` -- `maybe_throw/2` -- `mean/1` -- `message_to_ordered_list/1` -- `message_to_ordered_list/2` -- `native_id/1` -- `number/1` -- `numbered_keys_to_list/2` -- `ok_or_throw/3` -- `ok/1` -- `ok/2` -- `remove_common/2` -- `safe_decode/1` -- `safe_encode/1` -- `split_depth_string_aware_single/2` -- `split_depth_string_aware/2` -- `split_escaped_single/2` -- `stddev/1` -- `template_matches/3` -- `to_hex/1` -- `to_lower/1` -- `to_sorted_keys/1` -- `to_sorted_keys/2` -- `to_sorted_list/1` -- `to_sorted_list/2` -- `unique/1` -- `until/1` -- `until/2` -- `until/3` -- `variance/1` -- `weighted_random/1` - ---- - -### int - -A collection of utility 
functions for building with HyperBEAM. -Coerce a string to an integer. - -```erlang -int(Str) when is_binary(Str) -> - list_to_integer(binary_to_list(Str)); -``` - -### int - -A collection of utility functions for building with HyperBEAM. -Coerce a string to an integer. - -```erlang -int(Str) when is_list(Str) -> - list_to_integer(Str); -``` - -### int - -A collection of utility functions for building with HyperBEAM. -Coerce a string to an integer. - -```erlang -int(Int) when is_integer(Int) -> - Int. -``` - -### float - -Coerce a string to a float. - -```erlang -float(Str) when is_binary(Str) -> - list_to_float(binary_to_list(Str)); -``` - -### float - -Coerce a string to a float. - -```erlang -float(Str) when is_list(Str) -> - list_to_float(Str); -``` - -### float - -Coerce a string to a float. - -```erlang -float(Float) when is_float(Float) -> - Float; -``` - -### float - -Coerce a string to a float. - -```erlang -float(Int) when is_integer(Int) -> - Int / 1. -``` - -### atom - -Coerce a string to an atom. - -```erlang -atom(Str) when is_binary(Str) -> - list_to_existing_atom(binary_to_list(Str)); -``` - -### atom - -Coerce a string to an atom. - -```erlang -atom(Str) when is_list(Str) -> - list_to_existing_atom(Str); -``` - -### atom - -Coerce a string to an atom. - -```erlang -atom(Atom) when is_atom(Atom) -> - Atom. -``` - -### bin - -Coerce a value to a binary. - -```erlang -bin(Value) when is_atom(Value) -> - atom_to_binary(Value, utf8); -``` - -### bin - -Coerce a value to a binary. - -```erlang -bin(Value) when is_integer(Value) -> - integer_to_binary(Value); -``` - -### bin - -Coerce a value to a binary. - -```erlang -bin(Value) when is_float(Value) -> - float_to_binary(Value, [{decimals, 10}, compact]); -``` - -### bin - -Coerce a value to a binary. - -```erlang -bin(Value) when is_list(Value) -> - list_to_binary(Value); -``` - -### bin - -Coerce a value to a binary. - -```erlang -bin(Value) when is_binary(Value) -> - Value. 
-``` - -### list - -Coerce a value to a string list. - -```erlang -list(Value) when is_binary(Value) -> - binary_to_list(Value); -``` - -### list - -Coerce a value to a string list. - -```erlang -list(Value) when is_list(Value) -> Value; -``` - -### list - -Coerce a value to a string list. -Ensure that a value is a map. Only supports maps and lists of key-value - -```erlang -list(Value) when is_atom(Value) -> atom_to_list(Value). -``` - -### map - -Coerce a value to a string list. -Ensure that a value is a map. Only supports maps and lists of key-value - -```erlang -map(Value) when is_list(Value) -> - maps:from_list(Value); -``` - -### map - -Coerce a value to a string list. -Ensure that a value is a map. Only supports maps and lists of key-value - -```erlang -map(Value) when is_map(Value) -> - Value. -``` - -### ceil_int - -```erlang -ceil_int(IntValue, Nearest) -> - IntValue - (IntValue rem Nearest) + Nearest. -``` - -### floor_int - -```erlang -floor_int(IntValue, Nearest) -> - IntValue - (IntValue rem Nearest). -``` - -### ok - -Unwrap a tuple of the form `{ok, Value}`, or throw/return, depending on - -```erlang -ok(Value) -> ok(Value, #{}). -``` - -### ok - -Unwrap a tuple of the form `{ok, Value}`, or throw/return, depending on - -```erlang -ok({ok, Value}, _Opts) -> Value; -``` - -### ok - -Unwrap a tuple of the form `{ok, Value}`, or throw/return, depending on - -```erlang -ok(Other, Opts) -> - case hb_opts:get(error_strategy, throw, Opts) of - throw -> throw({unexpected, Other}); - _ -> {unexpected, Other} - end. -``` - -### until - -Utility function to wait for a condition to be true. Optionally, - -```erlang -until(Condition) -> - until(Condition, 0). -``` - -### until - -```erlang -until(Condition, Count) -> - until(Condition, fun() -> receive after 100 -> 1 end end, Count). 
-``` - -### until - -```erlang -until(Condition, Fun, Count) -> - case Condition() of - false -> - case apply(Fun, hb_ao:truncate_args(Fun, [Count])) of - {count, AddToCount} -> - until(Condition, Fun, Count + AddToCount); - _ -> - until(Condition, Fun, Count + 1) - end; - true -> Count - end. -``` - -### id - -Return the human-readable form of an ID of a message when given either - -```erlang -id(Item) -> id(Item, unsigned). -``` - -### id - -Return the human-readable form of an ID of a message when given either - -```erlang -id(TX, Type) when is_record(TX, tx) -> - encode(ar_bundles:id(TX, Type)); -``` - -### id - -Return the human-readable form of an ID of a message when given either - -```erlang -id(Map, Type) when is_map(Map) -> - hb_message:id(Map, Type); -``` - -### id - -Return the human-readable form of an ID of a message when given either - -```erlang -id(Bin, _) when is_binary(Bin) andalso byte_size(Bin) == 43 -> - Bin; -``` - -### id - -Return the human-readable form of an ID of a message when given either - -```erlang -id(Bin, _) when is_binary(Bin) andalso byte_size(Bin) == 32 -> - encode(Bin); -``` - -### id - -Return the human-readable form of an ID of a message when given either - -```erlang -id(Data, Type) when is_list(Data) -> - id(list_to_binary(Data), Type). -``` - -### to_lower - -Convert a binary to a lowercase. - -```erlang -to_lower(Str) -> - string:lowercase(Str). -``` - -### is_string_list - -Is the given term a string list? - -```erlang -is_string_list(MaybeString) -> - lists:all(fun is_integer/1, MaybeString). -``` - -### to_sorted_list - -Given a map or KVList, return a deterministically sorted list of its - -```erlang -to_sorted_list(Msg) -> - to_sorted_list(Msg, #{}). 
-``` - -### to_sorted_list - -```erlang -to_sorted_list(Msg, Opts) when is_map(Msg) -> - to_sorted_list(hb_maps:to_list(Msg, Opts), Opts); -``` - -### to_sorted_list - -```erlang -to_sorted_list(Msg = [{_Key, _} | _], _Opts) when is_list(Msg) -> - lists:sort(fun({Key1, _}, {Key2, _}) -> Key1 < Key2 end, Msg); -``` - -### to_sorted_list - -```erlang -to_sorted_list(Msg, _Opts) when is_list(Msg) -> - lists:sort(fun(Key1, Key2) -> Key1 < Key2 end, Msg). -``` - -### to_sorted_keys - -Given a map or KVList, return a deterministically ordered list of its keys. - -```erlang -to_sorted_keys(Msg) -> - to_sorted_keys(Msg, #{}). -``` - -### to_sorted_keys - -```erlang -to_sorted_keys(Msg, Opts) when is_map(Msg) -> - to_sorted_keys(hb_maps:keys(Msg, Opts), Opts); -``` - -### to_sorted_keys - -```erlang -to_sorted_keys(Msg, _Opts) when is_list(Msg) -> - lists:sort(fun(Key1, Key2) -> Key1 < Key2 end, Msg). -``` - -### key_to_atom - -Convert keys in a map to atoms, lowering `-` to `_`. - -```erlang -key_to_atom(Key) -> key_to_atom(Key, existing). -``` - -### key_to_atom - -Convert keys in a map to atoms, lowering `-` to `_`. - -```erlang -key_to_atom(Key, _Mode) when is_atom(Key) -> Key; -``` - -### key_to_atom - -Convert keys in a map to atoms, lowering `-` to `_`. - -```erlang -key_to_atom(Key, Mode) -> - WithoutDashes = to_lower(binary:replace(Key, <<"-">>, <<"_">>, [global])), - case Mode of - new_atoms -> binary_to_atom(WithoutDashes, utf8); - _ -> binary_to_existing_atom(WithoutDashes, utf8) - end. -``` - -### native_id - -Convert a human readable ID to a native binary ID. If the ID is already - -```erlang -native_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 43 -> - decode(Bin); -``` - -### native_id - -Convert a human readable ID to a native binary ID. If the ID is already - -```erlang -native_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 32 -> - Bin; -``` - -### native_id - -Convert a human readable ID to a native binary ID. 
If the ID is already - -```erlang -native_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 42 -> - Bin; -``` - -### native_id - -Convert a human readable ID to a native binary ID. If the ID is already - -```erlang -native_id(Wallet = {_Priv, _Pub}) -> - native_id(ar_wallet:to_address(Wallet)). -``` - -### human_id - -Convert a native binary ID to a human readable ID. If the ID is already - -```erlang -human_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 32 -> - encode(Bin); -``` - -### human_id - -Convert a native binary ID to a human readable ID. If the ID is already - -```erlang -human_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 43 -> - Bin; -``` - -### human_id - -Convert a native binary ID to a human readable ID. If the ID is already - -```erlang -human_id(Bin) when is_binary(Bin) andalso byte_size(Bin) == 42 -> - Bin; -``` - -### human_id - -Convert a native binary ID to a human readable ID. If the ID is already - -```erlang -human_id(Wallet = {_Priv, _Pub}) -> - human_id(ar_wallet:to_address(Wallet)). -``` - -### human_int - -Add `,` characters to a number every 3 digits to make it human readable. - -```erlang -human_int(Float) when is_float(Float) -> - human_int(erlang:round(Float)); -``` - -### human_int - -Add `,` characters to a number every 3 digits to make it human readable. - -```erlang -human_int(Int) -> - lists:reverse(add_commas(lists:reverse(integer_to_list(Int)))). -``` - -### add_commas - -```erlang -add_commas([A,B,C,Z|Rest]) -> [A,B,C,$,|add_commas([Z|Rest])]; -``` - -### add_commas - -Encode a binary to URL safe base64 binary string. - -```erlang -add_commas(List) -> List. -``` - -### encode - -Encode a binary to URL safe base64 binary string. - -```erlang -encode(Bin) -> - b64fast:encode(Bin). -``` - -### decode - -Try to decode a URL safe base64 into a binary or throw an error when - -```erlang -decode(Input) -> - b64fast:decode(Input). -``` - -### safe_encode - -Safely encode a binary to URL safe base64. 
- -```erlang -safe_encode(Bin) when is_binary(Bin) -> - encode(Bin); -``` - -### safe_encode - -Safely encode a binary to URL safe base64. - -```erlang -safe_encode(Bin) -> - Bin. -``` - -### safe_decode - -Safely decode a URL safe base64 into a binary returning an ok or error - -```erlang -safe_decode(E) -> - try - D = decode(E), - {ok, D} - catch - _:_ -> {error, invalid} - end. -``` - -### to_hex - -Convert a binary to a hex string. Do not use this for anything other than - -```erlang -to_hex(Bin) when is_binary(Bin) -> - to_lower( - iolist_to_binary( - [io_lib:format("~2.16.0B", [X]) || X <- binary_to_list(Bin)] - ) - ). -``` - -### deep_merge - -Deep merge two maps, recursively merging nested maps. - -```erlang -deep_merge(Map1, Map2, Opts) when is_map(Map1), is_map(Map2) -> - hb_maps:fold( - fun(Key, Value2, AccMap) -> - case deep_get(Key, AccMap, Opts) of - Value1 when is_map(Value1), is_map(Value2) -> - % Both values are maps, recursively merge them - deep_set(Key, deep_merge(Value1, Value2, Opts), AccMap, Opts); - _ -> - % Either the key doesn't exist in Map1 or at least one of - % the values isn't a map. Simply use the value from Map2 - deep_set(Key, Value2, AccMap, Opts) - end - end, - Map1, - Map2, - Opts - ). 
-``` - -### deep_set - -Set a deep value in a message by its path, _assuming all messages are - -```erlang -deep_set(_Path, undefined, Msg, _Opts) -> Msg; -``` - -### deep_set - -Set a deep value in a message by its path, _assuming all messages are - -```erlang -deep_set(Path, Value, Msg, Opts) when not is_list(Path) -> - deep_set(hb_path:term_to_path_parts(Path, Opts), Value, Msg, Opts); -``` - -### deep_set - -Set a deep value in a message by its path, _assuming all messages are - -```erlang -deep_set([Key], unset, Msg, Opts) -> - hb_maps:remove(Key, Msg, Opts); -``` - -### deep_set - -Set a deep value in a message by its path, _assuming all messages are - -```erlang -deep_set([Key], Value, Msg, Opts) -> - case hb_maps:get(Key, Msg, not_found, Opts) of - ExistingMap when is_map(ExistingMap) andalso is_map(Value) -> - % If both are maps, merge them - Msg#{ Key => hb_maps:merge(ExistingMap, Value, Opts) }; - _ -> - Msg#{ Key => Value } - end; -``` - -### deep_set - -Set a deep value in a message by its path, _assuming all messages are - -```erlang -deep_set([Key|Rest], Value, Map, Opts) -> - SubMap = hb_maps:get(Key, Map, #{}, Opts), - hb_maps:put(Key, deep_set(Rest, Value, SubMap, Opts), Map, Opts). -``` - -### deep_get - -Get a deep value from a message. - -```erlang -deep_get(Path, Msg, Opts) -> deep_get(Path, Msg, not_found, Opts). -``` - -### deep_get - -Get a deep value from a message. - -```erlang -deep_get(Path, Msg, Default, Opts) when not is_list(Path) -> - deep_get(hb_path:term_to_path_parts(Path, Opts), Msg, Default, Opts); -``` - -### deep_get - -Get a deep value from a message. - -```erlang -deep_get([Key], Msg, Default, Opts) -> - case hb_maps:find(Key, Msg, Opts) of - {ok, Value} -> Value; - error -> Default - end; -``` - -### deep_get - -Get a deep value from a message. 
- -```erlang -deep_get([Key|Rest], Msg, Default, Opts) -> - case hb_maps:find(Key, Msg, Opts) of - {ok, DeepMsg} when is_map(DeepMsg) -> - deep_get(Rest, DeepMsg, Default, Opts); - error -> Default - end. -``` - -### find_target_path - -Find the target path to route for a request message. - -```erlang -find_target_path(Msg, Opts) -> - case hb_ao:get(<<"route-path">>, Msg, not_found, Opts) of - not_found -> - ?event({find_target_path, {msg, Msg}, not_found}), - hb_ao:get(<<"path">>, Msg, no_path, Opts); - RoutePath -> RoutePath - end. -``` - -### template_matches - -Check if a message matches a given template. - -```erlang -template_matches(ToMatch, Template, _Opts) when is_map(Template) -> - case hb_message:match(Template, ToMatch, primary) of - {value_mismatch, _Key, _Val1, _Val2} -> false; - Match -> Match - end; -``` - -### template_matches - -Check if a message matches a given template. - -```erlang -template_matches(ToMatch, Regex, Opts) when is_binary(Regex) -> - MsgPath = find_target_path(ToMatch, Opts), - hb_path:regex_matches(MsgPath, Regex). -``` - -### number - -Label a list of elements with a number. - -```erlang -number(List) -> - lists:map( - fun({N, Item}) -> {integer_to_binary(N), Item} end, - lists:zip(lists:seq(1, length(List)), List) - ). -``` - -### list_to_numbered_message - -Convert a list of elements to a map with numbered keys. - -```erlang -list_to_numbered_message(Msg) when is_map(Msg) -> - case is_ordered_list(Msg, #{}) of - true -> Msg; - false -> - throw({cannot_convert_to_numbered_message, Msg}) - end; -``` - -### list_to_numbered_message - -Convert a list of elements to a map with numbered keys. - -```erlang -list_to_numbered_message(List) -> - hb_maps:from_list(number(List)). -``` - -### is_ordered_list - -Determine if the message given is an ordered list, starting from 1. 
- -```erlang -is_ordered_list(Msg, _Opts) when is_list(Msg) -> true; -``` - -### is_ordered_list - -Determine if the message given is an ordered list, starting from 1. - -```erlang -is_ordered_list(Msg, Opts) -> - is_ordered_list(1, hb_ao:normalize_keys(Msg, Opts), Opts). -``` - -### is_ordered_list - -```erlang -is_ordered_list(_, Msg, _Opts) when map_size(Msg) == 0 -> true; -``` - -### is_ordered_list - -```erlang -is_ordered_list(N, Msg, _Opts) -> - case maps:get(NormKey = hb_ao:normalize_key(N), Msg, not_found) of - not_found -> false; - _ -> - is_ordered_list( - N + 1, - maps:without([NormKey], Msg), - _Opts - ) - end. -``` - -### list_replace - -Replace a key in a list with a new value. - -```erlang -list_replace(List, Key, Value) -> - lists:foldr( - fun(Elem, Acc) -> - case Elem of - Key when is_list(Value) -> Value ++ Acc; - Key -> [Value | Acc]; - _ -> [Elem | Acc] - end - end, - [], - List - ). -``` - -### unique - -Take a list and return a list of unique elements. The function is - -```erlang -unique(List) -> - Unique = - lists:foldl( - fun(Item, Acc) -> - case lists:member(Item, Acc) of - true -> Acc; - false -> [Item | Acc] - end - end, - [], - List - ), - lists:reverse(Unique). -``` - -### list_with - -Returns the intersection of two lists, with stable ordering. - -```erlang -list_with(List1, List2) -> - lists:filter(fun(Item) -> lists:member(Item, List2) end, List1). -``` - -### list_without - -Remove all occurrences of all items in the first list from the second list. - -```erlang -list_without(List1, List2) -> - lists:filter(fun(Item) -> not lists:member(Item, List1) end, List2). -``` - -### message_to_ordered_list - -Take a message with numbered keys and convert it to a list of tuples - -```erlang -message_to_ordered_list(Message) -> - message_to_ordered_list(Message, #{}). 
-``` - -### message_to_ordered_list - -```erlang -message_to_ordered_list(Message, _Opts) when ?IS_EMPTY_MESSAGE(Message) -> - []; -``` - -### message_to_ordered_list - -```erlang -message_to_ordered_list(List, _Opts) when is_list(List) -> - List; -``` - -### message_to_ordered_list - -```erlang -message_to_ordered_list(Message, Opts) -> - NormMessage = hb_ao:normalize_keys(Message, Opts), - Keys = hb_maps:keys(NormMessage, Opts) -- [<<"priv">>, <<"commitments">>], - SortedKeys = - lists:map( - fun hb_ao:normalize_key/1, - lists:sort(lists:map(fun int/1, Keys)) - ), - message_to_ordered_list(NormMessage, SortedKeys, erlang:hd(SortedKeys), Opts). -``` - -### message_to_ordered_list - -```erlang -message_to_ordered_list(_Message, [], _Key, _Opts) -> - []; -``` - -### message_to_ordered_list - -```erlang -message_to_ordered_list(Message, [Key|Keys], Key, Opts) -> - case hb_maps:get(Key, Message, undefined, Opts#{ hashpath => ignore }) of - undefined -> - throw( - {missing_key, - {key, Key}, - {remaining_keys, Keys}, - {message, Message} - } - ); - Value -> - [ - Value - | - message_to_ordered_list( - Message, - Keys, - hb_ao:normalize_key(int(Key) + 1), - Opts - ) - ] - end; -``` - -### message_to_ordered_list - -```erlang -message_to_ordered_list(Message, [Key|_Keys], ExpectedKey, _Opts) -> - throw({missing_key, {expected, ExpectedKey, {next, Key}, {message, Message}}}). -``` - -### numbered_keys_to_list - -Convert a message with numbered keys and others to a sorted list with only - -```erlang -numbered_keys_to_list(Message, Opts) -> - OnlyNumbered = - hb_maps:filter( - fun(Key, _Value) -> - try int(hb_ao:normalize_key(Key)) of - IntKey when is_integer(IntKey) -> true; - _ -> false - catch _:_ -> false - end - end, - Message, - Opts - ), - message_to_ordered_list(OnlyNumbered, Opts). -``` - -### hd - -Get the first element (the lowest integer key >= 1) of a numbered map. - -```erlang -hd(Message) -> hd(Message, value). 
-``` - -### hd - -Get the first element (the lowest integer key >= 1) of a numbered map. - -```erlang -hd(Message, ReturnType) -> - hd(Message, ReturnType, #{ error_strategy => throw }). -``` - -### hd - -```erlang -hd(Message, ReturnType, Opts) -> - hd(Message, hb_ao:keys(Message, Opts), 1, ReturnType, Opts). -``` - -### hd - -```erlang -hd(_Map, [], _Index, _ReturnType, #{ error_strategy := throw }) -> - throw(no_integer_keys); -``` - -### hd - -```erlang -hd(_Map, [], _Index, _ReturnType, _Opts) -> undefined; -``` - -### hd - -```erlang -hd(Message, [Key|Rest], Index, ReturnType, Opts) -> - case hb_ao:normalize_key(Key, Opts#{ error_strategy => return }) of - undefined -> - hd(Message, Rest, Index + 1, ReturnType, Opts); - Key -> - case ReturnType of - key -> Key; - value -> hb_ao:resolve(Message, Key, #{}) - end - end. -``` - -### find_value - -Find the value associated with a key in parsed a JSON structure list. - -```erlang -find_value(Key, List) -> - find_value(Key, List, undefined). -``` - -### find_value - -```erlang -find_value(Key, Map, Default) -> - find_value(Key, Map, Default, #{}). -``` - -### find_value - -```erlang -find_value(Key, Map, Default, Opts) when is_map(Map) -> - case hb_maps:find(Key, Map, Opts) of - {ok, Value} -> Value; - error -> Default - end; -``` - -### find_value - -```erlang -find_value(Key, List, Default, _Opts) -> - case lists:keyfind(Key, 1, List) of - {Key, Val} -> Val; - false -> Default - end. 
-``` - -### remove_common - -Remove the common prefix from two strings, returning the remainder of the - -```erlang -remove_common(MainStr, SubStr) when is_binary(MainStr) and is_list(SubStr) -> - remove_common(MainStr, list_to_binary(SubStr)); -``` - -### remove_common - -Remove the common prefix from two strings, returning the remainder of the - -```erlang -remove_common(MainStr, SubStr) when is_list(MainStr) and is_binary(SubStr) -> - binary_to_list(remove_common(list_to_binary(MainStr), SubStr)); -``` - -### remove_common - -Remove the common prefix from two strings, returning the remainder of the - -```erlang -remove_common(<< X:8, Rest1/binary>>, << X:8, Rest2/binary>>) -> - remove_common(Rest1, Rest2); -``` - -### remove_common - -Remove the common prefix from two strings, returning the remainder of the - -```erlang -remove_common([X|Rest1], [X|Rest2]) -> - remove_common(Rest1, Rest2); -``` - -### remove_common - -Remove the common prefix from two strings, returning the remainder of the - -```erlang -remove_common([$/|Path], _) -> Path; -``` - -### remove_common - -Remove the common prefix from two strings, returning the remainder of the -Throw an exception if the Opts map has an `error_strategy` key with the - -```erlang -remove_common(Rest, _) -> Rest. -``` - -### maybe_throw - -Remove the common prefix from two strings, returning the remainder of the -Throw an exception if the Opts map has an `error_strategy` key with the - -```erlang -maybe_throw(Val, Opts) -> - case hb_ao:get(error_strategy, Opts) of - throw -> throw(Val); - _ -> Val - end. -``` - -### is_hb_module - -Is the given module part of HyperBEAM? - -```erlang -is_hb_module(Atom) -> - is_hb_module(Atom, hb_opts:get(stack_print_prefixes, [], #{})). 
-``` - -### is_hb_module - -```erlang -is_hb_module(Atom, Prefixes) when is_atom(Atom) -> - is_hb_module(atom_to_list(Atom), Prefixes); -``` - -### is_hb_module - -```erlang -is_hb_module("hb_event" ++ _, _) -> - % Explicitly exclude hb_event from the stack trace, as it is always included, - % creating noise in the output. -``` - -### is_hb_module - -```erlang -is_hb_module(Str, Prefixes) -> - case string:tokens(Str, "_") of - [Pre|_] -> - lists:member(Pre, Prefixes); - _ -> - false - end. -``` - -### all_hb_modules - -Get all loaded modules that are loaded and are part of HyperBEAM. - -```erlang -all_hb_modules() -> - lists:filter(fun(Module) -> is_hb_module(Module) end, erlang:loaded()). -``` - -### count - -```erlang -count(Item, List) -> - length(lists:filter(fun(X) -> X == Item end, List)). -``` - -### mean - -```erlang -mean(List) -> - lists:sum(List) / length(List). -``` - -### stddev - -```erlang -stddev(List) -> - math:sqrt(variance(List)). -``` - -### variance - -```erlang -variance(List) -> - Mean = mean(List), - lists:sum([ math:pow(X - Mean, 2) || X <- List ]) / length(List). -``` - -### shuffle - -Shuffle a list. - -```erlang -shuffle(List) -> - [ Y || {_, Y} <- lists:sort([ {rand:uniform(), X} || X <- List]) ]. -``` - -### weighted_random - -Return a random element from a list, weighted by the values in the list. - -```erlang -weighted_random(List) -> - TotalWeight = lists:sum([ Weight || {_, Weight} <- List ]), - Normalized = [ {Item, Weight / TotalWeight} || {Item, Weight} <- List ], - Shuffled = shuffle(Normalized), - pick_weighted(Shuffled, rand:uniform()). -``` - -### pick_weighted - -Pick a random element from a list, weighted by the values in the list. - -```erlang -pick_weighted([], _) -> - error(empty_list); -``` - -### pick_weighted - -Pick a random element from a list, weighted by the values in the list. 
- -```erlang -pick_weighted([{Item, Weight}|_Rest], Remaining) when Remaining < Weight -> - Item; -``` - -### pick_weighted - -Pick a random element from a list, weighted by the values in the list. - -```erlang -pick_weighted([{_Item, Weight}|Rest], Remaining) -> - pick_weighted(Rest, Remaining - Weight). -``` - -### addresses_to_binary - -Serialize the given list of addresses to a binary, using the structured - -```erlang -addresses_to_binary(List) when is_list(List) -> - try - iolist_to_binary( - hb_structured_fields:list( - [ - {item, {string, hb_util:human_id(Addr)}, []} - || - Addr <- List - ] - ) - ) - catch - _:_ -> - error({cannot_parse_list, List}) - end. -``` - -### binary_to_addresses - -Parse a list from a binary. First attempts to parse the binary as a - -```erlang -binary_to_addresses(List) when is_list(List) -> - % If the argument is already a list, return it. -``` - -### binary_to_addresses - -```erlang -binary_to_addresses(List) when is_binary(List) -> - try - Res = lists:map( - fun({item, {string, Item}, []}) -> - Item - end, - hb_structured_fields:parse_list(List) - ), - Res - catch - _:_ -> - try - binary:split( - binary:replace(List, <<"\"">>, <<"">>, [global]), - <<",">>, - [global, trim_all] - ) - catch - _:_ -> - error({cannot_parse_list, List}) - end - end. -``` - -### split_depth_string_aware - -Extract all of the parts from the binary, given (a list of) separators. - -```erlang -split_depth_string_aware(_Sep, <<>>) -> []; -``` - -### split_depth_string_aware - -Extract all of the parts from the binary, given (a list of) separators. - -```erlang -split_depth_string_aware(Sep, Bin) -> - {_MatchedSep, Part, Rest} = split_depth_string_aware_single(Sep, Bin), - [Part | split_depth_string_aware(Sep, Rest)]. 
-``` - -### split_depth_string_aware_single - -Parse a binary, extracting a part until a separator is found, while - -```erlang -split_depth_string_aware_single(Sep, Bin) when not is_list(Sep) -> - split_depth_string_aware_single([Sep], Bin); -``` - -### split_depth_string_aware_single - -Parse a binary, extracting a part until a separator is found, while - -```erlang -split_depth_string_aware_single(Seps, Bin) -> - split_depth_string_aware_single(Seps, Bin, 0, <<>>). -``` - -### split_depth_string_aware_single - -```erlang -split_depth_string_aware_single(_Seps, <<>>, _Depth, CurrAcc) -> - {no_match, CurrAcc, <<>>}; -``` - -### split_depth_string_aware_single - -```erlang -split_depth_string_aware_single(Seps, << $\", Rest/binary>>, Depth, CurrAcc) -> - {QuotedStr, AfterStr} = split_escaped_single($\", Rest), - split_depth_string_aware_single( - Seps, - AfterStr, - Depth, - << CurrAcc/binary, "\"", QuotedStr/binary, "\"">> - ); -``` - -### split_depth_string_aware_single - -```erlang -split_depth_string_aware_single(Seps, << $\(, Rest/binary>>, Depth, CurrAcc) -> - %% Increase depth - split_depth_string_aware_single(Seps, Rest, Depth + 1, << CurrAcc/binary, "(" >>); -``` - -### split_depth_string_aware_single - -```erlang -split_depth_string_aware_single(Seps, << $\), Rest/binary>>, Depth, Acc) when Depth > 0 -> - %% Decrease depth - split_depth_string_aware_single(Seps, Rest, Depth - 1, << Acc/binary, ")">>); -``` - -### split_depth_string_aware_single - -```erlang -split_depth_string_aware_single(Seps, <>, Depth, CurrAcc) -> - case Depth == 0 andalso lists:member(C, Seps) of - true -> {C, CurrAcc, Rest}; - false -> - split_depth_string_aware_single( - Seps, - Rest, - Depth, - << CurrAcc/binary, C:8/integer >> - ) - end. -``` - -### split_escaped_single - -Read a binary until a separator is found without a preceding backslash. - -```erlang -split_escaped_single(Sep, Bin) -> - split_escaped_single(Sep, Bin, []). 
-``` - -### split_escaped_single - -```erlang -split_escaped_single(_Sep, <<>>, Acc) -> - {hb_util:bin(lists:reverse(Acc)), <<>>}; -``` - -### split_escaped_single - -```erlang -split_escaped_single(Sep, <<"\\", Char:8/integer, Rest/binary>>, Acc) -> - split_escaped_single(Sep, Rest, [Char, $\\ | Acc]); -``` - -### split_escaped_single - -```erlang -split_escaped_single(Sep, <>, Acc) -> - {hb_util:bin(lists:reverse(Acc)), Rest}; -``` - -### split_escaped_single - -```erlang -split_escaped_single(Sep, <>, Acc) -> - split_escaped_single(Sep, Rest, [C | Acc]). -``` - -### check_size - -Force that a binary is either empty or the given number of bytes. - -```erlang -check_size(Bin, {range, Start, End}) -> - check_type(Bin, binary) - andalso byte_size(Bin) >= Start - andalso byte_size(Bin) =< End; -``` - -### check_size - -Force that a binary is either empty or the given number of bytes. - -```erlang -check_size(Bin, Sizes) -> - check_type(Bin, binary) - andalso lists:member(byte_size(Bin), Sizes). -``` - -### check_value - -```erlang -check_value(Value, ExpectedValues) -> - lists:member(Value, ExpectedValues). -``` - -### check_type - -Ensure that a value is of the given type. - -```erlang -check_type(Value, binary) -> is_binary(Value); -``` - -### check_type - -Ensure that a value is of the given type. - -```erlang -check_type(Value, integer) -> is_integer(Value); -``` - -### check_type - -Ensure that a value is of the given type. - -```erlang -check_type(Value, list) -> is_list(Value); -``` - -### check_type - -Ensure that a value is of the given type. - -```erlang -check_type(Value, map) -> is_map(Value); -``` - -### check_type - -Ensure that a value is of the given type. - -```erlang -check_type(Value, tx) -> is_record(Value, tx); -``` - -### check_type - -Ensure that a value is of the given type. - -```erlang -check_type(Value, message) -> - is_record(Value, tx) or is_map(Value) or is_list(Value); -``` - -### check_type - -Ensure that a value is of the given type. 
-Throw an error if the given value is not ok. - -```erlang -check_type(_Value, _) -> false. -``` - -### ok_or_throw - -Ensure that a value is of the given type. -Throw an error if the given value is not ok. - -```erlang -ok_or_throw(_, true, _) -> true; -``` - -### ok_or_throw - -Ensure that a value is of the given type. -Throw an error if the given value is not ok. - -```erlang -ok_or_throw(_TX, false, Error) -> - throw(Error). -``` - -### all_atoms - -List the loaded atoms in the Erlang VM. - -```erlang -all_atoms() -> all_atoms(0). -``` - -### all_atoms - -List the loaded atoms in the Erlang VM. - -```erlang -all_atoms(N) -> - case atom_from_int(N) of - not_found -> []; - A -> [A | all_atoms(N+1)] - end. -``` - -### atom_from_int - -Find the atom with the given integer reference. - -```erlang -atom_from_int(Int) -> - case catch binary_to_term(<<131,75,Int:24>>) of - A -> A; - _ -> not_found - end. -``` - -### binary_is_atom - -Check if a given binary is already an atom. - -```erlang -binary_is_atom(X) -> - lists:member(X, lists:map(fun hb_util:bin/1, all_atoms())). 
-``` - -### lower_case_key_map - -```erlang -lower_case_key_map(Map, Opts) -> - hb_maps:fold(fun - (K, V, Acc) when is_map(V) -> - maps:put(hb_util:to_lower(K), lower_case_key_map(V, Opts), Acc); - (K, V, Acc) -> - maps:put(hb_util:to_lower(K), V, Acc) -``` - ---- - -*Generated from [hb_util.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_util.erl)* diff --git a/docs/book/src/hb_volume.erl.md b/docs/book/src/hb_volume.erl.md deleted file mode 100644 index 96ae8207b..000000000 --- a/docs/book/src/hb_volume.erl.md +++ /dev/null @@ -1,1047 +0,0 @@ -# hb_volume - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_volume.erl) - -Helper functions for list_partitions - ---- - -## Exported Functions - -- `change_node_store/2` -- `check_for_device/1` -- `create_partition/2` -- `format_disk/2` -- `list_partitions/0` -- `mount_disk/4` - ---- - -### process_disk_line - -```erlang --spec list_partitions() -> {ok, map()} | {error, binary()}. -list_partitions() -> - ?event(debug_volume, {list_partitions, entry, starting}), - % Get the partition information using fdisk -l - ?event(debug_volume, {list_partitions, executing_fdisk, command}), - case os:cmd("sudo fdisk -l") of - [] -> - % Empty output indicates an error - Reason = <<"Failed to list partitions: no output">>, - ?event(debug_volume, {list_partitions, fdisk_error, no_output}), - {error, Reason}; - Output -> - ?event(debug_volume, {list_partitions, fdisk_success, parsing}), - % Split output into lines - Lines = string:split(Output, "\n", all), - % Process the output to group information by disk - {_, DiskData} = lists:foldl( - fun process_disk_line/2, - {undefined, []}, - Lines - ), - % Process each disk's data to extract all information - DiskObjects = lists:filtermap( - fun(DiskEntry) -> - Device = maps:get(<<"device">>, DiskEntry), - DiskLines = lists:reverse(maps:get(<<"data">>, DiskEntry)), - DiskInfo = parse_disk_info(Device, DiskLines), - {true, DiskInfo} - end, - DiskData - 
), - % Return the partition information - ?event(debug_volume, - {list_partitions, success, - {disk_count, length(DiskObjects)} - } - ), - {ok, #{ - <<"status">> => 200, - <<"content-type">> => <<"application/json">>, - <<"body">> => hb_json:encode(#{<<"disks">> => DiskObjects}) - }} - end. -``` - -```erlang -process_disk_line(Line, {CurrentDisk, Acc}) -> - % Match for a new disk entry - DiskPattern = "^Disk (/dev/(?!ram)\\S+):", - case re:run(Line, DiskPattern, [{capture, [1], binary}]) of - {match, [Device]} -> - % Start a new disk entry - NewDisk = #{ - <<"device">> => Device, - <<"data">> => [Line] - }, - {NewDisk, [NewDisk | Acc]}; - _ when CurrentDisk =:= undefined -> - % Not a disk line and no current disk - {undefined, Acc}; - _ -> - % Add line to current disk's data - CurrentData = maps:get(<<"data">>, CurrentDisk), - UpdatedDisk = CurrentDisk#{ - <<"data">> => [Line | CurrentData] - }, - % Update the list with the modified disk entry - UpdatedAcc = [UpdatedDisk | lists:delete(CurrentDisk, Acc)], - {UpdatedDisk, UpdatedAcc} - end. -``` - -### parse_disk_info - -```erlang -parse_disk_info(Device, Lines) -> - % Initialize with device ID - DiskInfo = #{<<"device">> => Device}, - % Process each line to extract information - lists:foldl( - fun parse_disk_line/2, - DiskInfo, - Lines - ). -``` - -### parse_disk_line - -```erlang -parse_disk_line(Line, Info) -> - % Extract disk size and bytes - SizePattern = "^Disk .+: ([0-9.]+ [KMGT]iB), ([0-9]+) bytes, ([0-9]+) sectors", - case re:run(Line, SizePattern, [{capture, [1, 2, 3], binary}]) of - {match, [Size, Bytes, Sectors]} -> - Info#{ - <<"size">> => Size, - <<"bytes">> => binary_to_integer(Bytes), - <<"sectors">> => binary_to_integer(Sectors) - }; - _ -> - parse_disk_model_line(Line, Info) - end. 
-``` - -### parse_disk_model_line - -```erlang -parse_disk_model_line(Line, Info) -> - % Extract disk model - ModelPattern = "^Disk model: (.+)\\s*$", - case re:run(Line, ModelPattern, [{capture, [1], binary}]) of - {match, [Model]} -> - Info#{<<"model">> => string:trim(Model)}; - _ -> - parse_disk_units_line(Line, Info) - end. -``` - -### parse_disk_units_line - -```erlang -parse_disk_units_line(Line, Info) -> - % Extract units information - UnitsPattern = "^Units: (.+)$", - case re:run(Line, UnitsPattern, [{capture, [1], binary}]) of - {match, [Units]} -> - Info#{<<"units">> => Units}; - _ -> - parse_sector_size_line(Line, Info) - end. -``` - -### parse_sector_size_line - -```erlang -parse_sector_size_line(Line, Info) -> - % Extract sector size - SectorPattern = "^Sector size \\(logical/physical\\): ([^/]+)/(.+)$", - case re:run(Line, SectorPattern, [{capture, [1, 2], binary}]) of - {match, [LogicalSize, PhysicalSize]} -> - Info#{ - <<"sector_size">> => #{ - <<"logical">> => string:trim(LogicalSize), - <<"physical">> => string:trim(PhysicalSize) - } - }; - _ -> - parse_io_size_line(Line, Info) - end. -``` - -### parse_io_size_line - -```erlang -parse_io_size_line(Line, Info) -> - % Extract I/O size - IOPattern = "^I/O size \\(minimum/optimal\\): ([^/]+)/(.+)$", - case re:run(Line, IOPattern, [{capture, [1, 2], binary}]) of - {match, [MinSize, OptSize]} -> - Info#{ - <<"io_size">> => #{ - <<"minimum">> => string:trim(MinSize), - <<"optimal">> => string:trim(OptSize) - } - }; - _ -> - Info - end. -``` - -### create_partition - -```erlang --spec create_partition(Device :: binary(), PartType :: binary()) -> - {ok, map()} | {error, binary()}. -``` - -```erlang -create_partition(undefined, _PartType) -> - ?event(debug_volume, {create_partition, error, device_undefined}), - {error, <<"Device path not specified">>}; -``` - -### create_partition - -```erlang --spec create_partition(Device :: binary(), PartType :: binary()) -> - {ok, map()} | {error, binary()}. 
-``` - -```erlang -create_partition(Device, PartType) -> - ?event(debug_volume, - {create_partition, entry, - {device, Device, part_type, PartType} - } - ), - % Create a GPT partition table - DeviceStr = binary_to_list(Device), - MklabelCmd = "sudo parted " ++ DeviceStr ++ " mklabel gpt", - ?event(debug_volume, - {create_partition, creating_gpt_label, - {device, Device} - } - ), - ?event(debug_volume, - {create_partition, executing_mklabel, - {command, MklabelCmd} - } - ), - case safe_exec(MklabelCmd) of - {ok, Result} -> - ?event(debug_volume, - {create_partition, gpt_label_success, - {result, Result} - } - ), - create_actual_partition(Device, PartType); - {error, ErrorMsg} -> - ?event(debug_volume, - {create_partition, gpt_label_error, - {error, ErrorMsg} - } - ), - {error, ErrorMsg} - end. -``` - -### create_actual_partition - -```erlang -create_actual_partition(Device, PartType) -> - ?event(debug_volume, - {create_actual_partition, entry, - {device, Device, part_type, PartType} - } - ), - DeviceStr = binary_to_list(Device), - PartTypeStr = binary_to_list(PartType), - % Build the parted command to create the partition - MkpartCmd = - "sudo parted -a optimal " ++ DeviceStr ++ - " mkpart primary " ++ PartTypeStr ++ " 0% 100%", - ?event(debug_volume, - {create_actual_partition, executing_mkpart, - {command, MkpartCmd} - } - ), - case safe_exec(MkpartCmd) of - {ok, Result} -> - ?event(debug_volume, - {create_actual_partition, mkpart_success, - {result, Result} - } - ), - get_partition_info(Device); - {error, ErrorMsg} -> - ?event(debug_volume, - {create_actual_partition, mkpart_error, - {error, ErrorMsg} - } - ), - {error, ErrorMsg} - end. 
-``` - -### get_partition_info - -```erlang -get_partition_info(Device) -> - ?event(debug_volume, {get_partition_info, entry, {device, Device}}), - DeviceStr = binary_to_list(Device), - % Print partition information - PrintCmd = "sudo parted " ++ DeviceStr ++ " print", - ?event(debug_volume, - {get_partition_info, executing_print, {command, PrintCmd}} - ), - PartitionInfo = os:cmd(PrintCmd), - ?event(debug_volume, - {get_partition_info, success, partition_created, - {result, PartitionInfo} - } - ), - {ok, #{ - <<"status">> => 200, - <<"message">> => <<"Partition created successfully.">>, - <<"device_path">> => Device, - <<"partition_info">> => list_to_binary(PartitionInfo) - }}. -``` - -### format_disk - -```erlang --spec format_disk(Partition :: binary(), EncKey :: binary()) -> - {ok, map()} | {error, binary()}. -``` - -```erlang -format_disk(undefined, _EncKey) -> - ?event(debug_volume, {format_disk, error, partition_undefined}), - {error, <<"Partition path not specified">>}; -``` - -### format_disk - -```erlang --spec format_disk(Partition :: binary(), EncKey :: binary()) -> - {ok, map()} | {error, binary()}. -``` - -```erlang -format_disk(_Partition, undefined) -> - ?event(debug_volume, {format_disk, error, key_undefined}), - {error, <<"Encryption key not specified">>}; -``` - -### format_disk - -```erlang --spec format_disk(Partition :: binary(), EncKey :: binary()) -> - {ok, map()} | {error, binary()}. 
-``` - -```erlang -format_disk(Partition, EncKey) -> - ?event(debug_volume, - {format_disk, entry, - { - partition, Partition, - key_present, true - } - } - ), - PartitionStr = binary_to_list(Partition), - ?event(debug_volume, {format_disk, creating_secure_key_file, starting}), - with_secure_key_file(EncKey, fun(KeyFile) -> - FormatCmd = - "sudo cryptsetup luksFormat --batch-mode " ++ - "--key-file " ++ KeyFile ++ " " ++ PartitionStr, - ?event(debug_volume, - {format_disk, executing_luks_format, {command, FormatCmd}} - ), - case safe_exec(FormatCmd, ["failed"]) of - {ok, Result} -> - ?event(debug_volume, - {format_disk, luks_format_success, completed, - {result, Result} - } - ), - {ok, #{ - <<"status">> => 200, - <<"message">> => - <<"Partition formatted with LUKS encryption " - "successfully.">> - }}; - {error, ErrorMsg} -> - ?event(debug_volume, - {format_disk, luks_format_error, ErrorMsg} - ), - {error, ErrorMsg} - end - end). -``` - -### mount_disk - -```erlang --spec mount_disk( - Partition :: binary(), - EncKey :: binary(), - MountPoint :: binary(), - VolumeName :: binary() -) -> {ok, map()} | {error, binary()}. -``` - -```erlang -mount_disk(undefined, _EncKey, _MountPoint, _VolumeName) -> - ?event(debug_volume, {mount_disk, error, partition_undefined}), - {error, <<"Partition path not specified">>}; -``` - -### mount_disk - -```erlang --spec mount_disk( - Partition :: binary(), - EncKey :: binary(), - MountPoint :: binary(), - VolumeName :: binary() -) -> {ok, map()} | {error, binary()}. -``` - -```erlang -mount_disk(_Partition, undefined, _MountPoint, _VolumeName) -> - ?event(debug_volume, {mount_disk, error, key_undefined}), - {error, <<"Encryption key not specified">>}; -``` - -### mount_disk - -```erlang --spec mount_disk( - Partition :: binary(), - EncKey :: binary(), - MountPoint :: binary(), - VolumeName :: binary() -) -> {ok, map()} | {error, binary()}. 
-``` - -```erlang -mount_disk(_Partition, _EncKey, undefined, _VolumeName) -> - ?event(debug_volume, {mount_disk, error, mount_point_undefined}), - {error, <<"Mount point not specified">>}; -``` - -### mount_disk - -```erlang --spec mount_disk( - Partition :: binary(), - EncKey :: binary(), - MountPoint :: binary(), - VolumeName :: binary() -) -> {ok, map()} | {error, binary()}. -``` - -```erlang -mount_disk(Partition, EncKey, MountPoint, VolumeName) -> - ?event(debug_volume, - {mount_disk, entry, - { - partition, Partition, - mount_point, MountPoint, - volume_name, VolumeName} - } - ), - PartitionStr = binary_to_list(Partition), - VolumeNameStr = binary_to_list(VolumeName), - ?event(debug_volume, {mount_disk, opening_luks_volume, starting}), - with_secure_key_file(EncKey, fun(KeyFile) -> - OpenCmd = - "sudo cryptsetup luksOpen --key-file " ++ KeyFile ++ - " " ++ PartitionStr ++ " " ++ VolumeNameStr, - ?event(debug_volume, {mount_disk, executing_luks_open, {command, OpenCmd}}), - case safe_exec(OpenCmd, ["failed"]) of - {ok, Result} -> - ?event(debug_volume, - {mount_disk, luks_open_success, proceeding_to_mount, - {result, Result} - } - ), - mount_opened_volume(Partition, MountPoint, VolumeName); - {error, ErrorMsg} -> - ?event(debug_volume, {mount_disk, luks_open_error, ErrorMsg}), - {error, ErrorMsg} - end - end). 
-``` - -### mount_opened_volume - -```erlang -mount_opened_volume(Partition, MountPoint, VolumeName) -> - ?event(debug_volume, - {mount_opened_volume, entry, - { - partition, Partition, - mount_point, MountPoint, - volume_name, VolumeName - } - } - ), - % Create mount point if it doesn't exist - MountPointStr = binary_to_list(MountPoint), - ?event(debug_volume, - {mount_opened_volume, creating_mount_point, MountPoint} - ), - os:cmd("sudo mkdir -p " ++ MountPointStr), - % Check if filesystem exists on the opened LUKS volume - VolumeNameStr = binary_to_list(VolumeName), - DeviceMapperPath = "/dev/mapper/" ++ VolumeNameStr, - % Check filesystem type - FSCheckCmd = "sudo blkid " ++ DeviceMapperPath, - ?event(debug_volume, - {mount_opened_volume, checking_filesystem, {command, FSCheckCmd}} - ), - FSCheckResult = os:cmd(FSCheckCmd), - ?event(debug_volume, - {mount_opened_volume, filesystem_check_result, FSCheckResult} - ), - % Create filesystem if none exists - case string:find(FSCheckResult, "TYPE=") of - nomatch -> - % No filesystem found, create ext4 - ?event(debug_volume, - {mount_opened_volume, creating_filesystem, ext4} - ), - MkfsCmd = "sudo mkfs.ext4 -F " ++ DeviceMapperPath, - ?event(debug_volume, - {mount_opened_volume, executing_mkfs, {command, MkfsCmd}} - ), - MkfsResult = os:cmd(MkfsCmd), - ?event(debug_volume, - {mount_opened_volume, mkfs_result, MkfsResult} - ); - _ -> - ?event(debug_volume, - {mount_opened_volume, filesystem_exists, skipping_creation} - ) - end, - % Mount the unlocked LUKS volume - MountCmd = "sudo mount " ++ DeviceMapperPath ++ " " ++ MountPointStr, - ?event(debug_volume, - {mount_opened_volume, executing_mount, - {command, MountCmd} - } - ), - case safe_exec(MountCmd, ["failed"]) of - {ok, Result} -> - ?event(debug_volume, - {mount_opened_volume, mount_success, - creating_info, {result, Result} - } - ), - create_mount_info(Partition, MountPoint, VolumeName); - {error, ErrorMsg} -> - ?event(debug_volume, - {mount_opened_volume, 
mount_error, - {error, ErrorMsg, closing_luks} - } - ), - % Close the LUKS volume if mounting failed - os:cmd("sudo cryptsetup luksClose " ++ VolumeNameStr), - {error, ErrorMsg} - end. -``` - -### create_mount_info - -```erlang -create_mount_info(Partition, MountPoint, VolumeName) -> - ?event(debug_volume, - {create_mount_info, success, - { - partition, Partition, - mount_point, MountPoint, - volume_name, VolumeName - } - } - ), - {ok, #{ - <<"status">> => 200, - <<"message">> => - <<"Encrypted partition mounted successfully.">>, - <<"mount_point">> => MountPoint, - <<"mount_info">> => #{ - partition => Partition, - mount_point => MountPoint, - volume_name => VolumeName - } - }}. -``` - -### change_node_store - -```erlang --spec change_node_store(StorePath :: binary(), - CurrentStore :: list()) -> - {ok, map()} | {error, binary()}. -``` - -```erlang -change_node_store(undefined, _CurrentStore) -> - ?event(debug_volume, {change_node_store, error, store_path_undefined}), - {error, <<"Store path not specified">>}; -``` - -### change_node_store - -```erlang --spec change_node_store(StorePath :: binary(), - CurrentStore :: list()) -> - {ok, map()} | {error, binary()}. 
-``` - -```erlang -change_node_store(StorePath, CurrentStore) -> - ?event(debug_volume, - {change_node_store, entry, - {store_path, StorePath, current_store, CurrentStore} - } - ), - % Create the store directory if it doesn't exist - StorePathStr = binary_to_list(StorePath), - ?event(debug_volume, {change_node_store, creating_directory, StorePath}), - os:cmd("sudo mkdir -p " ++ StorePathStr), - % Update the store configuration with the new path - ?event(debug_volume, - {change_node_store, updating_config, - {current_store, CurrentStore} - } - ), - NewStore = update_store_config(CurrentStore, StorePath), - % Return the result - ?event(debug_volume, - {change_node_store, success, {new_store_config, NewStore}} - ), - {ok, #{ - <<"status">> => 200, - <<"message">> => - <<"Node store updated to use encrypted disk.">>, - <<"store_path">> => StorePath, - <<"store">> => NewStore - }}. -``` - -### safe_exec - -```erlang -safe_exec(Command) -> - safe_exec(Command, ["Error", "failed", "bad", "error"]). -``` - -### safe_exec - -```erlang -safe_exec(Command, ErrorKeywords) -> - Result = os:cmd(Command), - case check_command_errors(Result, ErrorKeywords) of - ok -> {ok, Result}; - error -> {error, list_to_binary(Result)} - end. -``` - -### check_command_errors - -```erlang -check_command_errors(Result, Keywords) -> - case lists:any(fun(Keyword) -> - string:find(Result, Keyword) =/= nomatch - end, Keywords) of - true -> error; - false -> ok - end. 
-``` - -### with_secure_key_file - -```erlang -with_secure_key_file(EncKey, Fun) -> - ?event(debug_volume, {with_secure_key_file, entry, creating_temp_file}), - os:cmd("sudo mkdir -p /root/tmp"), - % Get process ID and create filename - PID = os:getpid(), - ?event(debug_volume, {with_secure_key_file, process_id, PID}), - KeyFile = "/root/tmp/luks_key_" ++ PID, - ?event(debug_volume, {with_secure_key_file, key_file_path, KeyFile}), - % Check if directory was created successfully - DirCheck = os:cmd("ls -la /root/tmp/"), - ?event(debug_volume, {with_secure_key_file, directory_check, DirCheck}), - try - % Convert EncKey to binary using hb_util - BinaryEncKey = case EncKey of - % Handle RSA wallet tuples - extract private key or use hash - {{rsa, _}, PrivKey, _PubKey} when is_binary(PrivKey) -> - % Use first 32 bytes of private key for AES-256 - case byte_size(PrivKey) of - Size when Size >= 32 -> - binary:part(PrivKey, 0, 32); - _ -> - % If private key is too short, hash it to get 32 bytes - crypto:hash(sha256, PrivKey) - end; - % Handle other complex terms - _ when not is_binary(EncKey) andalso not is_list(EncKey) -> - try - hb_util:bin(EncKey) - catch - _:_ -> - % Fallback to term_to_binary and hash to get consistent - % key size - crypto:hash(sha256, term_to_binary(EncKey)) - end; - % Simple cases handled by hb_util:bin - _ -> - hb_util:bin(EncKey) - end, - WriteResult = file:write_file(KeyFile, BinaryEncKey, [raw]), - ?event(debug_volume, - {with_secure_key_file, write_result, WriteResult} - ), - % Check if file was created - FileExists = filelib:is_regular(KeyFile), - ?event(debug_volume, - {with_secure_key_file, file_exists_check, FileExists} - ), - % If file exists, get its info - case FileExists of - true -> - FileInfo = file:read_file_info(KeyFile), - ?event(debug_volume, - {with_secure_key_file, file_info, FileInfo} - ); - false -> - ?event(debug_volume, - {with_secure_key_file, file_not_found, KeyFile} - ) - end, - % Execute function with key file path - 
?event(debug_volume, - {with_secure_key_file, executing_function, with_key_file} - ), - Result = Fun(KeyFile), - % Always clean up the key file - ?event(debug_volume, - {with_secure_key_file, cleanup, shredding_key_file} - ), - os:cmd("sudo shred -u " ++ KeyFile), - ?event(debug_volume, {with_secure_key_file, success, completed}), - Result - catch - Class:Reason:Stacktrace -> - ?event(debug_volume, - {with_secure_key_file, exception, - {class, Class, reason, Reason, cleanup, starting} - } - ), - % Ensure cleanup even if function fails - os:cmd("sudo shred -u " ++ KeyFile), - ?event(debug_volume, - {with_secure_key_file, exception_cleanup, completed} - ), - erlang:raise(Class, Reason, Stacktrace) - end. -``` - -### update_store_config - -```erlang --spec update_store_config(StoreConfig :: term(), - NewPath :: binary()) -> term(). -``` - -```erlang -update_store_config(StoreConfig, NewPath) when is_list(StoreConfig) -> - % For a list, update each element - [update_store_config(Item, NewPath) || Item <- StoreConfig]; -``` - -### update_store_config - -```erlang --spec update_store_config(StoreConfig :: term(), - NewPath :: binary()) -> term(). 
-``` - -```erlang -update_store_config( - #{<<"store-module">> := Module} = StoreConfig, - NewPath -) when is_map(StoreConfig) -> - % Handle various store module types differently - case Module of - hb_store_fs -> - % For filesystem store, prefix the existing path with the new path - ExistingPath = maps:get(<<"name">>, StoreConfig, <<"">>), - NewName = <>, - ?event(debug_volume, {fs, StoreConfig, NewPath, NewName}), - StoreConfig#{<<"name">> => NewName}; - hb_store_lmdb -> - ExistingPath = maps:get(<<"name">>, StoreConfig, <<"">>), - NewName = <>, - ?event(debug_volume, {migrate_start, ExistingPath, NewName}), - safe_stop_lmdb_store(StoreConfig), - ?event(debug_volume, {using_existing_store, NewName}), - FinalConfig = StoreConfig#{<<"name">> => NewName}, - safe_start_lmdb_store(FinalConfig), - FinalConfig; - hb_store_rocksdb -> - StoreConfig; - hb_store_gateway -> - % For gateway store, recursively update nested store configs - NestedStore = maps:get(<<"store">>, StoreConfig, []), - StoreConfig#{ - <<"store">> => update_store_config(NestedStore, NewPath) - }; - _ -> - % For any other store type, update the prefix - % StoreConfig#{<<"name">> => NewPath} - ?event(debug_volume, {other, StoreConfig, NewPath}), - StoreConfig - end; -``` - -### update_store_config - -```erlang --spec update_store_config(StoreConfig :: term(), - NewPath :: binary()) -> term(). -``` - -```erlang -update_store_config({Type, _OldPath, Opts}, NewPath) -> - % For tuple format with options - {Type, NewPath, Opts}; -``` - -### update_store_config - -```erlang --spec update_store_config(StoreConfig :: term(), - NewPath :: binary()) -> term(). -``` - -```erlang -update_store_config({Type, _OldPath}, NewPath) -> - % For tuple format without options - {Type, NewPath}; -``` - -### update_store_config - -```erlang --spec update_store_config(StoreConfig :: term(), - NewPath :: binary()) -> term(). 
-``` - -```erlang -update_store_config(StoreConfig, _NewPath) -> - % Return unchanged for any other format - StoreConfig. -``` - -### safe_stop_lmdb_store - -```erlang -safe_stop_lmdb_store(StoreConfig) -> - ?event(debug_volume, {stopping_current_store, StoreConfig}), - try - hb_store_lmdb:stop(StoreConfig) - catch - error:StopReason -> - ?event(debug_volume, {stop_error, StopReason}) - end. -``` - -### safe_start_lmdb_store - -```erlang -safe_start_lmdb_store(StoreConfig) -> - NewName = maps:get(<<"name">>, StoreConfig), - ?event(debug_volume, {starting_new_store, NewName}), - hb_store_lmdb:start(StoreConfig). -``` - -### check_command_errors_test - -```erlang --spec check_for_device(Device :: binary()) -> boolean(). -check_for_device(Device) -> - ?event(debug_volume, {check_for_device, entry, {device, Device}}), - Command = - io_lib:format( - "ls -l ~s 2>/dev/null || echo 'not_found'", - [binary_to_list(Device)] - ), - ?event(debug_volume, {check_for_device, executing_command, ls_check}), - Result = os:cmd(Command), - DeviceExists = string:find(Result, "not_found") =:= nomatch, - ?event(debug_volume, - {check_for_device, result, - {device, Device, exists, DeviceExists} - } - ), - DeviceExists. -``` - -```erlang -check_command_errors_test() -> - % Test successful case - no errors - ?assertEqual( - ok, - check_command_errors( - "Success: operation completed", - ["Error", "failed"] - ) - ), - % Test error detection - ?assertEqual( - error, - check_command_errors( - "Error: something went wrong", - ["Error", "failed"] - ) - ), - ?assertEqual( - error, - check_command_errors( - "Operation failed", - ["Error", "failed"] - ) - ), - % Test case sensitivity - ?assertEqual( - ok, - check_command_errors( - "error (lowercase)", - ["Error", "failed"] - ) - ), - % Test multiple keywords - ?assertEqual( - error, - check_command_errors( - "Command failed with Error", - ["Error", "failed"] - ) - ). 
-``` - -### update_store_config_test - -```erlang -update_store_config_test() -> - % Test filesystem store - FSStore = #{ - <<"store-module">> => hb_store_fs, - <<"name">> => <<"cache">> - }, - NewPath = <<"/encrypted/mount">>, - Updated = update_store_config(FSStore, NewPath), - Expected = FSStore#{<<"name">> => <<"/encrypted/mount/cache">>}, - ?assertEqual(Expected, Updated), - % Test list of stores - StoreList = [FSStore, #{<<"store-module">> => hb_store_gateway}], - UpdatedList = update_store_config(StoreList, NewPath), - ?assertEqual(2, length(UpdatedList)), - % Test tuple format - TupleStore = {fs, <<"old_path">>, []}, - UpdatedTuple = update_store_config(TupleStore, NewPath), - ?assertEqual({fs, NewPath, []}, UpdatedTuple). -``` - -### with_secure_key_file_test - -```erlang -with_secure_key_file_test() -> - TestKey = <<"test_encryption_key_123">>, - % Create a safe test version that doesn't use /root/tmp - TestWithSecureKeyFile = fun(EncKey, Fun) -> - % Use /tmp instead of /root/tmp for testing - TmpDir = "/tmp", - KeyFile = TmpDir ++ "/test_luks_key_" ++ os:getpid(), - try - % Write key to temporary file - file:write_file(KeyFile, EncKey, [raw]), - % Execute function with key file path - Result = Fun(KeyFile), - % Clean up the key file - file:delete(KeyFile), - Result - catch - Class:Reason:Stacktrace -> - % Ensure cleanup even if function fails - file:delete(KeyFile), - erlang:raise(Class, Reason, Stacktrace) - end - end, - % Test successful execution - Result = TestWithSecureKeyFile(TestKey, fun(KeyFile) -> - % Verify key file was created and contains the key - ?assert(filelib:is_regular(KeyFile)), - {ok, FileContent} = file:read_file(KeyFile), - ?assertEqual(TestKey, FileContent), - {ok, <<"success">>} - end), - ?assertEqual({ok, <<"success">>}, Result), - % Test exception handling and cleanup - TestException = fun() -> - TestWithSecureKeyFile(TestKey, fun(KeyFile) -> - ?assert(filelib:is_regular(KeyFile)), - error(test_error) - end) - end, - 
?assertError(test_error, TestException()). -``` - -### check_for_device_test - -```erlang -check_for_device_test() -> - % This test would need mocking of os:cmd to be fully testable - % For now, test with /dev/null which should always exist - ?assertEqual(true, check_for_device(<<"/dev/null">>)), - % Test non-existent device - ?assertEqual( - false, - check_for_device(<<"/dev/nonexistent_device_123">>) - ). -``` - -### safe_exec_mock_test - -```erlang -safe_exec_mock_test() -> - % We can't easily mock os:cmd, but we can test the error checking logic - % This is covered by check_command_errors_test above - % Test with default error keywords - TestResult1 = - check_command_errors( - "Operation completed successfully", - ["Error", "failed"] - ), - ?assertEqual(ok, TestResult1), - TestResult2 = - check_command_errors( - "Error: disk not found", - ["Error", "failed"] - ), -``` - ---- - -*Generated from [hb_volume.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/hb_volume.erl)* diff --git a/docs/book/src/rsa_pss.erl.md b/docs/book/src/rsa_pss.erl.md deleted file mode 100644 index e81d9b9ad..000000000 --- a/docs/book/src/rsa_pss.erl.md +++ /dev/null @@ -1,354 +0,0 @@ -# rsa_pss - -[View source on GitHub](https://github.com/permaweb/HyperBEAM/blob/edge/src/rsa_pss.erl) - -**Author:** Andrew Bennett -**Copyright:** 2014-2015, Andrew Bennett -Distributed under the Mozilla Public License v2.0. -Original available at: -https://github.com/potatosalad/erlang-crypto_rsassa_pss -Created : 20 Jul 2015 by Andrew Bennett -Modified: 17 Nov 2017 by The Arweave Team - ---- - -## Exported Functions - -- `sign/3` -- `sign/4` -- `verify_legacy/4` -- `verify/4` - ---- - -### sign - -```erlang --spec sign(Message, DigestType, PrivateKey) -> Signature - when - Message :: binary() | {digest, binary()}, - DigestType :: rsa_digest_type() | atom(), - PrivateKey :: rsa_private_key(), - Signature :: binary(). 
-``` - -```erlang -sign(Message, DigestType, PrivateKey) when is_binary(Message) -> - sign({digest, crypto:hash(DigestType, Message)}, DigestType, PrivateKey); -``` - -### sign - -```erlang --spec sign(Message, DigestType, PrivateKey) -> Signature - when - Message :: binary() | {digest, binary()}, - DigestType :: rsa_digest_type() | atom(), - PrivateKey :: rsa_private_key(), - Signature :: binary(). -``` - -```erlang -sign(Message={digest, _}, DigestType, PrivateKey) -> - SaltLen = byte_size(crypto:hash(DigestType, <<>>)), - Salt = crypto:strong_rand_bytes(SaltLen), - sign(Message, DigestType, Salt, PrivateKey). -``` - -### sign - -```erlang --spec sign(Message, DigestType, Salt, PrivateKey) -> Signature - when - Message :: binary() | {digest, binary()}, - DigestType :: rsa_digest_type() | atom(), - Salt :: binary(), - PrivateKey :: rsa_private_key(), - Signature :: binary(). -``` - -```erlang -sign(Message, DigestType, Salt, PrivateKey) when is_binary(Message) -> - sign({digest, crypto:hash(DigestType, Message)}, DigestType, Salt, PrivateKey); -``` - -### sign - -```erlang --spec sign(Message, DigestType, Salt, PrivateKey) -> Signature - when - Message :: binary() | {digest, binary()}, - DigestType :: rsa_digest_type() | atom(), - Salt :: binary(), - PrivateKey :: rsa_private_key(), - Signature :: binary(). 
-``` - -```erlang -sign({digest, Digest}, DigestType, Salt, PrivateKey=#'RSAPrivateKey'{modulus=N}) -> - DigestLen = byte_size(Digest), - SaltLen = byte_size(Salt), - PublicBitSize = int_to_bit_size(N), - PrivateByteSize = (PublicBitSize + 7) div 8, - PublicByteSize = int_to_byte_size(N), - case PublicByteSize < (DigestLen + SaltLen + 2) of - false -> - DBLen = PrivateByteSize - DigestLen - 1, - M = << 0:64, Digest/binary, Salt/binary >>, - H = crypto:hash(DigestType, M), - DB = << 0:((DBLen - SaltLen - 1) * 8), 1, Salt/binary >>, - DBMask = mgf1(DigestType, H, DBLen), - MaskedDB = normalize_to_key_size(PublicBitSize, crypto:exor(DB, DBMask)), - EM = << MaskedDB/binary, H/binary, ?PSS_TRAILER_FIELD >>, - DM = pad_to_key_size(PublicByteSize, dp(EM, PrivateKey)), - DM; - true -> - erlang:error(badarg, [{digest, Digest}, DigestType, Salt, PrivateKey]) - end. -``` - -### verify - -```erlang --spec verify(Message, DigestType, Signature, PublicKey) -> boolean() - when - Message :: binary() | {digest, binary()}, - DigestType :: rsa_digest_type() | atom(), - Signature :: binary(), - PublicKey :: rsa_public_key(). -``` - -```erlang -verify(Message, DigestType, Signature, PublicKey) when is_binary(Message) -> - verify({digest, crypto:hash(DigestType, Message)}, DigestType, Signature, PublicKey); -``` - -### verify - -```erlang --spec verify(Message, DigestType, Signature, PublicKey) -> boolean() - when - Message :: binary() | {digest, binary()}, - DigestType :: rsa_digest_type() | atom(), - Signature :: binary(), - PublicKey :: rsa_public_key(). 
-``` - -```erlang -verify({digest, Digest}, DigestType, Signature, PublicKey=#'RSAPublicKey'{modulus=N}) -> - DigestLen = byte_size(Digest), - PublicBitSize = int_to_bit_size(N), - PrivateByteSize = (PublicBitSize + 7) div 8, - PublicByteSize = int_to_byte_size(N), - SignatureSize = byte_size(Signature), - case PublicByteSize =:= SignatureSize of - true -> - SignatureNumber = binary:decode_unsigned(Signature, big), - case SignatureNumber >= 0 andalso SignatureNumber < N of - true -> - DBLen = PrivateByteSize - DigestLen - 1, - EM = pad_to_key_size(PrivateByteSize, ep(Signature, PublicKey)), - case binary:last(EM) of - ?PSS_TRAILER_FIELD -> - MaskedDB = binary:part(EM, 0, byte_size(EM) - DigestLen - 1), - H = binary:part(EM, byte_size(MaskedDB), DigestLen), - DBMask = mgf1(DigestType, H, DBLen), - DB = normalize_to_key_size(PublicBitSize, crypto:exor(MaskedDB, DBMask)), - case binary:match(DB, << 1 >>) of - {Pos, Len} -> - PS = binary:decode_unsigned(binary:part(DB, 0, Pos)), - case PS =:= 0 of - true -> - Salt = binary:part(DB, Pos + Len, byte_size(DB) - Pos - Len), - M = << 0:64, Digest/binary, Salt/binary >>, - HOther = crypto:hash(DigestType, M), - H =:= HOther; - false -> - false - end; - nomatch -> - false - end; - _BadTrailer -> - false - end; - _ -> - false - end; - false -> - false - end. 
-``` - -### verify_legacy - -```erlang -verify_legacy(Message, DigestType, Signature, PublicKey) when is_binary(Message) -> - verify_legacy({digest, crypto:hash(DigestType, Message)}, DigestType, Signature, PublicKey); -``` - -### verify_legacy - -```erlang -verify_legacy({digest, Digest}, DigestType, Signature, PublicKey=#'RSAPublicKey'{modulus=N}) -> - DigestLen = byte_size(Digest), - PublicBitSize = int_to_bit_size(N), - PrivateByteSize = PublicBitSize div 8, - PublicByteSize = int_to_byte_size(N), - SignatureSize = byte_size(Signature), - case PublicByteSize =:= SignatureSize of - true -> - SignatureNumber = binary:decode_unsigned(Signature, big), - case SignatureNumber >= 0 andalso SignatureNumber < N of - true -> - DBLen = PrivateByteSize - DigestLen - 1, - EM = pad_to_key_size(PrivateByteSize, ep(Signature, PublicKey)), - case binary:last(EM) of - ?PSS_TRAILER_FIELD -> - MaskedDB = binary:part(EM, 0, byte_size(EM) - DigestLen - 1), - H = binary:part(EM, byte_size(MaskedDB), DigestLen), - DBMask = mgf1(DigestType, H, DBLen), - DB = normalize_to_key_size(PublicBitSize, crypto:exor(MaskedDB, DBMask)), - case binary:match(DB, << 1 >>) of - {Pos, Len} -> - PS = binary:decode_unsigned(binary:part(DB, 0, Pos)), - case PS =:= 0 of - true -> - Salt = binary:part(DB, Pos + Len, byte_size(DB) - Pos - Len), - M = << 0:64, Digest/binary, Salt/binary >>, - HOther = crypto:hash(DigestType, M), - H =:= HOther; - false -> - false - end; - nomatch -> - false - end; - _BadTrailer -> - false - end; - _ -> - false - end; - false -> - false - end. -``` - -### dp - -```erlang -dp(B, #'RSAPrivateKey'{modulus=N, privateExponent=E}) -> - crypto:mod_pow(B, E, N). -``` - -### ep - -```erlang -ep(B, #'RSAPublicKey'{modulus=N, publicExponent=E}) -> - crypto:mod_pow(B, E, N). -``` - -### int_to_bit_size - -```erlang -int_to_bit_size(I) -> - int_to_bit_size(I, 0). 
-``` - -### int_to_bit_size - -```erlang -int_to_bit_size(0, B) -> - B; -``` - -### int_to_bit_size - -```erlang -int_to_bit_size(I, B) -> - int_to_bit_size(I bsr 1, B + 1). -``` - -### int_to_byte_size - -```erlang -int_to_byte_size(I) -> - int_to_byte_size(I, 0). -``` - -### int_to_byte_size - -```erlang -int_to_byte_size(0, B) -> - B; -``` - -### int_to_byte_size - -```erlang -int_to_byte_size(I, B) -> - int_to_byte_size(I bsr 8, B + 1). -``` - -### mgf1 - -```erlang -mgf1(DigestType, Seed, Len) -> - mgf1(DigestType, Seed, Len, <<>>, 0). -``` - -### mgf1 - -```erlang -mgf1(_DigestType, _Seed, Len, T, _Counter) when byte_size(T) >= Len -> - binary:part(T, 0, Len); -``` - -### mgf1 - -```erlang -mgf1(DigestType, Seed, Len, T, Counter) -> - CounterBin = << Counter:8/unsigned-big-integer-unit:4 >>, - NewT = << T/binary, (crypto:hash(DigestType, << Seed/binary, CounterBin/binary >>))/binary >>, - mgf1(DigestType, Seed, Len, NewT, Counter + 1). -``` - -### normalize_to_key_size - -```erlang -normalize_to_key_size(_, <<>>) -> - <<>>; -``` - -### normalize_to_key_size - -```erlang -normalize_to_key_size(Bits, _A = << C, Rest/binary >>) -> - SH = (Bits - 1) band 16#7, - Mask = case SH > 0 of - false -> - 16#FF; - true -> - 16#FF bsr (8 - SH) - end, - B = << (C band Mask), Rest/binary >>, - B. 
-``` - -### pad_to_key_size - -```erlang -pad_to_key_size(Bytes, Data) when byte_size(Data) < Bytes -> - pad_to_key_size(Bytes, << 0, Data/binary >>); -``` - -### pad_to_key_size - -```erlang -pad_to_key_size(_Bytes, Data) -> -``` - ---- - -*Generated from [rsa_pss.erl](https://github.com/permaweb/HyperBEAM/blob/edge/src/rsa_pss.erl)* From 3a02468a61f6a3b1d8d628641fcdf8aef675d27c Mon Sep 17 00:00:00 2001 From: Dylan Shade <63427984+dpshade@users.noreply.github.com> Date: Thu, 18 Sep 2025 13:33:25 -0400 Subject: [PATCH 03/17] docs: Update .gitignore and custom CSS for generated documentation - Add entries to .gitignore for generated literate Erlang documentation files - Adjust custom CSS to fix excessive heading margins in generated documentation - Remove outdated copy functionality section from introduction.md --- .gitignore | 4 + docs/BOOK-README.md | 145 ++++++++++++ docs/book/custom.css | 11 + docs/book/src/introduction.md | 4 - docs/build-literate-erlang.sh | 405 ++++++++++++++++++++++++++++++++++ 5 files changed, 565 insertions(+), 4 deletions(-) create mode 100644 docs/BOOK-README.md create mode 100755 docs/build-literate-erlang.sh diff --git a/.gitignore b/.gitignore index e910e01f0..7f989aaeb 100644 --- a/.gitignore +++ b/.gitignore @@ -46,6 +46,10 @@ mkdocs-site-manifest.csv # mdbook generated documentation docs/book/dist/ +# Generated literate Erlang documentation files +docs/literate-erlang/ +docs/book/src/*.erl.md + !test/admissible-report-wallet.json !test/admissible-report.json !test/config.json \ No newline at end of file diff --git a/docs/BOOK-README.md b/docs/BOOK-README.md new file mode 100644 index 000000000..100574839 --- /dev/null +++ b/docs/BOOK-README.md @@ -0,0 +1,145 @@ + +# HyperBEAM Documentation + +This directory contains the documentation build system for HyperBEAM, implementing a literate programming approach using mdBook to generate browsable documentation directly from Erlang source code. 
+ +## Overview + +The documentation system consists of two main components: + +1. **Literate Erlang Generator**: Converts HyperBEAM Erlang source code into markdown files with embedded documentation +2. **mdBook Documentation Site**: Compiles the generated markdown into a browsable documentation website + +## Quick Start + +```bash +# Generate literate docs and build the book +./docs/build-literate-erlang.sh # Generate .erl.md files from source +cd docs/book && mdbook build # Build the documentation site +cd docs/book && mdbook serve # Serve locally on http://localhost:3471 +``` + +## Build Process + +### 1. Literate Erlang Generation + +The `build-literate-erlang.sh` script processes all `.erl` files in `src/` and generates corresponding `.erl.md` files with: + +- Module documentation extracted from `%%%` and `%% @doc` comments +- Function documentation from preceding comment blocks +- Type specifications (`-spec`) +- Source code formatted in markdown code blocks +- Links to GitHub source files + +**Input**: `src/*.erl` (HyperBEAM Erlang source files) +**Output**: +- `docs/literate-erlang/*.erl.md` (intermediate generated files) +- `docs/book/src/*.erl.md` (copied for mdBook processing) + +### 2. mdBook Compilation + +The mdBook system takes the generated `.erl.md` files and compiles them into a static documentation website. 
+ +**Configuration**: `docs/book/book.toml` +**Source**: `docs/book/src/` +**Output**: `docs/book/dist/` (static website) + +## File Structure + +``` +docs/ +├── README.md # This file +├── build-literate-erlang.sh # Literate Erlang generator +├── literate-erlang/ # Generated .erl.md files (gitignored) +└── book/ + ├── book.toml # mdBook configuration + ├── README.md # mdBook setup instructions + ├── custom.css # Custom styling + ├── custom.js # Custom JavaScript + ├── src/ + │ ├── SUMMARY.md # Navigation structure + │ ├── introduction.md # Introduction page + │ └── *.erl.md # Generated docs (gitignored) + └── dist/ # Built documentation site (gitignored) +``` + +## Generated Files + +**Important**: All `.erl.md` files are generated artifacts and should not be committed to version control. They are automatically excluded via `.gitignore`. + +- `docs/literate-erlang/*.erl.md` - Intermediate generated files +- `docs/book/src/*.erl.md` - Files copied for mdBook processing +- `docs/book/dist/` - Final compiled documentation website + +## Dependencies + +- **mdBook**: Install via `cargo install mdbook` or download from [GitHub](https://github.com/rust-lang/mdBook) +- **Bash**: For running build scripts (available on Unix-like systems) + +## Development Workflow + +1. **Modify Erlang source code** in `src/` with proper documentation comments +2. **Run build script** to regenerate documentation +3. **Preview locally** using `mdbook serve` +4. **Deploy** the `docs/book/dist/` directory to hosting platform + +## Documentation Standards + +### Module Documentation + +Use `%%%` or `%% @doc` at the beginning of files: + +```erlang +%%%------------------------------------------------------------------- +%%% @doc Module for handling HyperBEAM caching operations. +%%% +%%% This module provides... 
+%%% @end +%%%------------------------------------------------------------------- +``` + +### Function Documentation + +Use comment blocks before function definitions: + +```erlang +%% @doc Retrieves a value from the cache. +%% +%% Returns the cached value for the given key, or `undefined` if not found. +-spec get(Key :: term()) -> term() | undefined. +get(Key) -> + % Implementation... +``` + +## Deployment + +The built documentation in `docs/book/dist/` can be deployed to any static hosting service: + +- **Vercel**: Automatic deployment from git repository +- **GitHub Pages**: Use GitHub Actions to build and deploy +- **Netlify**: Connect repository and set build command to `./docs/build-literate-erlang.sh && cd docs/book && mdbook build` + +## Troubleshooting + +### Common Issues + +1. **mdBook not found**: Install mdBook using `cargo install mdbook` +2. **Permission denied**: Make scripts executable with `chmod +x docs/*.sh` +3. **Empty output**: Ensure Erlang files have proper documentation comments + +### Cleaning Up + +```bash +# Remove all generated files +rm -rf docs/literate-erlang/ +rm -f docs/book/src/*.erl.md +rm -rf docs/book/dist/ + +# Regenerate everything +./docs/build-literate-erlang.sh +cd docs/book && mdbook build +``` + +--- + +For more information about the HyperBEAM project, see the main [README](../README.md). 
diff --git a/docs/book/custom.css b/docs/book/custom.css index 79420c4d7..b902be70f 100644 --- a/docs/book/custom.css +++ b/docs/book/custom.css @@ -33,4 +33,15 @@ .rust { --sidebar-active: var(--hyperbeam-neon); --links: var(--hyperbeam-cyan); +} + +/* Fix excessive heading margins in generated documentation */ +.content h2 { + margin-top: 1em !important; + margin-bottom: 0.5em !important; +} + +.content h3 { + margin-top: 1em !important; + margin-bottom: 0.5em !important; } \ No newline at end of file diff --git a/docs/book/src/introduction.md b/docs/book/src/introduction.md index c3ffa0966..396cbdc09 100644 --- a/docs/book/src/introduction.md +++ b/docs/book/src/introduction.md @@ -21,10 +21,6 @@ Use the sidebar to browse modules organized by category: - **Core Services**: Essential HyperBEAM services and components - **HyperBEAM Core**: Foundation modules and utilities -## Copy Functionality - -Each page includes a copy button (📋) in the top-right corner that copies the original markdown content to your clipboard - perfect for sharing with LLMs or analysis tools. - ## About HyperBEAM HyperBEAM is a client implementation of the AO-Core protocol, providing a framework for decentralized computations. 
It offers: diff --git a/docs/build-literate-erlang.sh b/docs/build-literate-erlang.sh new file mode 100755 index 000000000..e55589c6d --- /dev/null +++ b/docs/build-literate-erlang.sh @@ -0,0 +1,405 @@ +#!/bin/bash + +# Script to generate literate Erlang documentation from HyperBEAM source files +# +# This creates .erl.md files that combine source code with documentation +# in a format optimized for GitHub rendering with cleaner appearance +# +# Usage: ./docs/build-literate-erlang.sh [-v | --verbose] +# -v, --verbose: Show detailed processing output + +# --- Color Definitions --- +GREEN='\033[0;32m' +RED='\033[0;31m' +YELLOW='\033[0;33m' +BLUE='\033[0;34m' +BOLD='\033[1m' +NC='\033[0m' # No Color + +# HyperBEAM Logo Colors +NEON_GREEN='\033[38;5;46m' +CYAN='\033[38;5;51m' +BRIGHT_YELLOW='\033[38;5;226m' +MAGENTA='\033[38;5;201m' +BRIGHT_RED='\033[38;5;196m' +BLACK='\033[38;5;0m' +GRAY='\033[38;5;245m' + +# --- Helper Functions --- +log_success() { + echo -e "${GREEN}✓ $1${NC}" +} + +log_info() { + echo -e "${BLUE}→ $1${NC}" +} + +log_step() { + echo -e "\n${YELLOW}${BOLD}$1${NC}" +} + +log_error() { + echo -e "${RED}✗ $1${NC}" +} + +log_verbose() { + if [ "$VERBOSE" = true ]; then + echo -e "${GRAY} $1${NC}" + fi +} + +# --- Variable Defaults --- +VERBOSE=false + +# --- Parse Command Line Arguments --- +while [[ $# -gt 0 ]]; do + key="$1" + case $key in + -v|--verbose) + VERBOSE=true + log_info "Verbose mode enabled" + shift + ;; + *) + log_error "Unknown option: $1" + echo "Usage: $0 [-v | --verbose]" + exit 1 + ;; + esac +done + +# --- Display HyperBEAM ASCII Logo --- +display_logo() { + echo -e " +${NEON_GREEN} ++ ${BLACK}${BOLD} ${NC} +${NEON_GREEN} +++ ${BLACK}${BOLD} _ ${NC} +${NEON_GREEN} ++++* ${BLACK}${BOLD}| |__ _ _ _ __ ___ _ __ ${NC} +${NEON_GREEN} :+++*${BRIGHT_YELLOW}## ${BLACK}${BOLD} | '_ \\| | | | '_ \\ / _ \\ '__| ${NC} +${NEON_GREEN} ++**${BRIGHT_YELLOW}#### ${BLACK}${BOLD} | | | | |_| | |_) | __/ | ${NC} +${NEON_GREEN} 
+++${BRIGHT_YELLOW}####${NEON_GREEN}*** ${BLACK}${BOLD} |_| |_|\\__, | .__/ \\___|_| ${NC} +${NEON_GREEN} +*${BRIGHT_YELLOW}##${NEON_GREEN}****${MAGENTA}+-- ${BLACK}${BOLD} |___/|_| ${NC} +${MAGENTA} -**${BRIGHT_YELLOW}##${NEON_GREEN}**${MAGENTA}+------ ${BLACK}${BOLD} BEAM.${NC} +${MAGENTA} -##${NEON_GREEN}*+${BRIGHT_RED}---::::::: +${GRAY} =${GRAY}%%${NEON_GREEN}*+${BRIGHT_RED}=-:::::::::${GRAY} LITERATE ERLANG DOCUMENTATION${NC} +" +} + +# --- Script Start --- +display_logo +log_step "LITERATE ERLANG DOCUMENTATION GENERATION" + +# Ensure we're in the root directory +ROOT_DIR="$(dirname "$(realpath "$0")")/.." +cd "$ROOT_DIR" || { log_error "Failed to change to root directory"; exit 1; } + +# GitHub repository base URL +GITHUB_BASE_URL="https://github.com/permaweb/HyperBEAM/blob/edge/src" + +# Output directory for literate Erlang files +OUTPUT_DIR="$ROOT_DIR/docs/literate-erlang" +mkdir -p "$OUTPUT_DIR" + +# --- Function to extract module documentation --- +extract_module_doc() { + local file="$1" + local in_doc=false + local doc_content="" + + while IFS= read -r line; do + if [[ "$line" =~ ^%%%[[:space:]]?(.*)$ ]]; then + in_doc=true + doc_content+="${BASH_REMATCH[1]}"$'\n' + elif [[ "$line" =~ ^%%[[:space:]]?(@doc[[:space:]])?(.*)$ ]] && [ "$in_doc" = true ]; then + # Extract content after @doc if present + doc_content+="${BASH_REMATCH[2]}"$'\n' + elif [[ ! 
"$line" =~ ^%% ]] && [ "$in_doc" = true ]; then + break + fi + done < "$file" + + # Clean up @doc prefixes, empty lines, and convert edocs syntax to markdown + echo "$doc_content" | \ + sed 's/^@doc$//' | \ + sed 's/^@doc //' | \ + sed 's/^@end$//' | \ + sed 's/^@author /**Author:** /' | \ + sed 's/^@copyright /**Copyright:** /' | \ + sed 's/^---*$//' | \ + sed '/^[[:space:]]*$/d' | \ + sed "s/\`\([^']*\)'/\`\1\`/g" +} + +# --- Function to extract function documentation --- +extract_function_doc() { + local content="$1" + + # Remove leading %% or % and @doc tags, then convert edocs syntax to markdown + echo "$content" | \ + sed 's/^%% *//' | \ + sed 's/^% *//' | \ + sed 's/^@doc$//' | \ + sed 's/^@doc //' | \ + sed 's/^@end$//' | \ + sed 's/^@author /**Author:** /' | \ + sed 's/^@copyright /**Copyright:** /' | \ + sed 's/^---*$//' | \ + sed '/^$/d' | \ + sed "s/\`\([^']*\)'/\`\1\`/g" +} + +# --- Function to process a single Erlang file --- +process_erlang_file() { + local src_file="$1" + local module_name=$(basename "$src_file" .erl) + local output_file="$OUTPUT_DIR/${module_name}.erl.md" + + log_verbose "Processing $module_name" + + # Start the literate Erlang document with cleaner format + cat > "$output_file" <> "$output_file" + echo "" >> "$output_file" + echo "---" >> "$output_file" + echo "" >> "$output_file" + fi + + # Add module exports in a clean format + local exports=$(grep -E "^-export\(" "$src_file" | sed 's/-export(\[//' | sed 's/\]).*//' | tr ',' '\n' | sed 's/^[[:space:]]*//' | sed 's/[[:space:]]*$//' | sort -u) + + if [ -n "$exports" ]; then + echo "## Exported Functions" >> "$output_file" + echo "" >> "$output_file" + + # Create a proper bulleted list for exports + while IFS= read -r export; do + if [[ "$export" =~ ^[a-z] ]]; then + echo "- \`$export\`" >> "$output_file" + fi + done <<< "$exports" + + echo "" >> "$output_file" + echo "---" >> "$output_file" + echo "" >> "$output_file" + fi + + # Process functions + local in_function=false + local 
in_spec=false + local in_doc_comment=false + local current_function="" + local function_content="" + local spec_content="" + local doc_content="" + local functions_written=0 + + while IFS= read -r line; do + # Check for doc comments (before functions) + if [[ "$line" =~ ^%+[[:space:]]?@doc[[:space:]](.*)$ ]] || + ([[ "$line" =~ ^%+[[:space:]](.*)$ ]] && [ "$in_doc_comment" = true ]); then + in_doc_comment=true + if [[ "$line" =~ @doc[[:space:]](.*)$ ]]; then + doc_content+="${BASH_REMATCH[1]}"$'\n' + else + doc_content+="${BASH_REMATCH[1]}"$'\n' + fi + continue + fi + + # Check for -spec + if [[ "$line" =~ ^-spec[[:space:]] ]]; then + in_spec=true + spec_content="$line"$'\n' + in_doc_comment=false + continue + fi + + # Continue collecting spec if in multi-line spec + if [ "$in_spec" = true ]; then + spec_content+="$line"$'\n' + if [[ "$line" =~ \.[[:space:]]*$ ]]; then + in_spec=false + fi + continue + fi + + # Check for function definition + if [[ "$line" =~ ^([a-z][a-z0-9_]*)[[:space:]]*\( ]]; then + # If we were already in a function, write it out + if [ -n "$current_function" ] && [ -n "$function_content" ]; then + write_clean_function "$output_file" "$current_function" "$spec_content" "$doc_content" "$function_content" "$functions_written" + ((functions_written++)) + fi + + # Start new function + current_function="${BASH_REMATCH[1]}" + function_content="$line"$'\n' + in_function=true + in_doc_comment=false + continue + fi + + # Continue collecting function content + if [ "$in_function" = true ]; then + function_content+="$line"$'\n' + # Check for function end (period at end of line not in string) + if [[ "$line" =~ \.[[:space:]]*$ ]] && ! 
[[ "$line" =~ \" ]]; then + in_function=false + write_clean_function "$output_file" "$current_function" "$spec_content" "$doc_content" "$function_content" "$functions_written" + ((functions_written++)) + current_function="" + function_content="" + spec_content="" + doc_content="" + fi + elif [ "$in_doc_comment" = false ]; then + # Reset doc content if we hit a non-comment, non-function line + doc_content="" + fi + done < "$src_file" + + # Write any remaining function + if [ -n "$current_function" ] && [ -n "$function_content" ]; then + write_clean_function "$output_file" "$current_function" "$spec_content" "$doc_content" "$function_content" "$functions_written" + fi + + # Add footer + echo "" >> "$output_file" + echo "---" >> "$output_file" + echo "" >> "$output_file" + echo "*Generated from [$module_name.erl]($GITHUB_BASE_URL/${module_name}.erl)*" >> "$output_file" +} + +# --- Function to write a function section with cleaner format --- +write_clean_function() { + local output_file="$1" + local func_name="$2" + local spec="$3" + local doc="$4" + local code="$5" + local func_num="$6" + + # Add section separator for better readability (except for first function) + if [ "$func_num" -gt 0 ]; then + echo "" >> "$output_file" + fi + + echo "### $func_name" >> "$output_file" + echo "" >> "$output_file" + + # Add documentation if present + if [ -n "$doc" ]; then + local cleaned_doc=$(extract_function_doc "$doc") + if [ -n "$cleaned_doc" ]; then + echo "$cleaned_doc" >> "$output_file" + echo "" >> "$output_file" + fi + fi + + # Add spec if present (in a more compact format) + if [ -n "$spec" ] && [ "$spec" != $'\n' ]; then + echo '```erlang' >> "$output_file" + echo -n "$spec" | sed '/^[[:space:]]*$/d' >> "$output_file" + echo '```' >> "$output_file" + echo "" >> "$output_file" + fi + + # Add implementation + echo '```erlang' >> "$output_file" + echo -n "$code" | sed '/^[[:space:]]*$/d' >> "$output_file" + echo '```' >> "$output_file" +} + +# --- Main processing loop --- 
+log_step "Processing Erlang source files" + +# Count total files +total_files=$(find "$ROOT_DIR/src" -name "*.erl" -type f | wc -l) +processed=0 + +# Process each .erl file in src directory +find "$ROOT_DIR/src" -name "*.erl" -type f | sort | while read -r erl_file; do + ((processed++)) + module_name=$(basename "$erl_file" .erl) + log_info "[$processed/$total_files] Processing $module_name.erl" + process_erlang_file "$erl_file" +done + +log_success "Processed $total_files Erlang files" + +# --- Generate index file --- +log_step "Generating index file" + +cat > "$OUTPUT_DIR/README.md" <> "$OUTPUT_DIR/README.md" +echo "|--------|-------------|" >> "$OUTPUT_DIR/README.md" + +find "$OUTPUT_DIR" -name "*.erl.md" -type f | sort | while read -r md_file; do + module_name=$(basename "$md_file" .erl.md) + # Try to extract first line of module doc as description + first_line=$(grep -m 1 -A 1 "^# $module_name" "$md_file" | tail -1 | head -c 100) + if [ "$first_line" = "[View source on GitHub]"* ] || [ -z "$first_line" ]; then + first_line="Erlang module" + fi + echo "| [$module_name](./${module_name}.erl.md) | $first_line... |" >> "$OUTPUT_DIR/README.md" +done + +cat >> "$OUTPUT_DIR/README.md" < Date: Thu, 18 Sep 2025 13:35:13 -0400 Subject: [PATCH 04/17] docs: Adjust heading margins in custom CSS for improved documentation layout - Set margin-top for h2 and h3 elements to 0 to reduce excessive spacing in generated documentation. 
--- docs/book/custom.css | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/book/custom.css b/docs/book/custom.css index b902be70f..87b36da96 100644 --- a/docs/book/custom.css +++ b/docs/book/custom.css @@ -37,11 +37,11 @@ /* Fix excessive heading margins in generated documentation */ .content h2 { - margin-top: 1em !important; + margin-top: 0 !important; margin-bottom: 0.5em !important; } .content h3 { - margin-top: 1em !important; + margin-top: 0 !important; margin-bottom: 0.5em !important; } \ No newline at end of file From e3c9ac8b8372ff9605020a2dda2a5f35db5b5549 Mon Sep 17 00:00:00 2001 From: Dylan Shade <63427984+dpshade@users.noreply.github.com> Date: Thu, 18 Sep 2025 13:42:09 -0400 Subject: [PATCH 05/17] docs: Update title and enhance styling in documentation - Change title from "HyperBEAM Literate Documentation" to "HyperBEAM Book" in book.toml and README.md. - Adjust heading margins for h2 and h3 elements in custom.css for improved layout. - Add styling for search input and results in custom.css to enhance user experience. --- docs/book/README.md | 2 +- docs/book/book.toml | 2 +- docs/book/custom.css | 51 +++++++++++++++++++++++++++++++++++++++++--- 3 files changed, 50 insertions(+), 5 deletions(-) diff --git a/docs/book/README.md b/docs/book/README.md index 7a7e325db..08771c858 100644 --- a/docs/book/README.md +++ b/docs/book/README.md @@ -1,4 +1,4 @@ -# HyperBEAM Literate Documentation +# HyperBEAM Book - Literate Documentation This repository contains the mdBook-based documentation for HyperBEAM, generated from Erlang source files using a literate programming approach. 
diff --git a/docs/book/book.toml b/docs/book/book.toml index 225ab784c..b754465c2 100644 --- a/docs/book/book.toml +++ b/docs/book/book.toml @@ -2,7 +2,7 @@ authors = ["Dylan Shade", "HyperBEAM Team"] language = "en" src = "src" -title = "HyperBEAM Literate Documentation" +title = "HyperBEAM Book" description = "Literate programming documentation for the HyperBEAM decentralized operating system, combining Erlang source code with comprehensive documentation." [build] diff --git a/docs/book/custom.css b/docs/book/custom.css index 87b36da96..f71e592e8 100644 --- a/docs/book/custom.css +++ b/docs/book/custom.css @@ -37,11 +37,56 @@ /* Fix excessive heading margins in generated documentation */ .content h2 { - margin-top: 0 !important; + margin-top: 0.5em !important; margin-bottom: 0.5em !important; } .content h3 { - margin-top: 0 !important; - margin-bottom: 0.5em !important; + margin-top: 0.5em !important; + margin-bottom: 0.75em !important; +} + +/* Search input styling */ +#searchbar { + outline: none !important; + border: 1px solid transparent !important; + transition: border-color 300ms ease-in-out, box-shadow 300ms ease-in-out !important; +} + +#searchbar:focus, +#searchbar.active { + box-shadow: none !important; + border-color: var(--links) !important; +} + +/* Search result highlighting */ +.searchresults mark { + background-color: var(--links) !important; + color: var(--bg) !important; + opacity: 0.8; +} + +/* Search input clear button (X) styling */ +#searchbar::-webkit-search-cancel-button { + -webkit-appearance: none; + appearance: none; + height: 14px; + width: 14px; + cursor: pointer; + position: relative; +} + +#searchbar::-webkit-search-cancel-button::before { + content: "×"; + font-size: 18px; + color: var(--links); + font-weight: bold; + position: absolute; + top: -2px; + left: 0; + line-height: 1; +} + +#searchbar::-webkit-search-cancel-button:hover::before { + color: var(--sidebar-active); } \ No newline at end of file From 
bd513e50a9dc94e796a7a99bef252a66dfb45f33 Mon Sep 17 00:00:00 2001 From: Dylan Shade <63427984+dpshade@users.noreply.github.com> Date: Thu, 18 Sep 2025 14:25:25 -0400 Subject: [PATCH 06/17] ci: Update GitHub Actions workflow for mkdocs documentation deployment - Change workflow name to "Build & Deploy mkdocs Documentation". - Update trigger branches for pull requests and pushes to include `docs/deploy` and `docs/deploy-test`. - Exclude `docs/book/**` from triggering the workflow. - Modify deployment conditions to handle production and preview deployments based on the branch. - Rename build step to "Build mkdocs Documentation" for clarity. --- .github/workflows/build-deploy-docs.yml | 58 +++++++++--- .github/workflows/build-deploy-mdbook.yml | 109 ++++++++++++++++++++++ 2 files changed, 155 insertions(+), 12 deletions(-) create mode 100644 .github/workflows/build-deploy-mdbook.yml diff --git a/.github/workflows/build-deploy-docs.yml b/.github/workflows/build-deploy-docs.yml index 8024b54dc..226812d6c 100644 --- a/.github/workflows/build-deploy-docs.yml +++ b/.github/workflows/build-deploy-docs.yml @@ -1,20 +1,24 @@ -name: 🥘 Build & Deploy Docs HB +name: 🥘 Build & Deploy mkdocs Documentation on: pull_request: branches: - - main + - docs/deploy + - docs/deploy-test paths: # Trigger on changes to docs, mkdocs config, or the workflow itself - "docs/**" + - "!docs/book/**" - "mkdocs.yml" - ".github/workflows/build-deploy-docs.yml" push: branches: - - main + - docs/deploy + - docs/deploy-test paths: # Trigger on changes to docs, mkdocs config, or the workflow itself - "docs/**" + - "!docs/book/**" - "mkdocs.yml" - ".github/workflows/build-deploy-docs.yml" @@ -52,6 +56,7 @@ jobs: . ~/otp-27.0/activate echo "Erlang version:" erl -eval 'io:format("~s~n", [erlang:system_info(otp_release)]), halt().' 
+ # Install system dependencies needed for HyperBEAM - name: Install system dependencies run: | @@ -62,15 +67,19 @@ jobs: ncurses-dev \ libssl-dev \ ca-certificates + # Debug step - display the region with syntax error - name: Debug syntax error region run: | echo "Showing the region with syntax error in hb_message.erl:" sed -n '1440,1460p' src/hb_message.erl || echo "File not found or cannot be read" + echo "Checking for syntax error fix files:" find . -name "*.erl.fix" -o -name "hb_message.erl.*" | grep -v ".beam" || echo "No fix files found" + echo "Erlang version:" . ~/otp-27.0/activate && erl -eval 'io:format("~s~n", [erlang:system_info(otp_release)]), halt().' + # Install rebar3 - name: Install rebar3 run: | @@ -79,12 +88,14 @@ jobs: curl -O https://s3.amazonaws.com/rebar3/rebar3 && chmod +x rebar3 sudo mv rebar3 /usr/local/bin/rebar3 . ~/otp-27.0/activate && rebar3 --version + # Install Rust toolchain (needed for WASM components) - name: Install Rust and Cargo run: | curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y echo "$HOME/.cargo/bin" >> $GITHUB_PATH source "$HOME/.cargo/env" + # Setup Node.js - name: ⎔ Setup Node uses: actions/setup-node@v3 @@ -96,18 +107,20 @@ jobs: run: | python -m pip install --upgrade pip pip install mkdocs mkdocs-material mkdocs-git-revision-date-localized-plugin - - name: 🛠 Build Docs + + - name: 🛠 Build mkdocs Documentation run: | . ~/otp-27.0/activate SKIP_COMPILE=1 SKIP_EDOC=1 ./docs/build-all.sh -v + # Build and deploy the artifacts to Arweave via ArDrive deploy: - if: github.ref == 'refs/heads/main' + if: github.ref == 'refs/heads/docs/deploy' || github.ref == 'refs/heads/docs/deploy-test' runs-on: ubuntu-22.04 # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. # However, do NOT cancel in-progress runs as we want to allow these deployments to complete. 
concurrency: - group: deploy + group: deploy-mkdocs cancel-in-progress: false steps: - name: ⬇️ Checkout repo @@ -131,6 +144,7 @@ jobs: . ~/otp-27.0/activate echo "Erlang version:" erl -eval 'io:format("~s~n", [erlang:system_info(otp_release)]), halt().' + # Install system dependencies needed for HyperBEAM - name: Install system dependencies run: | @@ -141,15 +155,19 @@ jobs: ncurses-dev \ libssl-dev \ ca-certificates + # Debug step - display the region with syntax error - name: Debug syntax error region run: | echo "Showing the region with syntax error in hb_message.erl:" sed -n '1440,1460p' src/hb_message.erl || echo "File not found or cannot be read" + echo "Checking for syntax error fix files:" find . -name "*.erl.fix" -o -name "hb_message.erl.*" | grep -v ".beam" || echo "No fix files found" + echo "Erlang version:" . ~/otp-27.0/activate && erl -eval 'io:format("~s~n", [erlang:system_info(otp_release)]), halt().' + # Install rebar3 - name: Install rebar3 run: | @@ -158,17 +176,20 @@ jobs: curl -O https://s3.amazonaws.com/rebar3/rebar3 && chmod +x rebar3 sudo mv rebar3 /usr/local/bin/rebar3 . ~/otp-27.0/activate && rebar3 --version + # Install Rust toolchain (needed for WASM components) - name: Install Rust and Cargo run: | curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y echo "$HOME/.cargo/bin" >> $GITHUB_PATH source "$HOME/.cargo/env" + # Install pip dependencies and cache them - name: 📦 Install Python dependencies run: | python -m pip install --upgrade pip pip install mkdocs mkdocs-material mkdocs-git-revision-date-localized-plugin + # Setup Node.js (needed for npx deploy command) - name: ⎔ Setup Node uses: actions/setup-node@v3 @@ -184,21 +205,34 @@ jobs: VER=`node --version`; echo "Node ver: $VER" VER=`npm --version`; echo "npm ver: $VER" . ~/otp-27.0/activate && erl -eval 'io:format("Erlang OTP version: ~s~n", [erlang:system_info(otp_release)]), halt().' 
- - name: 🛠 Build Docs + + - name: 🛠 Build mkdocs Documentation id: build_artifacts run: | . ~/otp-27.0/activate SKIP_COMPILE=1 SKIP_EDOC=1 ./docs/build-all.sh -v touch mkdocs-site/.nojekyll + echo "artifacts_output_dir=mkdocs-site" >> $GITHUB_OUTPUT + - name: 💾 Publish to Arweave id: publish_artifacts run: | - npx permaweb-deploy \ - --arns-name=dps-testing-facility \ - --ant-process=${{ secrets.ANT_PROCESS }} \ - --deploy-folder=${ARTIFACTS_OUTPUT_DIR} + if [[ "${{ github.ref }}" == "refs/heads/docs/deploy" ]]; then + echo "Deploying to production ArNS" + npx permaweb-deploy \ + --arns-name=hyperbeam \ + --ant-process=${{ secrets.ANT_PROCESS }} \ + --deploy-folder=${ARTIFACTS_OUTPUT_DIR} + elif [[ "${{ github.ref }}" == "refs/heads/docs/deploy-test" ]]; then + echo "Deploying to preview ArNS undername" + npx permaweb-deploy \ + --arns-name=hyperbeam \ + --undername=preview \ + --ant-process=${{ secrets.ANT_PROCESS }} \ + --deploy-folder=${ARTIFACTS_OUTPUT_DIR} + fi env: DEPLOY_KEY: ${{ secrets.DEPLOY_KEY }} ARTIFACTS_OUTPUT_DIR: ${{ steps.build_artifacts.outputs.artifacts_output_dir }} - ANT_PROCESS: ${{ secrets.ANT_PROCESS }} + ANT_PROCESS: ${{ secrets.ANT_PROCESS }} \ No newline at end of file diff --git a/.github/workflows/build-deploy-mdbook.yml b/.github/workflows/build-deploy-mdbook.yml new file mode 100644 index 000000000..a5b0cf0f4 --- /dev/null +++ b/.github/workflows/build-deploy-mdbook.yml @@ -0,0 +1,109 @@ +name: 📖 Build & Deploy mdBook Documentation + +on: + pull_request: + branches: + - edge + paths: + # Trigger on changes to mdbook docs or the workflow itself + - "docs/book/**" + - ".github/workflows/build-deploy-mdbook.yml" + push: + branches: + - edge + paths: + # Trigger on changes to mdbook docs or the workflow itself + - "docs/book/**" + - ".github/workflows/build-deploy-mdbook.yml" + + # Perform a release using a workflow dispatch + workflow_dispatch: + +defaults: + run: + shell: bash + +jobs: + # Run the build as part of PRs to confirm the 
site properly builds + check_build: + if: ${{ startsWith(github.ref, 'refs/pull/') }} + runs-on: ubuntu-22.04 + steps: + - name: ⬇️ Checkout repo + uses: actions/checkout@v3 + + # Install mdBook for literate documentation + - name: 📖 Install mdBook + run: | + curl -L https://github.com/rust-lang/mdBook/releases/download/v0.4.40/mdbook-v0.4.40-x86_64-unknown-linux-gnu.tar.gz | tar xz + chmod +x mdbook + sudo mv mdbook /usr/local/bin/mdbook + mdbook --version + + - name: 📝 Generate Literate Erlang Documentation + run: | + ./docs/build-literate-erlang.sh -v + + - name: 📚 Build mdBook Documentation + run: | + cd docs/book && mdbook build + + # Build and deploy the mdBook to Arweave + deploy: + if: github.ref == 'refs/heads/edge' + runs-on: ubuntu-22.04 + # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. + # However, do NOT cancel in-progress runs as we want to allow these deployments to complete. + concurrency: + group: deploy-mdbook + cancel-in-progress: false + steps: + - name: ⬇️ Checkout repo + uses: actions/checkout@v3 + + # Setup Node.js (needed for npx deploy command) + - name: ⎔ Setup Node + uses: actions/setup-node@v3 + with: + node-version: 22 + + - name: 👀 Env + run: | + echo "Event name: ${{ github.event_name }}" + echo "Git ref: ${{ github.ref }}" + echo "GH actor: ${{ github.actor }}" + echo "SHA: ${{ github.sha }}" + VER=`node --version`; echo "Node ver: $VER" + VER=`npm --version`; echo "npm ver: $VER" + + # Install mdBook for literate documentation + - name: 📖 Install mdBook + run: | + curl -L https://github.com/rust-lang/mdBook/releases/download/v0.4.40/mdbook-v0.4.40-x86_64-unknown-linux-gnu.tar.gz | tar xz + chmod +x mdbook + sudo mv mdbook /usr/local/bin/mdbook + mdbook --version + + - name: 📝 Generate Literate Erlang Documentation + run: | + ./docs/build-literate-erlang.sh -v + + - name: 📚 Build mdBook Documentation + id: build_mdbook + run: | + cd docs/book && mdbook build + cd ../.. 
+ echo "mdbook_output_dir=docs/book/dist" >> $GITHUB_OUTPUT + + - name: 📖 Publish mdBook to Arweave + id: publish_mdbook + run: | + npx permaweb-deploy \ + --arns-name=hyperbeam \ + --undername=book \ + --ant-process=${{ secrets.ANT_PROCESS }} \ + --deploy-folder=${MDBOOK_OUTPUT_DIR} + env: + DEPLOY_KEY: ${{ secrets.DEPLOY_KEY }} + MDBOOK_OUTPUT_DIR: ${{ steps.build_mdbook.outputs.mdbook_output_dir }} + ANT_PROCESS: ${{ secrets.ANT_PROCESS }} \ No newline at end of file From 451cbd27e033ef028d05415b4241234a0bf688d4 Mon Sep 17 00:00:00 2001 From: Dylan Shade <63427984+dpshade@users.noreply.github.com> Date: Thu, 18 Sep 2025 14:41:10 -0400 Subject: [PATCH 07/17] ci: Update GitHub Actions workflow for mdBook documentation deployment - Modify workflow triggers to include changes in source code and mdBook configuration files. - Remove outdated references to `docs/book/**` and streamline the deployment process. - Ensure the workflow is set to trigger on relevant changes for improved documentation management. 
--- .github/workflows/build-deploy-mdbook.yml | 16 ++- docs/BOOK-README.md | 145 ---------------------- docs/README.md | 37 +++++- docs/book/README.md | 144 +++++++++------------ 4 files changed, 108 insertions(+), 234 deletions(-) delete mode 100644 docs/BOOK-README.md diff --git a/.github/workflows/build-deploy-mdbook.yml b/.github/workflows/build-deploy-mdbook.yml index a5b0cf0f4..2780d2c22 100644 --- a/.github/workflows/build-deploy-mdbook.yml +++ b/.github/workflows/build-deploy-mdbook.yml @@ -5,15 +5,23 @@ on: branches: - edge paths: - # Trigger on changes to mdbook docs or the workflow itself - - "docs/book/**" + # Trigger on changes to source code, mdbook config, or the workflow itself + - "src/**" + - "docs/book/book.toml" + - "docs/book/custom.css" + - "docs/book/custom.js" + - "docs/build-literate-erlang.sh" - ".github/workflows/build-deploy-mdbook.yml" push: branches: - edge paths: - # Trigger on changes to mdbook docs or the workflow itself - - "docs/book/**" + # Trigger on changes to source code, mdbook config, or the workflow itself + - "src/**" + - "docs/book/book.toml" + - "docs/book/custom.css" + - "docs/book/custom.js" + - "docs/build-literate-erlang.sh" - ".github/workflows/build-deploy-mdbook.yml" # Perform a release using a workflow dispatch diff --git a/docs/BOOK-README.md b/docs/BOOK-README.md deleted file mode 100644 index 100574839..000000000 --- a/docs/BOOK-README.md +++ /dev/null @@ -1,145 +0,0 @@ - -# HyperBEAM Documentation - -This directory contains the documentation build system for HyperBEAM, implementing a literate programming approach using mdBook to generate browsable documentation directly from Erlang source code. - -## Overview - -The documentation system consists of two main components: - -1. **Literate Erlang Generator**: Converts HyperBEAM Erlang source code into markdown files with embedded documentation -2. 
**mdBook Documentation Site**: Compiles the generated markdown into a browsable documentation website - -## Quick Start - -```bash -# Generate literate docs and build the book -./docs/build-literate-erlang.sh # Generate .erl.md files from source -cd docs/book && mdbook build # Build the documentation site -cd docs/book && mdbook serve # Serve locally on http://localhost:3471 -``` - -## Build Process - -### 1. Literate Erlang Generation - -The `build-literate-erlang.sh` script processes all `.erl` files in `src/` and generates corresponding `.erl.md` files with: - -- Module documentation extracted from `%%%` and `%% @doc` comments -- Function documentation from preceding comment blocks -- Type specifications (`-spec`) -- Source code formatted in markdown code blocks -- Links to GitHub source files - -**Input**: `src/*.erl` (HyperBEAM Erlang source files) -**Output**: -- `docs/literate-erlang/*.erl.md` (intermediate generated files) -- `docs/book/src/*.erl.md` (copied for mdBook processing) - -### 2. mdBook Compilation - -The mdBook system takes the generated `.erl.md` files and compiles them into a static documentation website. - -**Configuration**: `docs/book/book.toml` -**Source**: `docs/book/src/` -**Output**: `docs/book/dist/` (static website) - -## File Structure - -``` -docs/ -├── README.md # This file -├── build-literate-erlang.sh # Literate Erlang generator -├── literate-erlang/ # Generated .erl.md files (gitignored) -└── book/ - ├── book.toml # mdBook configuration - ├── README.md # mdBook setup instructions - ├── custom.css # Custom styling - ├── custom.js # Custom JavaScript - ├── src/ - │ ├── SUMMARY.md # Navigation structure - │ ├── introduction.md # Introduction page - │ └── *.erl.md # Generated docs (gitignored) - └── dist/ # Built documentation site (gitignored) -``` - -## Generated Files - -**Important**: All `.erl.md` files are generated artifacts and should not be committed to version control. They are automatically excluded via `.gitignore`. 
- -- `docs/literate-erlang/*.erl.md` - Intermediate generated files -- `docs/book/src/*.erl.md` - Files copied for mdBook processing -- `docs/book/dist/` - Final compiled documentation website - -## Dependencies - -- **mdBook**: Install via `cargo install mdbook` or download from [GitHub](https://github.com/rust-lang/mdBook) -- **Bash**: For running build scripts (available on Unix-like systems) - -## Development Workflow - -1. **Modify Erlang source code** in `src/` with proper documentation comments -2. **Run build script** to regenerate documentation -3. **Preview locally** using `mdbook serve` -4. **Deploy** the `docs/book/dist/` directory to hosting platform - -## Documentation Standards - -### Module Documentation - -Use `%%%` or `%% @doc` at the beginning of files: - -```erlang -%%%------------------------------------------------------------------- -%%% @doc Module for handling HyperBEAM caching operations. -%%% -%%% This module provides... -%%% @end -%%%------------------------------------------------------------------- -``` - -### Function Documentation - -Use comment blocks before function definitions: - -```erlang -%% @doc Retrieves a value from the cache. -%% -%% Returns the cached value for the given key, or `undefined` if not found. --spec get(Key :: term()) -> term() | undefined. -get(Key) -> - % Implementation... -``` - -## Deployment - -The built documentation in `docs/book/dist/` can be deployed to any static hosting service: - -- **Vercel**: Automatic deployment from git repository -- **GitHub Pages**: Use GitHub Actions to build and deploy -- **Netlify**: Connect repository and set build command to `./docs/build-literate-erlang.sh && cd docs/book && mdbook build` - -## Troubleshooting - -### Common Issues - -1. **mdBook not found**: Install mdBook using `cargo install mdbook` -2. **Permission denied**: Make scripts executable with `chmod +x docs/*.sh` -3. 
**Empty output**: Ensure Erlang files have proper documentation comments - -### Cleaning Up - -```bash -# Remove all generated files -rm -rf docs/literate-erlang/ -rm -f docs/book/src/*.erl.md -rm -rf docs/book/dist/ - -# Regenerate everything -./docs/build-literate-erlang.sh -cd docs/book && mdbook build -``` - ---- - -For more information about the HyperBEAM project, see the main [README](../README.md). diff --git a/docs/README.md b/docs/README.md index 4168a5bed..72d1db150 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,7 +1,13 @@ - ## Documentation -HyperBEAM uses [MkDocs](https://www.mkdocs.org/) with the [Material for MkDocs](https://squidfunk.github.io/mkdocs-material/) theme to build its documentation site. +HyperBEAM uses two documentation systems: + +1. **[MkDocs](https://www.mkdocs.org/)** - Main documentation site with Material theme +2. **[mdBook](https://rust-lang.github.io/mdBook/)** - Literate programming documentation generated from Erlang source + +## MkDocs Documentation + +The main documentation site uses MkDocs with the Material for MkDocs theme. Building the documentation requires Python 3 and pip. It's recommended to use a virtual environment: @@ -52,6 +58,31 @@ Press `Ctrl+C` in the terminal where the server is running to stop it. The final static site is generated in the `mkdocs-site` directory, as configured in `mkdocs.yml` (`site_dir: mkdocs-site`). +## mdBook Documentation (Literate Programming) + +The literate programming documentation system generates browsable documentation directly from Erlang source code comments. 
+ +**Live Site:** [hyperbeam.arweave.dev/book](https://hyperbeam.arweave.dev/book) + +### Quick Start + +```bash +# Generate literate docs and build the book (integrated workflow) +./docs/build-literate-erlang.sh # Generate .erl.md files and copy to mdBook +cd docs/book && mdbook build # Build the documentation site +cd docs/book && mdbook serve # Serve locally on http://localhost:3471 +``` + +### Automated Deployment + +Documentation is automatically deployed via GitHub Actions: + +- **Triggers**: Pushes to `edge` branch affecting `src/**` or mdBook configuration +- **Process**: Generates fresh documentation from source → builds mdBook → deploys to ArNS +- **URL**: [hyperbeam.arweave.dev/book](https://hyperbeam.arweave.dev/book) + +For complete mdBook documentation details, see [book/README.md](book/README.md). + ### Contributing to the Documentation To contribute documentation to HyperBEAM, follow these steps: @@ -112,4 +143,4 @@ To contribute documentation to HyperBEAM, follow these steps: - Be prepared to make adjustments based on feedback - Once approved, your documentation will be merged into the main repository -For more detailed contribution guidelines, see the [Community Guidelines](./docs/misc/community/guidelines.md) and [Development Setup](./docs/misc/community/setup.md) documentation. +For more detailed contribution guidelines, see the [Community Guidelines](./docs/misc/community/guidelines.md) and [Development Setup](./docs/misc/community/setup.md) documentation. \ No newline at end of file diff --git a/docs/book/README.md b/docs/book/README.md index 08771c858..fd87cc052 100644 --- a/docs/book/README.md +++ b/docs/book/README.md @@ -1,125 +1,105 @@ -# HyperBEAM Book - Literate Documentation +# HyperBEAM mdBook - Literate Documentation -This repository contains the mdBook-based documentation for HyperBEAM, generated from Erlang source files using a literate programming approach. 
+This directory contains the mdBook configuration for generating literate programming documentation from HyperBEAM Erlang source code. + +**Live Documentation**: [hyperbeam.arweave.dev/book](https://hyperbeam.arweave.dev/book) ## Overview -The documentation combines Erlang source code with comprehensive documentation in a format optimized for both reading and LLM consumption. Each `.erl.md` file represents a module from the HyperBEAM codebase with embedded documentation, function signatures, and implementation details. +The mdBook system generates comprehensive documentation by extracting comments and code structure from Erlang source files, creating a browsable reference that stays in sync with the codebase. -## Generation Process +## Quick Start -### 1. Source Documentation Generation +```bash +# From project root - generate and build in one step +./docs/build-literate-erlang.sh -v -Documentation is generated from the HyperBEAM repository using the literate Erlang script: +# Build mdBook only (if .erl.md files already exist) +cd docs/book && mdbook build -```bash -# From the HyperBEAM repository -./docs/build-literate-erlang.sh +# Serve locally for development +cd docs/book && mdbook serve # Opens on http://localhost:3471 ``` -This script: -- Extracts module documentation from `%%%` comments -- Converts edoc tags (`@author`, `@copyright`, `@doc`, `@end`) to markdown format -- Processes function documentation and specifications -- Converts quote patterns (`'text'` to `text`) for proper backtick formatting -- Generates individual `.erl.md` files for each module +## Automated Deployment -### 2. 
Copy Generated Files +Documentation automatically deploys when source code changes: -Copy the generated documentation to this book's source directory: +- **Triggers**: Pushes to `edge` branch affecting `src/**` or mdBook config +- **GitHub Action**: `build-deploy-mdbook.yml` +- **Process**: Generates `.erl.md` → builds mdBook → deploys to ArNS +- **Live URL**: [hyperbeam.arweave.dev/book](https://hyperbeam.arweave.dev/book) -```bash -# Copy from HyperBEAM docs/literate-erlang/ to src/ -cp /path/to/HyperBEAM/docs/literate-erlang/*.erl.md src/ -``` +## File Structure -### 3. Build the mdBook - -Generate the final documentation: - -```bash -mdbook build ``` - -This creates the static HTML documentation in the `book/` directory. +book/ +├── book.toml # mdBook configuration +├── custom.css # HyperBEAM brand styling +├── custom.js # Enhanced copy functionality +├── src/ +│ ├── SUMMARY.md # Navigation structure +│ ├── introduction.md # Welcome page +│ └── *.erl.md # Generated module docs (auto-copied) +└── dist/ # Built documentation (gitignored) +``` ## Features -### Enhanced Copy Functionality - -The documentation includes a custom copy button (📋 icon) in the top-right corner that: -- Fetches the original markdown content from the `src/` directory -- Copies the raw markdown to clipboard for LLM use -- Preserves all formatting, code blocks, and structure exactly as written - -### Theme Support - -Supports all mdBook themes with HyperBEAM brand colors: -- **Neon Green**: `#00ff94` -- **Cyan**: `#00d4ff` -- **Yellow**: `#fff700` -- **Magenta**: `#ff006a` +### HyperBEAM Branding +- Custom CSS with brand colors across all mdBook themes +- Enhanced search interface with theme-aware styling +- Professional appearance matching HyperBEAM design -### Clean Documentation Structure +### Enhanced Copy Functionality +- Copy button (📋) for LLM consumption +- Fetches raw markdown preserving all formatting +- Perfect for AI analysis and code understanding -Each module page includes: -- 
GitHub source link pointing to the `edge` branch -- **Author** and **Copyright** information (when available) -- Exported functions list -- Function documentation with signatures -- Implementation code blocks -- Test functions (when present) +### Source Integration +- Direct links to GitHub source files on `edge` branch +- Module documentation extracted from `%%%` comments +- Function documentation and type specifications +- Clean code block formatting ## Configuration -### book.toml - -Key configuration options: +### book.toml Key Settings ```toml [book] -title = "HyperBEAM Literate Documentation" +title = "HyperBEAM Book" src = "src" [build] -build-dir = "book" +build-dir = "dist" [output.html] additional-css = ["custom.css"] additional-js = ["custom.js"] edit-url-template = "https://github.com/permaweb/HyperBEAM/edit/edge/src/{path}" -git-repository-url = "https://github.com/permaweb/HyperBEAM" ``` -### Custom Styling +### Port Configuration +- **Default**: `localhost:3000` +- **Configured**: `localhost:3471` (to avoid conflicts) -- `custom.css`: HyperBEAM brand colors for all themes -- `custom.js`: Copy functionality and theme detection +## Development -## Development Workflow +### Manual Workflow +1. **Edit Erlang source** with proper documentation comments +2. **Generate docs**: `./docs/build-literate-erlang.sh -v` +3. **Preview**: `cd docs/book && mdbook serve` -1. **Update source documentation**: Run `./docs/build-literate-erlang.sh` in HyperBEAM repo -2. **Copy to book**: Transfer generated `.erl.md` files to `src/` -3. **Build book**: Run `mdbook build` -4. **Serve locally**: Use `mdbook serve` for development +### Production Workflow +1. **Push to edge** with source changes +2. 
**GitHub Actions** handles the rest automatically -## Repository Structure +## Dependencies -``` -HB-DevicesBook/ -├── src/ # Markdown source files -│ ├── *.erl.md # Generated module documentation -│ └── SUMMARY.md # Book structure -├── book/ # Generated HTML output -├── custom.css # HyperBEAM theme styling -├── custom.js # Copy functionality -├── book.toml # mdBook configuration -└── README.md # This file -``` +- **mdBook**: `cargo install mdbook` or [download binary](https://github.com/rust-lang/mdBook/releases) +- **Generated by**: `build-literate-erlang.sh` script -## Notes +--- -- The documentation is generated from the HyperBEAM `edge` branch -- All GitHub links point to the source files in the HyperBEAM repository -- The copy functionality fetches original markdown for accurate LLM consumption -- Search is enabled with fuzzy matching and result limiting for performance \ No newline at end of file +For the complete documentation overview including MkDocs, see [../README.md](../README.md). \ No newline at end of file From efa121b9fe82b159f43de1208cef0081a16fa981 Mon Sep 17 00:00:00 2001 From: Dylan Shade <63427984+dpshade@users.noreply.github.com> Date: Thu, 18 Sep 2025 17:21:00 -0400 Subject: [PATCH 08/17] docs: Enhance syntax highlighting in documentation - Add new CSS file `theme/highlight.css` for improved syntax highlighting styles. - Update `book.toml` to include the new CSS file alongside the existing custom styles. - Introduce `highlight.js` for enhanced code highlighting functionality in documentation. 
--- docs/book/book.toml | 2 +- docs/book/theme/highlight.css | 341 ++++++++++++++++++++++++++++++++++ docs/book/theme/highlight.js | 17 ++ 3 files changed, 359 insertions(+), 1 deletion(-) create mode 100644 docs/book/theme/highlight.css create mode 100644 docs/book/theme/highlight.js diff --git a/docs/book/book.toml b/docs/book/book.toml index b754465c2..86adbc7af 100644 --- a/docs/book/book.toml +++ b/docs/book/book.toml @@ -12,7 +12,7 @@ build-dir = "dist" edition = "2021" [output.html] -additional-css = ["custom.css"] +additional-css = ["custom.css", "theme/highlight.css"] additional-js = ["custom.js"] mathjax-support = false copy-fonts = true diff --git a/docs/book/theme/highlight.css b/docs/book/theme/highlight.css new file mode 100644 index 000000000..ea3f4ab3e --- /dev/null +++ b/docs/book/theme/highlight.css @@ -0,0 +1,341 @@ +/* + GitHub-like syntax highlighting theme for Highlight.js + Optimized for mdBook and HyperBEAM branding +*/ + +.hljs { + display: block; + overflow-x: auto; + padding: 0.5em; + color: #24292e; + background: #f6f8fa; + border-radius: 0.25em; +} + +.hljs-comment, +.hljs-quote { + color: #6a737d; + font-style: italic; +} + +.hljs-keyword, +.hljs-selector-tag, +.hljs-subst { + color: #d73a49; + font-weight: bold; +} + +.hljs-number, +.hljs-literal, +.hljs-variable, +.hljs-template-variable, +.hljs-tag .hljs-attr { + color: #005cc5; +} + +.hljs-string, +.hljs-doctag { + color: #032f62; +} + +.hljs-title, +.hljs-section, +.hljs-selector-id { + color: #6f42c1; + font-weight: bold; +} + +.hljs-subst { + font-weight: normal; +} + +.hljs-type, +.hljs-class .hljs-title { + color: #e36209; + font-weight: bold; +} + +.hljs-tag, +.hljs-name, +.hljs-attribute { + color: #22863a; + font-weight: normal; +} + +.hljs-regexp, +.hljs-link { + color: #032f62; +} + +.hljs-symbol, +.hljs-bullet { + color: #e36209; +} + +.hljs-built_in, +.hljs-builtin-name { + color: #005cc5; +} + +.hljs-meta { + color: #6a737d; + font-weight: bold; +} + +.hljs-deletion { 
+ background: #fdd; +} + +.hljs-addition { + background: #dfd; +} + +.hljs-emphasis { + font-style: italic; +} + +.hljs-strong { + font-weight: bold; +} + +/* Rust theme - warm, earthy color scheme */ +.rust .hljs { + color: #2d2a24; + background: hsl(60deg 6.35% 80%);; +} + +.rust .hljs-comment, +.rust .hljs-quote { + color: #5a5750; + font-style: italic; +} + +.rust .hljs-keyword, +.rust .hljs-selector-tag, +.rust .hljs-subst { + color: #a0714a; + font-weight: bold; +} + +.rust .hljs-string, +.rust .hljs-doctag { + color: #8b5a2b; +} + +.rust .hljs-number, +.rust .hljs-literal, +.rust .hljs-variable, +.rust .hljs-template-variable { + color: #d4935c; +} + +.rust .hljs-title, +.rust .hljs-section, +.rust .hljs-selector-id { + color: #a0714a; + font-weight: bold; +} + +.rust .hljs-type, +.rust .hljs-class .hljs-title { + color: #a97951; + font-weight: bold; +} + +.rust .hljs-built_in, +.rust .hljs-builtin-name { + color: #c7814b; +} + +.rust .hljs-meta { + color: #5a5750; + font-weight: bold; +} + +.rust .hljs-tag, +.rust .hljs-name, +.rust .hljs-attribute { + color: #a0714a; +} + +.rust .hljs-regexp, +.rust .hljs-link { + color: #b5753a; +} + +.rust .hljs-symbol, +.rust .hljs-bullet { + color: #c7814b; +} + +/* Override CSS rules that force blue/green colors in rust theme */ +.rust .content a > .hljs, +.rust #searchresults a > .hljs, +.rust a:link > .hljs, +.rust a:visited > .hljs { + color: #d4935c !important; +} + +.rust .chapter li a.active > .hljs { + color: #d4935c !important; +} + +/* Navy theme - keep existing dark styling */ +.navy .hljs, +.coal .hljs, +.ayu .hljs { + color: #c9d1d9; + background: #0d1117; +} + +.navy .hljs-comment, +.coal .hljs-comment, +.ayu .hljs-comment, +.navy .hljs-quote, +.coal .hljs-quote, +.ayu .hljs-quote { + color: #8b949e; +} + +.navy .hljs-keyword, +.coal .hljs-keyword, +.ayu .hljs-keyword { + color: #ff7b72; +} + +.navy .hljs-string, +.coal .hljs-string, +.ayu .hljs-string { + color: #a5d6ff; +} + +.navy .hljs-number, +.coal 
.hljs-number, +.ayu .hljs-number { + color: #79c0ff; +} + +.navy .hljs-literal, +.coal .hljs-literal, +.ayu .hljs-literal, +.navy .hljs-variable, +.coal .hljs-variable, +.ayu .hljs-variable, +.navy .hljs-template-variable, +.coal .hljs-template-variable, +.ayu .hljs-template-variable, +.navy .hljs-tag .hljs-attr, +.coal .hljs-tag .hljs-attr, +.ayu .hljs-tag .hljs-attr { + color: #79c0ff; +} + +.navy .hljs-title, +.coal .hljs-title, +.ayu .hljs-title, +.navy .hljs-section, +.coal .hljs-section, +.ayu .hljs-section, +.navy .hljs-selector-id, +.coal .hljs-selector-id, +.ayu .hljs-selector-id { + color: #d2a8ff; + font-weight: bold; +} + +.navy .hljs-type, +.coal .hljs-type, +.ayu .hljs-type, +.navy .hljs-class .hljs-title, +.coal .hljs-class .hljs-title, +.ayu .hljs-class .hljs-title { + color: #ffa657; + font-weight: bold; +} + +.navy .hljs-tag, +.coal .hljs-tag, +.ayu .hljs-tag, +.navy .hljs-name, +.coal .hljs-name, +.ayu .hljs-name, +.navy .hljs-attribute, +.coal .hljs-attribute, +.ayu .hljs-attribute { + color: #7ee787; + font-weight: normal; +} + +.navy .hljs-regexp, +.coal .hljs-regexp, +.ayu .hljs-regexp, +.navy .hljs-link, +.coal .hljs-link, +.ayu .hljs-link { + color: #a5d6ff; +} + +.navy .hljs-symbol, +.coal .hljs-symbol, +.ayu .hljs-symbol, +.navy .hljs-bullet, +.coal .hljs-bullet, +.ayu .hljs-bullet { + color: #ffa657; +} + +.navy .hljs-built_in, +.coal .hljs-built_in, +.ayu .hljs-built_in, +.navy .hljs-builtin-name, +.coal .hljs-builtin-name, +.ayu .hljs-builtin-name { + color: #79c0ff; +} + +.navy .hljs-meta, +.coal .hljs-meta, +.ayu .hljs-meta { + color: #7d8590; + font-weight: bold; +} + +.navy .hljs-selector-tag, +.coal .hljs-selector-tag, +.ayu .hljs-selector-tag, +.navy .hljs-subst, +.coal .hljs-subst, +.ayu .hljs-subst { + color: #ff7b72; + font-weight: bold; +} + +.navy .hljs-doctag, +.coal .hljs-doctag, +.ayu .hljs-doctag { + color: #a5d6ff; +} + +.navy .hljs-deletion, +.coal .hljs-deletion, +.ayu .hljs-deletion { + background: #490202; +} + 
+.navy .hljs-addition, +.coal .hljs-addition, +.ayu .hljs-addition { + background: #033a16; +} + +.navy .hljs-emphasis, +.coal .hljs-emphasis, +.ayu .hljs-emphasis { + font-style: italic; +} + +.navy .hljs-strong, +.coal .hljs-strong, +.ayu .hljs-strong { + font-weight: bold; +} \ No newline at end of file diff --git a/docs/book/theme/highlight.js b/docs/book/theme/highlight.js new file mode 100644 index 000000000..95aaf3459 --- /dev/null +++ b/docs/book/theme/highlight.js @@ -0,0 +1,17 @@ +/* + Highlight.js 10.1.1 (93fd0d73) + License: BSD-3-Clause + Copyright (c) 2006-2020, Ivan Sagalaev +*/ +var hljs=function(){"use strict";function e(n){Object.freeze(n);var t="function"==typeof n;return Object.getOwnPropertyNames(n).forEach((function(r){!Object.hasOwnProperty.call(n,r)||null===n[r]||"object"!=typeof n[r]&&"function"!=typeof n[r]||t&&("caller"===r||"callee"===r||"arguments"===r)||Object.isFrozen(n[r])||e(n[r])})),n}class n{constructor(e){void 0===e.data&&(e.data={}),this.data=e.data}ignoreMatch(){this.ignore=!0}}function t(e){return e.replace(/&/g,"&").replace(//g,">").replace(/"/g,""").replace(/'/g,"'")}function r(e,...n){var t={};for(const n in e)t[n]=e[n];return n.forEach((function(e){for(const n in e)t[n]=e[n]})),t}function a(e){return e.nodeName.toLowerCase()}var i=Object.freeze({__proto__:null,escapeHTML:t,inherit:r,nodeStream:function(e){var n=[];return function e(t,r){for(var i=t.firstChild;i;i=i.nextSibling)3===i.nodeType?r+=i.nodeValue.length:1===i.nodeType&&(n.push({event:"start",offset:r,node:i}),r=e(i,r),a(i).match(/br|hr|img|input/)||n.push({event:"stop",offset:r,node:i}));return r}(e,0),n},mergeStreams:function(e,n,r){var i=0,s="",o=[];function l(){return e.length&&n.length?e[0].offset!==n[0].offset?e[0].offset"}function u(e){s+=""}function d(e){("start"===e.event?c:u)(e.node)}for(;e.length||n.length;){var 
g=l();if(s+=t(r.substring(i,g[0].offset)),i=g[0].offset,g===e){o.reverse().forEach(u);do{d(g.splice(0,1)[0]),g=l()}while(g===e&&g.length&&g[0].offset===i);o.reverse().forEach(c)}else"start"===g[0].event?o.push(g[0].node):o.pop(),d(g.splice(0,1)[0])}return s+t(r.substr(i))}});const s="",o=e=>!!e.kind;class l{constructor(e,n){this.buffer="",this.classPrefix=n.classPrefix,e.walk(this)}addText(e){this.buffer+=t(e)}openNode(e){if(!o(e))return;let n=e.kind;e.sublanguage||(n=`${this.classPrefix}${n}`),this.span(n)}closeNode(e){o(e)&&(this.buffer+=s)}value(){return this.buffer}span(e){this.buffer+=``}}class c{constructor(){this.rootNode={children:[]},this.stack=[this.rootNode]}get top(){return this.stack[this.stack.length-1]}get root(){return this.rootNode}add(e){this.top.children.push(e)}openNode(e){const n={kind:e,children:[]};this.add(n),this.stack.push(n)}closeNode(){if(this.stack.length>1)return this.stack.pop()}closeAllNodes(){for(;this.closeNode(););}toJSON(){return JSON.stringify(this.rootNode,null,4)}walk(e){return this.constructor._walk(e,this.rootNode)}static _walk(e,n){return"string"==typeof n?e.addText(n):n.children&&(e.openNode(n),n.children.forEach(n=>this._walk(e,n)),e.closeNode(n)),e}static _collapse(e){"string"!=typeof e&&e.children&&(e.children.every(e=>"string"==typeof e)?e.children=[e.children.join("")]:e.children.forEach(e=>{c._collapse(e)}))}}class u extends c{constructor(e){super(),this.options=e}addKeyword(e,n){""!==e&&(this.openNode(n),this.addText(e),this.closeNode())}addText(e){""!==e&&this.add(e)}addSublanguage(e,n){const t=e.root;t.kind=n,t.sublanguage=!0,this.add(t)}toHTML(){return new l(this,this.options).value()}finalize(){return!0}}function d(e){return e?"string"==typeof e?e:e.source:null}const 
g="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",h={begin:"\\\\[\\s\\S]",relevance:0},f={className:"string",begin:"'",end:"'",illegal:"\\n",contains:[h]},p={className:"string",begin:'"',end:'"',illegal:"\\n",contains:[h]},b={begin:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/},m=function(e,n,t={}){var a=r({className:"comment",begin:e,end:n,contains:[]},t);return a.contains.push(b),a.contains.push({className:"doctag",begin:"(?:TODO|FIXME|NOTE|BUG|OPTIMIZE|HACK|XXX):",relevance:0}),a},v=m("//","$"),x=m("/\\*","\\*/"),E=m("#","$");var _=Object.freeze({__proto__:null,IDENT_RE:"[a-zA-Z]\\w*",UNDERSCORE_IDENT_RE:"[a-zA-Z_]\\w*",NUMBER_RE:"\\b\\d+(\\.\\d+)?",C_NUMBER_RE:g,BINARY_NUMBER_RE:"\\b(0b[01]+)",RE_STARTERS_RE:"!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~",SHEBANG:(e={})=>{const n=/^#![ ]*\//;return e.binary&&(e.begin=function(...e){return 
e.map(e=>d(e)).join("")}(n,/.*\b/,e.binary,/\b.*/)),r({className:"meta",begin:n,end:/$/,relevance:0,"on:begin":(e,n)=>{0!==e.index&&n.ignoreMatch()}},e)},BACKSLASH_ESCAPE:h,APOS_STRING_MODE:f,QUOTE_STRING_MODE:p,PHRASAL_WORDS_MODE:b,COMMENT:m,C_LINE_COMMENT_MODE:v,C_BLOCK_COMMENT_MODE:x,HASH_COMMENT_MODE:E,NUMBER_MODE:{className:"number",begin:"\\b\\d+(\\.\\d+)?",relevance:0},C_NUMBER_MODE:{className:"number",begin:g,relevance:0},BINARY_NUMBER_MODE:{className:"number",begin:"\\b(0b[01]+)",relevance:0},CSS_NUMBER_MODE:{className:"number",begin:"\\b\\d+(\\.\\d+)?(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",relevance:0},REGEXP_MODE:{begin:/(?=\/[^/\n]*\/)/,contains:[{className:"regexp",begin:/\//,end:/\/[gimuy]*/,illegal:/\n/,contains:[h,{begin:/\[/,end:/\]/,relevance:0,contains:[h]}]}]},TITLE_MODE:{className:"title",begin:"[a-zA-Z]\\w*",relevance:0},UNDERSCORE_TITLE_MODE:{className:"title",begin:"[a-zA-Z_]\\w*",relevance:0},METHOD_GUARD:{begin:"\\.\\s*[a-zA-Z_]\\w*",relevance:0},END_SAME_AS_BEGIN:function(e){return Object.assign(e,{"on:begin":(e,n)=>{n.data._beginMatch=e[1]},"on:end":(e,n)=>{n.data._beginMatch!==e[1]&&n.ignoreMatch()}})}}),N="of and for in not or if then".split(" ");function w(e,n){return n?+n:function(e){return N.includes(e.toLowerCase())}(e)?0:1}const R=t,y=r,{nodeStream:k,mergeStreams:O}=i,M=Symbol("nomatch");return function(t){var a=[],i={},s={},o=[],l=!0,c=/(^(<[^>]+>|\t|)+|\n)/gm,g="Could not find the language '{}', did you forget to load/include a language module?";const h={disableAutodetect:!0,name:"Plain text",contains:[]};var f={noHighlightRe:/^(no-?highlight)$/i,languageDetectRe:/\blang(?:uage)?-([\w-]+)\b/i,classPrefix:"hljs-",tabReplace:null,useBR:!1,languages:null,__emitter:u};function p(e){return f.noHighlightRe.test(e)}function b(e,n,t,r){var a={code:n,language:e};S("before:highlight",a);var i=a.result?a.result:m(a.language,a.code,t,r);return 
i.code=a.code,S("after:highlight",i),i}function m(e,t,a,s){var o=t;function c(e,n){var t=E.case_insensitive?n[0].toLowerCase():n[0];return Object.prototype.hasOwnProperty.call(e.keywords,t)&&e.keywords[t]}function u(){null!=y.subLanguage?function(){if(""!==A){var e=null;if("string"==typeof y.subLanguage){if(!i[y.subLanguage])return void O.addText(A);e=m(y.subLanguage,A,!0,k[y.subLanguage]),k[y.subLanguage]=e.top}else e=v(A,y.subLanguage.length?y.subLanguage:null);y.relevance>0&&(I+=e.relevance),O.addSublanguage(e.emitter,e.language)}}():function(){if(!y.keywords)return void O.addText(A);let e=0;y.keywordPatternRe.lastIndex=0;let n=y.keywordPatternRe.exec(A),t="";for(;n;){t+=A.substring(e,n.index);const r=c(y,n);if(r){const[e,a]=r;O.addText(t),t="",I+=a,O.addKeyword(n[0],e)}else t+=n[0];e=y.keywordPatternRe.lastIndex,n=y.keywordPatternRe.exec(A)}t+=A.substr(e),O.addText(t)}(),A=""}function h(e){return e.className&&O.openNode(e.className),y=Object.create(e,{parent:{value:y}})}function p(e){return 0===y.matcher.regexIndex?(A+=e[0],1):(L=!0,0)}var b={};function x(t,r){var i=r&&r[0];if(A+=t,null==i)return u(),0;if("begin"===b.type&&"end"===r.type&&b.index===r.index&&""===i){if(A+=o.slice(r.index,r.index+1),!l){const n=Error("0 width match regex");throw n.languageName=e,n.badRule=b.rule,n}return 1}if(b=r,"begin"===r.type)return function(e){var t=e[0],r=e.rule;const a=new n(r),i=[r.__beforeBegin,r["on:begin"]];for(const n of i)if(n&&(n(e,a),a.ignore))return p(t);return r&&r.endSameAsBegin&&(r.endRe=RegExp(t.replace(/[-/\\^$*+?.()|[\]{}]/g,"\\$&"),"m")),r.skip?A+=t:(r.excludeBegin&&(A+=t),u(),r.returnBegin||r.excludeBegin||(A=t)),h(r),r.returnBegin?0:t.length}(r);if("illegal"===r.type&&!a){const e=Error('Illegal lexeme "'+i+'" for mode "'+(y.className||"")+'"');throw e.mode=y,e}if("end"===r.type){var s=function(e){var t=e[0],r=o.substr(e.index),a=function e(t,r,a){let i=function(e,n){var t=e&&e.exec(n);return t&&0===t.index}(t.endRe,a);if(i){if(t["on:end"]){const e=new 
n(t);t["on:end"](r,e),e.ignore&&(i=!1)}if(i){for(;t.endsParent&&t.parent;)t=t.parent;return t}}if(t.endsWithParent)return e(t.parent,r,a)}(y,e,r);if(!a)return M;var i=y;i.skip?A+=t:(i.returnEnd||i.excludeEnd||(A+=t),u(),i.excludeEnd&&(A=t));do{y.className&&O.closeNode(),y.skip||y.subLanguage||(I+=y.relevance),y=y.parent}while(y!==a.parent);return a.starts&&(a.endSameAsBegin&&(a.starts.endRe=a.endRe),h(a.starts)),i.returnEnd?0:t.length}(r);if(s!==M)return s}if("illegal"===r.type&&""===i)return 1;if(B>1e5&&B>3*r.index)throw Error("potential infinite loop, way more iterations than matches");return A+=i,i.length}var E=T(e);if(!E)throw console.error(g.replace("{}",e)),Error('Unknown language: "'+e+'"');var _=function(e){function n(n,t){return RegExp(d(n),"m"+(e.case_insensitive?"i":"")+(t?"g":""))}class t{constructor(){this.matchIndexes={},this.regexes=[],this.matchAt=1,this.position=0}addRule(e,n){n.position=this.position++,this.matchIndexes[this.matchAt]=n,this.regexes.push([n,e]),this.matchAt+=function(e){return RegExp(e.toString()+"|").exec("").length-1}(e)+1}compile(){0===this.regexes.length&&(this.exec=()=>null);const e=this.regexes.map(e=>e[1]);this.matcherRe=n(function(e,n="|"){for(var t=/\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./,r=0,a="",i=0;i0&&(a+=n),a+="(";o.length>0;){var l=t.exec(o);if(null==l){a+=o;break}a+=o.substring(0,l.index),o=o.substring(l.index+l[0].length),"\\"===l[0][0]&&l[1]?a+="\\"+(+l[1]+s):(a+=l[0],"("===l[0]&&r++)}a+=")"}return a}(e),!0),this.lastIndex=0}exec(e){this.matcherRe.lastIndex=this.lastIndex;const n=this.matcherRe.exec(e);if(!n)return null;const t=n.findIndex((e,n)=>n>0&&void 0!==e),r=this.matchIndexes[t];return n.splice(0,t),Object.assign(n,r)}}class a{constructor(){this.rules=[],this.multiRegexes=[],this.count=0,this.lastIndex=0,this.regexIndex=0}getMatcher(e){if(this.multiRegexes[e])return this.multiRegexes[e];const n=new t;return 
this.rules.slice(e).forEach(([e,t])=>n.addRule(e,t)),n.compile(),this.multiRegexes[e]=n,n}considerAll(){this.regexIndex=0}addRule(e,n){this.rules.push([e,n]),"begin"===n.type&&this.count++}exec(e){const n=this.getMatcher(this.regexIndex);n.lastIndex=this.lastIndex;const t=n.exec(e);return t&&(this.regexIndex+=t.position+1,this.regexIndex===this.count&&(this.regexIndex=0)),t}}function i(e,n){const t=e.input[e.index-1],r=e.input[e.index+e[0].length];"."!==t&&"."!==r||n.ignoreMatch()}if(e.contains&&e.contains.includes("self"))throw Error("ERR: contains `self` is not supported at the top-level of a language. See documentation.");return function t(s,o){const l=s;if(s.compiled)return l;s.compiled=!0,s.__beforeBegin=null,s.keywords=s.keywords||s.beginKeywords;let c=null;if("object"==typeof s.keywords&&(c=s.keywords.$pattern,delete s.keywords.$pattern),s.keywords&&(s.keywords=function(e,n){var t={};return"string"==typeof e?r("keyword",e):Object.keys(e).forEach((function(n){r(n,e[n])})),t;function r(e,r){n&&(r=r.toLowerCase()),r.split(" ").forEach((function(n){var r=n.split("|");t[r[0]]=[e,w(r[0],r[1])]}))}}(s.keywords,e.case_insensitive)),s.lexemes&&c)throw Error("ERR: Prefer `keywords.$pattern` to `mode.lexemes`, BOTH are not allowed. 
(see mode reference) ");return l.keywordPatternRe=n(s.lexemes||c||/\w+/,!0),o&&(s.beginKeywords&&(s.begin="\\b("+s.beginKeywords.split(" ").join("|")+")(?=\\b|\\s)",s.__beforeBegin=i),s.begin||(s.begin=/\B|\b/),l.beginRe=n(s.begin),s.endSameAsBegin&&(s.end=s.begin),s.end||s.endsWithParent||(s.end=/\B|\b/),s.end&&(l.endRe=n(s.end)),l.terminator_end=d(s.end)||"",s.endsWithParent&&o.terminator_end&&(l.terminator_end+=(s.end?"|":"")+o.terminator_end)),s.illegal&&(l.illegalRe=n(s.illegal)),void 0===s.relevance&&(s.relevance=1),s.contains||(s.contains=[]),s.contains=[].concat(...s.contains.map((function(e){return function(e){return e.variants&&!e.cached_variants&&(e.cached_variants=e.variants.map((function(n){return r(e,{variants:null},n)}))),e.cached_variants?e.cached_variants:function e(n){return!!n&&(n.endsWithParent||e(n.starts))}(e)?r(e,{starts:e.starts?r(e.starts):null}):Object.isFrozen(e)?r(e):e}("self"===e?s:e)}))),s.contains.forEach((function(e){t(e,l)})),s.starts&&t(s.starts,o),l.matcher=function(e){const n=new a;return e.contains.forEach(e=>n.addRule(e.begin,{rule:e,type:"begin"})),e.terminator_end&&n.addRule(e.terminator_end,{type:"end"}),e.illegal&&n.addRule(e.illegal,{type:"illegal"}),n}(l),l}(e)}(E),N="",y=s||_,k={},O=new f.__emitter(f);!function(){for(var e=[],n=y;n!==E;n=n.parent)n.className&&e.unshift(n.className);e.forEach(e=>O.openNode(e))}();var A="",I=0,S=0,B=0,L=!1;try{for(y.matcher.considerAll();;){B++,L?L=!1:(y.matcher.lastIndex=S,y.matcher.considerAll());const e=y.matcher.exec(o);if(!e)break;const n=x(o.substring(S,e.index),e);S=e.index+n}return x(o.substr(S)),O.closeAllNodes(),O.finalize(),N=O.toHTML(),{relevance:I,value:N,language:e,illegal:!1,emitter:O,top:y}}catch(n){if(n.message&&n.message.includes("Illegal"))return{illegal:!0,illegalBy:{msg:n.message,context:o.slice(S-100,S+100),mode:n.mode},sofar:N,relevance:0,value:R(o),emitter:O};if(l)return{illegal:!1,relevance:0,value:R(o),emitter:O,language:e,top:y,errorRaised:n};throw n}}function 
v(e,n){n=n||f.languages||Object.keys(i);var t=function(e){const n={relevance:0,emitter:new f.__emitter(f),value:R(e),illegal:!1,top:h};return n.emitter.addText(e),n}(e),r=t;return n.filter(T).filter(I).forEach((function(n){var a=m(n,e,!1);a.language=n,a.relevance>r.relevance&&(r=a),a.relevance>t.relevance&&(r=t,t=a)})),r.language&&(t.second_best=r),t}function x(e){return f.tabReplace||f.useBR?e.replace(c,e=>"\n"===e?f.useBR?"
":e:f.tabReplace?e.replace(/\t/g,f.tabReplace):e):e}function E(e){let n=null;const t=function(e){var n=e.className+" ";n+=e.parentNode?e.parentNode.className:"";const t=f.languageDetectRe.exec(n);if(t){var r=T(t[1]);return r||(console.warn(g.replace("{}",t[1])),console.warn("Falling back to no-highlight mode for this block.",e)),r?t[1]:"no-highlight"}return n.split(/\s+/).find(e=>p(e)||T(e))}(e);if(p(t))return;S("before:highlightBlock",{block:e,language:t}),f.useBR?(n=document.createElement("div")).innerHTML=e.innerHTML.replace(/\n/g,"").replace(//g,"\n"):n=e;const r=n.textContent,a=t?b(t,r,!0):v(r),i=k(n);if(i.length){const e=document.createElement("div");e.innerHTML=a.value,a.value=O(i,k(e),r)}a.value=x(a.value),S("after:highlightBlock",{block:e,result:a}),e.innerHTML=a.value,e.className=function(e,n,t){var r=n?s[n]:t,a=[e.trim()];return e.match(/\bhljs\b/)||a.push("hljs"),e.includes(r)||a.push(r),a.join(" ").trim()}(e.className,t,a.language),e.result={language:a.language,re:a.relevance,relavance:a.relevance},a.second_best&&(e.second_best={language:a.second_best.language,re:a.second_best.relevance,relavance:a.second_best.relevance})}const N=()=>{if(!N.called){N.called=!0;var e=document.querySelectorAll("pre code");a.forEach.call(e,E)}};function T(e){return e=(e||"").toLowerCase(),i[e]||i[s[e]]}function A(e,{languageName:n}){"string"==typeof e&&(e=[e]),e.forEach(e=>{s[e]=n})}function I(e){var n=T(e);return n&&!n.disableAutodetect}function S(e,n){var t=e;o.forEach((function(e){e[t]&&e[t](n)}))}Object.assign(t,{highlight:b,highlightAuto:v,fixMarkup:x,highlightBlock:E,configure:function(e){f=y(f,e)},initHighlighting:N,initHighlightingOnLoad:function(){window.addEventListener("DOMContentLoaded",N,!1)},registerLanguage:function(e,n){var r=null;try{r=n(t)}catch(n){if(console.error("Language definition for '{}' could not be registered.".replace("{}",e)),!l)throw 
n;console.error(n),r=h}r.name||(r.name=e),i[e]=r,r.rawDefinition=n.bind(null,t),r.aliases&&A(r.aliases,{languageName:e})},listLanguages:function(){return Object.keys(i)},getLanguage:T,registerAliases:A,requireLanguage:function(e){var n=T(e);if(n)return n;throw Error("The '{}' language is required, but not loaded.".replace("{}",e))},autoDetection:I,inherit:y,addPlugin:function(e){o.push(e)}}),t.debugMode=function(){l=!1},t.safeMode=function(){l=!0},t.versionString="10.1.1";for(const n in _)"object"==typeof _[n]&&e(_[n]);return Object.assign(t,_),t}({})}();"object"==typeof exports&&"undefined"!=typeof module&&(module.exports=hljs); +hljs.registerLanguage("apache",function(){"use strict";return function(e){var n={className:"number",begin:"\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(:\\d{1,5})?"};return{name:"Apache config",aliases:["apacheconf"],case_insensitive:!0,contains:[e.HASH_COMMENT_MODE,{className:"section",begin:"",contains:[n,{className:"number",begin:":\\d{1,5}"},e.inherit(e.QUOTE_STRING_MODE,{relevance:0})]},{className:"attribute",begin:/\w+/,relevance:0,keywords:{nomarkup:"order deny allow setenv rewriterule rewriteengine rewritecond documentroot sethandler errordocument loadmodule options header listen serverroot servername"},starts:{end:/$/,relevance:0,keywords:{literal:"on off all deny allow"},contains:[{className:"meta",begin:"\\s\\[",end:"\\]$"},{className:"variable",begin:"[\\$%]\\{",end:"\\}",contains:["self",{className:"number",begin:"[\\$%]\\d+"}]},n,{className:"number",begin:"\\d+"},e.QUOTE_STRING_MODE]}}],illegal:/\S/}}}()); +hljs.registerLanguage("bash",function(){"use strict";return function(e){const s={};Object.assign(s,{className:"variable",variants:[{begin:/\$[\w\d#@][\w\d_]*/},{begin:/\$\{/,end:/\}/,contains:[{begin:/:-/,contains:[s]}]}]});const t={className:"subst",begin:/\$\(/,end:/\)/,contains:[e.BACKSLASH_ESCAPE]},n={className:"string",begin:/"/,end:/"/,contains:[e.BACKSLASH_ESCAPE,s,t]};t.contains.push(n);const 
a={begin:/\$\(\(/,end:/\)\)/,contains:[{begin:/\d+#[0-9a-f]+/,className:"number"},e.NUMBER_MODE,s]},i=e.SHEBANG({binary:"(fish|bash|zsh|sh|csh|ksh|tcsh|dash|scsh)",relevance:10}),c={className:"function",begin:/\w[\w\d_]*\s*\(\s*\)\s*\{/,returnBegin:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/\w[\w\d_]*/})],relevance:0};return{name:"Bash",aliases:["sh","zsh"],keywords:{$pattern:/\b-?[a-z\._]+\b/,keyword:"if then else elif fi for while in do done case esac function",literal:"true false",built_in:"break cd continue eval exec exit export getopts hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile printf read readarray source type typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry compvalues dirs disable disown echotc echoti emulate fc fg float functions getcap getln history integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp",_:"-ne -eq -lt -gt -f -d -e -s -l -a"},contains:[i,e.SHEBANG(),c,a,e.HASH_COMMENT_MODE,n,{className:"",begin:/\\"/},{className:"string",begin:/'/,end:/'/},s]}}}()); +hljs.registerLanguage("c-like",function(){"use strict";return function(e){function t(e){return"(?:"+e+")?"}var n="(decltype\\(auto\\)|"+t("[a-zA-Z_]\\w*::")+"[a-zA-Z_]\\w*"+t("<.*?>")+")",r={className:"keyword",begin:"\\b[a-z\\d_]*_t\\b"},a={className:"string",variants:[{begin:'(u8?|U|L)?"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{begin:"(u8?|U|L)?'(\\\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4,8}|[0-7]{3}|\\S)|.)",end:"'",illegal:"."},e.END_SAME_AS_BEGIN({begin:/(?:u8?|U|L)?R"([^()\\ ]{0,16})\(/,end:/\)([^()\\ 
]{0,16})"/})]},i={className:"number",variants:[{begin:"\\b(0b[01']+)"},{begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],relevance:0},s={className:"meta",begin:/#\s*[a-z]+\b/,end:/$/,keywords:{"meta-keyword":"if else elif endif define undef warning error line pragma _Pragma ifdef ifndef include"},contains:[{begin:/\\\n/,relevance:0},e.inherit(a,{className:"meta-string"}),{className:"meta-string",begin:/<.*?>/,end:/$/,illegal:"\\n"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},o={className:"title",begin:t("[a-zA-Z_]\\w*::")+e.IDENT_RE,relevance:0},c=t("[a-zA-Z_]\\w*::")+e.IDENT_RE+"\\s*\\(",l={keyword:"int float while private char char8_t char16_t char32_t catch import module export virtual operator sizeof dynamic_cast|10 typedef const_cast|10 const for static_cast|10 union namespace unsigned long volatile static protected bool template mutable if public friend do goto auto void enum else break extern using asm case typeid wchar_t short reinterpret_cast|10 default double register explicit signed typename try this switch continue inline delete alignas alignof constexpr consteval constinit decltype concept co_await co_return co_yield requires noexcept static_assert thread_local restrict final override atomic_bool atomic_char atomic_schar atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong atomic_ullong new throw return and and_eq bitand bitor compl not not_eq or or_eq xor xor_eq",built_in:"std string wstring cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream auto_ptr deque list queue stack vector map set pair bitset multiset multimap unordered_set unordered_map unordered_multiset unordered_multimap priority_queue make_pair array shared_ptr abort terminate abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf future isalnum 
isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf endl initializer_list unique_ptr _Bool complex _Complex imaginary _Imaginary",literal:"true false nullptr NULL"},d=[r,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,i,a],_={variants:[{begin:/=/,end:/;/},{begin:/\(/,end:/\)/},{beginKeywords:"new throw return else",end:/;/}],keywords:l,contains:d.concat([{begin:/\(/,end:/\)/,keywords:l,contains:d.concat(["self"]),relevance:0}]),relevance:0},u={className:"function",begin:"("+n+"[\\*&\\s]+)+"+c,returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:l,illegal:/[^\w\s\*&:<>]/,contains:[{begin:"decltype\\(auto\\)",keywords:l,relevance:0},{begin:c,returnBegin:!0,contains:[o],relevance:0},{className:"params",begin:/\(/,end:/\)/,keywords:l,relevance:0,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,i,r,{begin:/\(/,end:/\)/,keywords:l,relevance:0,contains:["self",e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,i,r]}]},r,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s]};return{aliases:["c","cc","h","c++","h++","hpp","hh","hxx","cxx"],keywords:l,disableAutodetect:!0,illegal:"",keywords:l,contains:["self",r]},{begin:e.IDENT_RE+"::",keywords:l},{className:"class",beginKeywords:"class struct",end:/[{;:]/,contains:[{begin://,contains:["self"]},e.TITLE_MODE]}]),exports:{preprocessor:s,strings:a,keywords:l}}}}()); +hljs.registerLanguage("c",function(){"use strict";return function(e){var n=e.getLanguage("c-like").rawDefinition();return n.name="C",n.aliases=["c","h"],n}}()); +hljs.registerLanguage("coffeescript",function(){"use strict";const 
e=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],n=["true","false","null","undefined","NaN","Infinity"],a=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]);return function(r){var t={keyword:e.concat(["then","unless","until","loop","by","when","and","or","is","isnt","not"]).filter((e=>n=>!e.includes(n))(["var","const","let","function","static"])).join(" "),literal:n.concat(["yes","no","on","off"]).join(" "),built_in:a.concat(["npm","print"]).join(" ")},i="[A-Za-z$_][0-9A-Za-z$_]*",s={className:"subst",begin:/#\{/,end:/}/,keywords:t},o=[r.BINARY_NUMBER_MODE,r.inherit(r.C_NUMBER_MODE,{starts:{end:"(\\s*/)?",relevance:0}}),{className:"string",variants:[{begin:/'''/,end:/'''/,contains:[r.BACKSLASH_ESCAPE]},{begin:/'/,end:/'/,contains:[r.BACKSLASH_ESCAPE]},{begin:/"""/,end:/"""/,contains:[r.BACKSLASH_ESCAPE,s]},{begin:/"/,end:/"/,contains:[r.BACKSLASH_ESCAPE,s]}]},{className:"regexp",variants:[{begin:"///",end:"///",contains:[s,r.HASH_COMMENT_MODE]},{begin:"//[gim]{0,3}(?=\\W)",relevance:0},{begin:/\/(?![ 
*]).*?(?![\\]).\/[gim]{0,3}(?=\W)/}]},{begin:"@"+i},{subLanguage:"javascript",excludeBegin:!0,excludeEnd:!0,variants:[{begin:"```",end:"```"},{begin:"`",end:"`"}]}];s.contains=o;var c=r.inherit(r.TITLE_MODE,{begin:i}),l={className:"params",begin:"\\([^\\(]",returnBegin:!0,contains:[{begin:/\(/,end:/\)/,keywords:t,contains:["self"].concat(o)}]};return{name:"CoffeeScript",aliases:["coffee","cson","iced"],keywords:t,illegal:/\/\*/,contains:o.concat([r.COMMENT("###","###"),r.HASH_COMMENT_MODE,{className:"function",begin:"^\\s*"+i+"\\s*=\\s*(\\(.*\\))?\\s*\\B[-=]>",end:"[-=]>",returnBegin:!0,contains:[c,l]},{begin:/[:\(,=]\s*/,relevance:0,contains:[{className:"function",begin:"(\\(.*\\))?\\s*\\B[-=]>",end:"[-=]>",returnBegin:!0,contains:[l]}]},{className:"class",beginKeywords:"class",end:"$",illegal:/[:="\[\]]/,contains:[{beginKeywords:"extends",endsWithParent:!0,illegal:/[:="\[\]]/,contains:[c]},c]},{begin:i+":",end:":",returnBegin:!0,returnEnd:!0,relevance:0}])}}}()); +hljs.registerLanguage("cpp",function(){"use strict";return function(e){var t=e.getLanguage("c-like").rawDefinition();return t.disableAutodetect=!1,t.name="C++",t.aliases=["cc","c++","h++","hpp","hh","hxx","cxx"],t}}()); +hljs.registerLanguage("csharp",function(){"use strict";return function(e){var n={keyword:"abstract as base bool break byte case catch char checked const continue decimal default delegate do double enum event explicit extern finally fixed float for foreach goto if implicit in int interface internal is lock long object operator out override params private protected public readonly ref sbyte sealed short sizeof stackalloc static string struct switch this try typeof uint ulong unchecked unsafe ushort using virtual void volatile while add alias ascending async await by descending dynamic equals from get global group into join let nameof on orderby partial remove select set value var when where yield",literal:"null false 
true"},i=e.inherit(e.TITLE_MODE,{begin:"[a-zA-Z](\\.?\\w)*"}),a={className:"number",variants:[{begin:"\\b(0b[01']+)"},{begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],relevance:0},s={className:"string",begin:'@"',end:'"',contains:[{begin:'""'}]},t=e.inherit(s,{illegal:/\n/}),l={className:"subst",begin:"{",end:"}",keywords:n},r=e.inherit(l,{illegal:/\n/}),c={className:"string",begin:/\$"/,end:'"',illegal:/\n/,contains:[{begin:"{{"},{begin:"}}"},e.BACKSLASH_ESCAPE,r]},o={className:"string",begin:/\$@"/,end:'"',contains:[{begin:"{{"},{begin:"}}"},{begin:'""'},l]},g=e.inherit(o,{illegal:/\n/,contains:[{begin:"{{"},{begin:"}}"},{begin:'""'},r]});l.contains=[o,c,s,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.C_BLOCK_COMMENT_MODE],r.contains=[g,c,t,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.inherit(e.C_BLOCK_COMMENT_MODE,{illegal:/\n/})];var d={variants:[o,c,s,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},E={begin:"<",end:">",contains:[{beginKeywords:"in out"},i]},_=e.IDENT_RE+"(<"+e.IDENT_RE+"(\\s*,\\s*"+e.IDENT_RE+")*>)?(\\[\\])?",b={begin:"@"+e.IDENT_RE,relevance:0};return{name:"C#",aliases:["cs","c#"],keywords:n,illegal:/::/,contains:[e.COMMENT("///","$",{returnBegin:!0,contains:[{className:"doctag",variants:[{begin:"///",relevance:0},{begin:"\x3c!--|--\x3e"},{begin:""}]}]}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:"meta",begin:"#",end:"$",keywords:{"meta-keyword":"if else elif endif define undef warning error line region endregion pragma checksum"}},d,a,{beginKeywords:"class interface",end:/[{;=]/,illegal:/[^\s:,]/,contains:[{beginKeywords:"where 
class"},i,E,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{beginKeywords:"namespace",end:/[{;=]/,illegal:/[^\s:]/,contains:[i,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"meta",begin:"^\\s*\\[",excludeBegin:!0,end:"\\]",excludeEnd:!0,contains:[{className:"meta-string",begin:/"/,end:/"/}]},{beginKeywords:"new return throw await else",relevance:0},{className:"function",begin:"("+_+"\\s+)+"+e.IDENT_RE+"\\s*(\\<.+\\>)?\\s*\\(",returnBegin:!0,end:/\s*[{;=]/,excludeEnd:!0,keywords:n,contains:[{begin:e.IDENT_RE+"\\s*(\\<.+\\>)?\\s*\\(",returnBegin:!0,contains:[e.TITLE_MODE,E],relevance:0},{className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:n,relevance:0,contains:[d,a,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},b]}}}()); +hljs.registerLanguage("css",function(){"use strict";return function(e){var n={begin:/(?:[A-Z\_\.\-]+|--[a-zA-Z0-9_-]+)\s*:/,returnBegin:!0,end:";",endsWithParent:!0,contains:[{className:"attribute",begin:/\S/,end:":",excludeEnd:!0,starts:{endsWithParent:!0,excludeEnd:!0,contains:[{begin:/[\w-]+\(/,returnBegin:!0,contains:[{className:"built_in",begin:/[\w-]+/},{begin:/\(/,end:/\)/,contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.CSS_NUMBER_MODE]}]},e.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,e.C_BLOCK_COMMENT_MODE,{className:"number",begin:"#[0-9A-Fa-f]+"},{className:"meta",begin:"!important"}]}}]};return{name:"CSS",case_insensitive:!0,illegal:/[=\/|'\$]/,contains:[e.C_BLOCK_COMMENT_MODE,{className:"selector-id",begin:/#[A-Za-z0-9_-]+/},{className:"selector-class",begin:/\.[A-Za-z0-9_-]+/},{className:"selector-attr",begin:/\[/,end:/\]/,illegal:"$",contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},{className:"selector-pseudo",begin:/:(:)?[a-zA-Z0-9\_\-\+\(\)"'.]+/},{begin:"@(page|font-face)",lexemes:"@[a-z-]+",keywords:"@page 
@font-face"},{begin:"@",end:"[{;]",illegal:/:/,returnBegin:!0,contains:[{className:"keyword",begin:/@\-?\w[\w]*(\-\w+)*/},{begin:/\s/,endsWithParent:!0,excludeEnd:!0,relevance:0,keywords:"and or not only",contains:[{begin:/[a-z-]+:/,className:"attribute"},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.CSS_NUMBER_MODE]}]},{className:"selector-tag",begin:"[a-zA-Z-][a-zA-Z0-9_-]*",relevance:0},{begin:"{",end:"}",illegal:/\S/,contains:[e.C_BLOCK_COMMENT_MODE,n]}]}}}()); +hljs.registerLanguage("diff",function(){"use strict";return function(e){return{name:"Diff",aliases:["patch"],contains:[{className:"meta",relevance:10,variants:[{begin:/^@@ +\-\d+,\d+ +\+\d+,\d+ +@@$/},{begin:/^\*\*\* +\d+,\d+ +\*\*\*\*$/},{begin:/^\-\-\- +\d+,\d+ +\-\-\-\-$/}]},{className:"comment",variants:[{begin:/Index: /,end:/$/},{begin:/={3,}/,end:/$/},{begin:/^\-{3}/,end:/$/},{begin:/^\*{3} /,end:/$/},{begin:/^\+{3}/,end:/$/},{begin:/^\*{15}$/}]},{className:"addition",begin:"^\\+",end:"$"},{className:"deletion",begin:"^\\-",end:"$"},{className:"addition",begin:"^\\!",end:"$"}]}}}()); +// Add Erlang language support +hljs.registerLanguage("erlang",function(){"use strict";return function(e){var n="[a-z'][a-zA-Z0-9_']*",r="("+n+":"+n+"|"+n+")",a={keyword:"after and andalso|10 band begin bnot bor bsl bzr bxor case catch cond div end fun if let not of orelse|10 query receive rem try when xor",literal:"false 
true"},i=e.COMMENT("%","$"),c={className:"number",begin:"\\b(\\d+(_\\d+)*#[a-fA-F0-9]+(_[a-fA-F0-9]+)*|\\d+(_\\d+)*(\\.\\d+(_\\d+)*)?([eE][-+]?\\d+)?)",relevance:0},s={begin:"fun\\s+"+n+"/\\d+"},t={begin:r+"\\(",end:"\\)",returnBegin:!0,relevance:0,contains:[{begin:r,relevance:0},{begin:"\\(",end:"\\)",endsWithParent:!0,returnEnd:!0,relevance:0}]},d={begin:"{",end:"}",relevance:0},o={begin:"\\b_([A-Z][A-Za-z0-9_]*)?",relevance:0},l={begin:"[A-Z][a-zA-Z0-9_]*",relevance:0},b={begin:"#"+e.UNDERSCORE_IDENT_RE,relevance:0,returnBegin:!0,contains:[{begin:"#"+e.UNDERSCORE_IDENT_RE,relevance:0},{begin:"{",end:"}",relevance:0}]},g={beginKeywords:"fun receive if try case",end:"end",keywords:a};g.contains=[i,s,e.inherit(e.APOS_STRING_MODE,{className:""}),g,t,e.QUOTE_STRING_MODE,c,d,o,l,b];var u=[i,s,g,t,e.QUOTE_STRING_MODE,c,d,o,l,b];t.contains[1].contains=u,d.contains=u,b.contains[1].contains=u;var E={className:"params",begin:"\\(",end:"\\)",contains:u};return{name:"Erlang",aliases:["erl"],keywords:a,illegal:"(",returnBegin:!0,illegal:"\\(|#|//|/\\*|\\\\|:|;",contains:[E,e.inherit(e.TITLE_MODE,{begin:n})],starts:{end:";|\\.",keywords:a,contains:u}},i,{begin:"^-",end:"\\.",relevance:0,excludeEnd:!0,returnBegin:!0,keywords:{$pattern:"-"+e.IDENT_RE,keyword:"-module -record -undef -export -ifdef -ifndef -author -copyright -doc -vsn -import -include -include_lib -compile -define -else -endif -file -behaviour -behavior -spec"},contains:[E]},c,e.QUOTE_STRING_MODE,b,o,l,d,{begin:/\.$/}]}}}()); \ No newline at end of file From 76966e99f9142ac0c57f927e0c555491dd4153b7 Mon Sep 17 00:00:00 2001 From: Dylan Shade <63427984+dpshade@users.noreply.github.com> Date: Thu, 18 Sep 2025 17:21:24 -0400 Subject: [PATCH 09/17] docs: Add Erlang language support for syntax highlighting - Introduce `erlang.min.js` for Erlang grammar in Highlight.js to enhance code highlighting in documentation. 
- Update `highlight.js` to include the new Erlang language support, improving the overall syntax highlighting capabilities. --- docs/docs/book/theme/erlang.min.js | 27 + docs/docs/book/theme/highlight.js | 1239 ++++++++++++++++++++++++++++ 2 files changed, 1266 insertions(+) create mode 100644 docs/docs/book/theme/erlang.min.js create mode 100644 docs/docs/book/theme/highlight.js diff --git a/docs/docs/book/theme/erlang.min.js b/docs/docs/book/theme/erlang.min.js new file mode 100644 index 000000000..9b48a797d --- /dev/null +++ b/docs/docs/book/theme/erlang.min.js @@ -0,0 +1,27 @@ +/*! `erlang` grammar compiled for Highlight.js 11.9.0 */ +(()=>{var e=(()=>{"use strict";return e=>{ +const n="[a-z'][a-zA-Z0-9_']*",r="("+n+":"+n+"|"+n+")",a={ +keyword:"after and andalso|10 band begin bnot bor bsl bzr bxor case catch cond div end fun if let not of orelse|10 query receive rem try when xor", +literal:"false true"},i=e.COMMENT("%","$"),s={className:"number", +begin:"\\b(\\d+(_\\d+)*#[a-fA-F0-9]+(_[a-fA-F0-9]+)*|\\d+(_\\d+)*(\\.\\d+(_\\d+)*)?([eE][-+]?\\d+)?)", +relevance:0},c={begin:"fun\\s+"+n+"/\\d+"},t={begin:r+"\\(",end:"\\)", +returnBegin:!0,relevance:0,contains:[{begin:r,relevance:0},{begin:"\\(", +end:"\\)",endsWithParent:!0,returnEnd:!0,relevance:0}]},d={begin:/\{/,end:/\}/, +relevance:0},o={begin:"\\b_([A-Z][A-Za-z0-9_]*)?",relevance:0},l={ +begin:"[A-Z][a-zA-Z0-9_]*",relevance:0},b={begin:"#"+e.UNDERSCORE_IDENT_RE, +relevance:0,returnBegin:!0,contains:[{begin:"#"+e.UNDERSCORE_IDENT_RE, +relevance:0},{begin:/\{/,end:/\}/,relevance:0}]},g={ +beginKeywords:"fun receive if try case",end:"end",keywords:a} +;g.contains=[i,c,e.inherit(e.APOS_STRING_MODE,{className:"" +}),g,t,e.QUOTE_STRING_MODE,s,d,o,l,b] +;const E=[i,c,g,t,e.QUOTE_STRING_MODE,s,d,o,l,b] +;t.contains[1].contains=E,d.contains=E,b.contains[1].contains=E;const u={ +className:"params",begin:"\\(",end:"\\)",contains:E};return{name:"Erlang", +aliases:["erl"],keywords:a,illegal:"(",returnBegin:!0, 
+illegal:"\\(|#|//|/\\*|\\\\|:|;",contains:[u,e.inherit(e.TITLE_MODE,{begin:n})], +starts:{end:";|\\.",keywords:a,contains:E}},i,{begin:"^-",end:"\\.",relevance:0, +excludeEnd:!0,returnBegin:!0,keywords:{$pattern:"-"+e.IDENT_RE, +keyword:["-module","-record","-undef","-export","-ifdef","-ifndef","-author","-copyright","-doc","-vsn","-import","-include","-include_lib","-compile","-define","-else","-endif","-file","-behaviour","-behavior","-spec"].map((e=>e+"|1.5")).join(" ") +},contains:[u]},s,e.QUOTE_STRING_MODE,b,o,l,d,{begin:/\.$/}]}}})() +;hljs.registerLanguage("erlang",e)})(); \ No newline at end of file diff --git a/docs/docs/book/theme/highlight.js b/docs/docs/book/theme/highlight.js new file mode 100644 index 000000000..e72ef8c77 --- /dev/null +++ b/docs/docs/book/theme/highlight.js @@ -0,0 +1,1239 @@ +/*! + Highlight.js v11.9.0 (git: f47103d4f1) + (c) 2006-2023 undefined and other contributors + License: BSD-3-Clause + */ +var hljs=function(){"use strict";function e(n){ +return n instanceof Map?n.clear=n.delete=n.set=()=>{ +throw Error("map is read-only")}:n instanceof Set&&(n.add=n.clear=n.delete=()=>{ +throw Error("set is read-only") +}),Object.freeze(n),Object.getOwnPropertyNames(n).forEach((t=>{ +const a=n[t],i=typeof a;"object"!==i&&"function"!==i||Object.isFrozen(a)||e(a) +})),n}class n{constructor(e){ +void 0===e.data&&(e.data={}),this.data=e.data,this.isMatchIgnored=!1} +ignoreMatch(){this.isMatchIgnored=!0}}function t(e){ +return e.replace(/&/g,"&").replace(//g,">").replace(/"/g,""").replace(/'/g,"'") +}function a(e,...n){const t=Object.create(null);for(const n in e)t[n]=e[n] +;return n.forEach((e=>{for(const n in e)t[n]=e[n]})),t}const i=e=>!!e.scope +;class r{constructor(e,n){ +this.buffer="",this.classPrefix=n.classPrefix,e.walk(this)}addText(e){ +this.buffer+=t(e)}openNode(e){if(!i(e))return;const n=((e,{prefix:n})=>{ +if(e.startsWith("language:"))return e.replace("language:","language-") +;if(e.includes(".")){const t=e.split(".") 
+;return[`${n}${t.shift()}`,...t.map(((e,n)=>`${e}${"_".repeat(n+1)}`))].join(" ") +}return`${n}${e}`})(e.scope,{prefix:this.classPrefix});this.span(n)} +closeNode(e){i(e)&&(this.buffer+="
")}value(){return this.buffer}span(e){ +this.buffer+=``}}const s=(e={})=>{const n={children:[]} +;return Object.assign(n,e),n};class o{constructor(){ +this.rootNode=s(),this.stack=[this.rootNode]}get top(){ +return this.stack[this.stack.length-1]}get root(){return this.rootNode}add(e){ +this.top.children.push(e)}openNode(e){const n=s({scope:e}) +;this.add(n),this.stack.push(n)}closeNode(){ +if(this.stack.length>1)return this.stack.pop()}closeAllNodes(){ +for(;this.closeNode(););}toJSON(){return JSON.stringify(this.rootNode,null,4)} +walk(e){return this.constructor._walk(e,this.rootNode)}static _walk(e,n){ +return"string"==typeof n?e.addText(n):n.children&&(e.openNode(n), +n.children.forEach((n=>this._walk(e,n))),e.closeNode(n)),e}static _collapse(e){ +"string"!=typeof e&&e.children&&(e.children.every((e=>"string"==typeof e))?e.children=[e.children.join("")]:e.children.forEach((e=>{ +o._collapse(e)})))}}class l extends o{constructor(e){super(),this.options=e} +addText(e){""!==e&&this.add(e)}startScope(e){this.openNode(e)}endScope(){ +this.closeNode()}__addSublanguage(e,n){const t=e.root +;n&&(t.scope="language:"+n),this.add(t)}toHTML(){ +return new r(this,this.options).value()}finalize(){ +return this.closeAllNodes(),!0}}function c(e){ +return e?"string"==typeof e?e:e.source:null}function d(e){return b("(?=",e,")")} +function g(e){return b("(?:",e,")*")}function u(e){return b("(?:",e,")?")} +function b(...e){return e.map((e=>c(e))).join("")}function m(...e){const n=(e=>{ +const n=e[e.length-1] +;return"object"==typeof n&&n.constructor===Object?(e.splice(e.length-1,1),n):{} +})(e);return"("+(n.capture?"":"?:")+e.map((e=>c(e))).join("|")+")"} +function p(e){return RegExp(e.toString()+"|").exec("").length-1} +const _=/\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./ +;function h(e,{joinWith:n}){let t=0;return e.map((e=>{t+=1;const n=t +;let a=c(e),i="";for(;a.length>0;){const e=_.exec(a);if(!e){i+=a;break} +i+=a.substring(0,e.index), 
+a=a.substring(e.index+e[0].length),"\\"===e[0][0]&&e[1]?i+="\\"+(Number(e[1])+n):(i+=e[0], +"("===e[0]&&t++)}return i})).map((e=>`(${e})`)).join(n)} +const f="[a-zA-Z]\\w*",E="[a-zA-Z_]\\w*",y="\\b\\d+(\\.\\d+)?",N="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",w="\\b(0b[01]+)",v={ +begin:"\\\\[\\s\\S]",relevance:0},O={scope:"string",begin:"'",end:"'", +illegal:"\\n",contains:[v]},k={scope:"string",begin:'"',end:'"',illegal:"\\n", +contains:[v]},x=(e,n,t={})=>{const i=a({scope:"comment",begin:e,end:n, +contains:[]},t);i.contains.push({scope:"doctag", +begin:"[ ]*(?=(TODO|FIXME|NOTE|BUG|OPTIMIZE|HACK|XXX):)", +end:/(TODO|FIXME|NOTE|BUG|OPTIMIZE|HACK|XXX):/,excludeBegin:!0,relevance:0}) +;const r=m("I","a","is","so","us","to","at","if","in","it","on",/[A-Za-z]+['](d|ve|re|ll|t|s|n)/,/[A-Za-z]+[-][a-z]+/,/[A-Za-z][a-z]{2,}/) +;return i.contains.push({begin:b(/[ ]+/,"(",r,/[.]?[:]?([.][ ]|[ ])/,"){3}")}),i +},M=x("//","$"),S=x("/\\*","\\*/"),A=x("#","$");var C=Object.freeze({ +__proto__:null,APOS_STRING_MODE:O,BACKSLASH_ESCAPE:v,BINARY_NUMBER_MODE:{ +scope:"number",begin:w,relevance:0},BINARY_NUMBER_RE:w,COMMENT:x, +C_BLOCK_COMMENT_MODE:S,C_LINE_COMMENT_MODE:M,C_NUMBER_MODE:{scope:"number", +begin:N,relevance:0},C_NUMBER_RE:N,END_SAME_AS_BEGIN:e=>Object.assign(e,{ +"on:begin":(e,n)=>{n.data._beginMatch=e[1]},"on:end":(e,n)=>{ +n.data._beginMatch!==e[1]&&n.ignoreMatch()}}),HASH_COMMENT_MODE:A,IDENT_RE:f, +MATCH_NOTHING_RE:/\b\B/,METHOD_GUARD:{begin:"\\.\\s*"+E,relevance:0}, +NUMBER_MODE:{scope:"number",begin:y,relevance:0},NUMBER_RE:y, +PHRASAL_WORDS_MODE:{ +begin:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/ +},QUOTE_STRING_MODE:k,REGEXP_MODE:{scope:"regexp",begin:/\/(?=[^/\n]*\/)/, +end:/\/[gimuy]*/,contains:[v,{begin:/\[/,end:/\]/,relevance:0,contains:[v]}]}, 
+RE_STARTERS_RE:"!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~", +SHEBANG:(e={})=>{const n=/^#![ ]*\// +;return e.binary&&(e.begin=b(n,/.*\b/,e.binary,/\b.*/)),a({scope:"meta",begin:n, +end:/$/,relevance:0,"on:begin":(e,n)=>{0!==e.index&&n.ignoreMatch()}},e)}, +TITLE_MODE:{scope:"title",begin:f,relevance:0},UNDERSCORE_IDENT_RE:E, +UNDERSCORE_TITLE_MODE:{scope:"title",begin:E,relevance:0}});function T(e,n){ +"."===e.input[e.index-1]&&n.ignoreMatch()}function R(e,n){ +void 0!==e.className&&(e.scope=e.className,delete e.className)}function D(e,n){ +n&&e.beginKeywords&&(e.begin="\\b("+e.beginKeywords.split(" ").join("|")+")(?!\\.)(?=\\b|\\s)", +e.__beforeBegin=T,e.keywords=e.keywords||e.beginKeywords,delete e.beginKeywords, +void 0===e.relevance&&(e.relevance=0))}function I(e,n){ +Array.isArray(e.illegal)&&(e.illegal=m(...e.illegal))}function L(e,n){ +if(e.match){ +if(e.begin||e.end)throw Error("begin & end are not supported with match") +;e.begin=e.match,delete e.match}}function B(e,n){ +void 0===e.relevance&&(e.relevance=1)}const $=(e,n)=>{if(!e.beforeMatch)return +;if(e.starts)throw Error("beforeMatch cannot be used with starts") +;const t=Object.assign({},e);Object.keys(e).forEach((n=>{delete e[n] +})),e.keywords=t.keywords,e.begin=b(t.beforeMatch,d(t.begin)),e.starts={ +relevance:0,contains:[Object.assign(t,{endsParent:!0})] +},e.relevance=0,delete t.beforeMatch +},z=["of","and","for","in","not","or","if","then","parent","list","value"],F="keyword" +;function U(e,n,t=F){const a=Object.create(null) +;return"string"==typeof e?i(t,e.split(" ")):Array.isArray(e)?i(t,e):Object.keys(e).forEach((t=>{ +Object.assign(a,U(e[t],n,t))})),a;function i(e,t){ +n&&(t=t.map((e=>e.toLowerCase()))),t.forEach((n=>{const t=n.split("|") +;a[t[0]]=[e,j(t[0],t[1])]}))}}function j(e,n){ +return n?Number(n):(e=>z.includes(e.toLowerCase()))(e)?0:1}const P={},K=e=>{ 
+console.error(e)},H=(e,...n)=>{console.log("WARN: "+e,...n)},q=(e,n)=>{ +P[`${e}/${n}`]||(console.log(`Deprecated as of ${e}. ${n}`),P[`${e}/${n}`]=!0) +},G=Error();function Z(e,n,{key:t}){let a=0;const i=e[t],r={},s={} +;for(let e=1;e<=n.length;e++)s[e+a]=i[e],r[e+a]=!0,a+=p(n[e-1]) +;e[t]=s,e[t]._emit=r,e[t]._multi=!0}function W(e){(e=>{ +e.scope&&"object"==typeof e.scope&&null!==e.scope&&(e.beginScope=e.scope, +delete e.scope)})(e),"string"==typeof e.beginScope&&(e.beginScope={ +_wrap:e.beginScope}),"string"==typeof e.endScope&&(e.endScope={_wrap:e.endScope +}),(e=>{if(Array.isArray(e.begin)){ +if(e.skip||e.excludeBegin||e.returnBegin)throw K("skip, excludeBegin, returnBegin not compatible with beginScope: {}"), +G +;if("object"!=typeof e.beginScope||null===e.beginScope)throw K("beginScope must be object"), +G;Z(e,e.begin,{key:"beginScope"}),e.begin=h(e.begin,{joinWith:""})}})(e),(e=>{ +if(Array.isArray(e.end)){ +if(e.skip||e.excludeEnd||e.returnEnd)throw K("skip, excludeEnd, returnEnd not compatible with endScope: {}"), +G +;if("object"!=typeof e.endScope||null===e.endScope)throw K("endScope must be object"), +G;Z(e,e.end,{key:"endScope"}),e.end=h(e.end,{joinWith:""})}})(e)}function Q(e){ +function n(n,t){ +return RegExp(c(n),"m"+(e.case_insensitive?"i":"")+(e.unicodeRegex?"u":"")+(t?"g":"")) +}class t{constructor(){ +this.matchIndexes={},this.regexes=[],this.matchAt=1,this.position=0} +addRule(e,n){ +n.position=this.position++,this.matchIndexes[this.matchAt]=n,this.regexes.push([n,e]), +this.matchAt+=p(e)+1}compile(){0===this.regexes.length&&(this.exec=()=>null) +;const e=this.regexes.map((e=>e[1]));this.matcherRe=n(h(e,{joinWith:"|" +}),!0),this.lastIndex=0}exec(e){this.matcherRe.lastIndex=this.lastIndex +;const n=this.matcherRe.exec(e);if(!n)return null +;const t=n.findIndex(((e,n)=>n>0&&void 0!==e)),a=this.matchIndexes[t] +;return n.splice(0,t),Object.assign(n,a)}}class i{constructor(){ +this.rules=[],this.multiRegexes=[], 
+this.count=0,this.lastIndex=0,this.regexIndex=0}getMatcher(e){ +if(this.multiRegexes[e])return this.multiRegexes[e];const n=new t +;return this.rules.slice(e).forEach((([e,t])=>n.addRule(e,t))), +n.compile(),this.multiRegexes[e]=n,n}resumingScanAtSamePosition(){ +return 0!==this.regexIndex}considerAll(){this.regexIndex=0}addRule(e,n){ +this.rules.push([e,n]),"begin"===n.type&&this.count++}exec(e){ +const n=this.getMatcher(this.regexIndex);n.lastIndex=this.lastIndex +;let t=n.exec(e) +;if(this.resumingScanAtSamePosition())if(t&&t.index===this.lastIndex);else{ +const n=this.getMatcher(0);n.lastIndex=this.lastIndex+1,t=n.exec(e)} +return t&&(this.regexIndex+=t.position+1, +this.regexIndex===this.count&&this.considerAll()),t}} +if(e.compilerExtensions||(e.compilerExtensions=[]), +e.contains&&e.contains.includes("self"))throw Error("ERR: contains `self` is not supported at the top-level of a language. See documentation.") +;return e.classNameAliases=a(e.classNameAliases||{}),function t(r,s){const o=r +;if(r.isCompiled)return o +;[R,L,W,$].forEach((e=>e(r,s))),e.compilerExtensions.forEach((e=>e(r,s))), +r.__beforeBegin=null,[D,I,B].forEach((e=>e(r,s))),r.isCompiled=!0;let l=null +;return"object"==typeof r.keywords&&r.keywords.$pattern&&(r.keywords=Object.assign({},r.keywords), +l=r.keywords.$pattern, +delete r.keywords.$pattern),l=l||/\w+/,r.keywords&&(r.keywords=U(r.keywords,e.case_insensitive)), +o.keywordPatternRe=n(l,!0), +s&&(r.begin||(r.begin=/\B|\b/),o.beginRe=n(o.begin),r.end||r.endsWithParent||(r.end=/\B|\b/), +r.end&&(o.endRe=n(o.end)), +o.terminatorEnd=c(o.end)||"",r.endsWithParent&&s.terminatorEnd&&(o.terminatorEnd+=(r.end?"|":"")+s.terminatorEnd)), +r.illegal&&(o.illegalRe=n(r.illegal)), +r.contains||(r.contains=[]),r.contains=[].concat(...r.contains.map((e=>(e=>(e.variants&&!e.cachedVariants&&(e.cachedVariants=e.variants.map((n=>a(e,{ +variants:null},n)))),e.cachedVariants?e.cachedVariants:X(e)?a(e,{ +starts:e.starts?a(e.starts):null 
+}):Object.isFrozen(e)?a(e):e))("self"===e?r:e)))),r.contains.forEach((e=>{t(e,o) +})),r.starts&&t(r.starts,s),o.matcher=(e=>{const n=new i +;return e.contains.forEach((e=>n.addRule(e.begin,{rule:e,type:"begin" +}))),e.terminatorEnd&&n.addRule(e.terminatorEnd,{type:"end" +}),e.illegal&&n.addRule(e.illegal,{type:"illegal"}),n})(o),o}(e)}function X(e){ +return!!e&&(e.endsWithParent||X(e.starts))}class V extends Error{ +constructor(e,n){super(e),this.name="HTMLInjectionError",this.html=n}} +const J=t,Y=a,ee=Symbol("nomatch"),ne=t=>{ +const a=Object.create(null),i=Object.create(null),r=[];let s=!0 +;const o="Could not find the language '{}', did you forget to load/include a language module?",c={ +disableAutodetect:!0,name:"Plain text",contains:[]};let p={ +ignoreUnescapedHTML:!1,throwUnescapedHTML:!1,noHighlightRe:/^(no-?highlight)$/i, +languageDetectRe:/\blang(?:uage)?-([\w-]+)\b/i,classPrefix:"hljs-", +cssSelector:"pre code",languages:null,__emitter:l};function _(e){ +return p.noHighlightRe.test(e)}function h(e,n,t){let a="",i="" +;"object"==typeof n?(a=e, +t=n.ignoreIllegals,i=n.language):(q("10.7.0","highlight(lang, code, ...args) has been deprecated."), +q("10.7.0","Please use highlight(code, options) instead.\nhttps://github.com/highlightjs/highlight.js/issues/2277"), +i=e,a=n),void 0===t&&(t=!0);const r={code:a,language:i};x("before:highlight",r) +;const s=r.result?r.result:f(r.language,r.code,t) +;return s.code=r.code,x("after:highlight",s),s}function f(e,t,i,r){ +const l=Object.create(null);function c(){if(!x.keywords)return void S.addText(A) +;let e=0;x.keywordPatternRe.lastIndex=0;let n=x.keywordPatternRe.exec(A),t="" +;for(;n;){t+=A.substring(e,n.index) +;const i=w.case_insensitive?n[0].toLowerCase():n[0],r=(a=i,x.keywords[a]);if(r){ +const[e,a]=r +;if(S.addText(t),t="",l[i]=(l[i]||0)+1,l[i]<=7&&(C+=a),e.startsWith("_"))t+=n[0];else{ +const t=w.classNameAliases[e]||e;g(n[0],t)}}else t+=n[0] +;e=x.keywordPatternRe.lastIndex,n=x.keywordPatternRe.exec(A)}var a 
+;t+=A.substring(e),S.addText(t)}function d(){null!=x.subLanguage?(()=>{ +if(""===A)return;let e=null;if("string"==typeof x.subLanguage){ +if(!a[x.subLanguage])return void S.addText(A) +;e=f(x.subLanguage,A,!0,M[x.subLanguage]),M[x.subLanguage]=e._top +}else e=E(A,x.subLanguage.length?x.subLanguage:null) +;x.relevance>0&&(C+=e.relevance),S.__addSublanguage(e._emitter,e.language) +})():c(),A=""}function g(e,n){ +""!==e&&(S.startScope(n),S.addText(e),S.endScope())}function u(e,n){let t=1 +;const a=n.length-1;for(;t<=a;){if(!e._emit[t]){t++;continue} +const a=w.classNameAliases[e[t]]||e[t],i=n[t];a?g(i,a):(A=i,c(),A=""),t++}} +function b(e,n){ +return e.scope&&"string"==typeof e.scope&&S.openNode(w.classNameAliases[e.scope]||e.scope), +e.beginScope&&(e.beginScope._wrap?(g(A,w.classNameAliases[e.beginScope._wrap]||e.beginScope._wrap), +A=""):e.beginScope._multi&&(u(e.beginScope,n),A="")),x=Object.create(e,{parent:{ +value:x}}),x}function m(e,t,a){let i=((e,n)=>{const t=e&&e.exec(n) +;return t&&0===t.index})(e.endRe,a);if(i){if(e["on:end"]){const a=new n(e) +;e["on:end"](t,a),a.isMatchIgnored&&(i=!1)}if(i){ +for(;e.endsParent&&e.parent;)e=e.parent;return e}} +if(e.endsWithParent)return m(e.parent,t,a)}function _(e){ +return 0===x.matcher.regexIndex?(A+=e[0],1):(D=!0,0)}function h(e){ +const n=e[0],a=t.substring(e.index),i=m(x,e,a);if(!i)return ee;const r=x +;x.endScope&&x.endScope._wrap?(d(), +g(n,x.endScope._wrap)):x.endScope&&x.endScope._multi?(d(), +u(x.endScope,e)):r.skip?A+=n:(r.returnEnd||r.excludeEnd||(A+=n), +d(),r.excludeEnd&&(A=n));do{ +x.scope&&S.closeNode(),x.skip||x.subLanguage||(C+=x.relevance),x=x.parent +}while(x!==i.parent);return i.starts&&b(i.starts,e),r.returnEnd?0:n.length} +let y={};function N(a,r){const o=r&&r[0];if(A+=a,null==o)return d(),0 +;if("begin"===y.type&&"end"===r.type&&y.index===r.index&&""===o){ +if(A+=t.slice(r.index,r.index+1),!s){const n=Error(`0 width match regex (${e})`) +;throw n.languageName=e,n.badRule=y.rule,n}return 1} 
+if(y=r,"begin"===r.type)return(e=>{ +const t=e[0],a=e.rule,i=new n(a),r=[a.__beforeBegin,a["on:begin"]] +;for(const n of r)if(n&&(n(e,i),i.isMatchIgnored))return _(t) +;return a.skip?A+=t:(a.excludeBegin&&(A+=t), +d(),a.returnBegin||a.excludeBegin||(A=t)),b(a,e),a.returnBegin?0:t.length})(r) +;if("illegal"===r.type&&!i){ +const e=Error('Illegal lexeme "'+o+'" for mode "'+(x.scope||"")+'"') +;throw e.mode=x,e}if("end"===r.type){const e=h(r);if(e!==ee)return e} +if("illegal"===r.type&&""===o)return 1 +;if(R>1e5&&R>3*r.index)throw Error("potential infinite loop, way more iterations than matches") +;return A+=o,o.length}const w=v(e) +;if(!w)throw K(o.replace("{}",e)),Error('Unknown language: "'+e+'"') +;const O=Q(w);let k="",x=r||O;const M={},S=new p.__emitter(p);(()=>{const e=[] +;for(let n=x;n!==w;n=n.parent)n.scope&&e.unshift(n.scope) +;e.forEach((e=>S.openNode(e)))})();let A="",C=0,T=0,R=0,D=!1;try{ +if(w.__emitTokens)w.__emitTokens(t,S);else{for(x.matcher.considerAll();;){ +R++,D?D=!1:x.matcher.considerAll(),x.matcher.lastIndex=T +;const e=x.matcher.exec(t);if(!e)break;const n=N(t.substring(T,e.index),e) +;T=e.index+n}N(t.substring(T))}return S.finalize(),k=S.toHTML(),{language:e, +value:k,relevance:C,illegal:!1,_emitter:S,_top:x}}catch(n){ +if(n.message&&n.message.includes("Illegal"))return{language:e,value:J(t), +illegal:!0,relevance:0,_illegalBy:{message:n.message,index:T, +context:t.slice(T-100,T+100),mode:n.mode,resultSoFar:k},_emitter:S};if(s)return{ +language:e,value:J(t),illegal:!1,relevance:0,errorRaised:n,_emitter:S,_top:x} +;throw n}}function E(e,n){n=n||p.languages||Object.keys(a);const t=(e=>{ +const n={value:J(e),illegal:!1,relevance:0,_top:c,_emitter:new p.__emitter(p)} +;return n._emitter.addText(e),n})(e),i=n.filter(v).filter(k).map((n=>f(n,e,!1))) +;i.unshift(t);const r=i.sort(((e,n)=>{ +if(e.relevance!==n.relevance)return n.relevance-e.relevance +;if(e.language&&n.language){if(v(e.language).supersetOf===n.language)return 1 
+;if(v(n.language).supersetOf===e.language)return-1}return 0})),[s,o]=r,l=s +;return l.secondBest=o,l}function y(e){let n=null;const t=(e=>{ +let n=e.className+" ";n+=e.parentNode?e.parentNode.className:"" +;const t=p.languageDetectRe.exec(n);if(t){const n=v(t[1]) +;return n||(H(o.replace("{}",t[1])), +H("Falling back to no-highlight mode for this block.",e)),n?t[1]:"no-highlight"} +return n.split(/\s+/).find((e=>_(e)||v(e)))})(e);if(_(t))return +;if(x("before:highlightElement",{el:e,language:t +}),e.dataset.highlighted)return void console.log("Element previously highlighted. To highlight again, first unset `dataset.highlighted`.",e) +;if(e.children.length>0&&(p.ignoreUnescapedHTML||(console.warn("One of your code blocks includes unescaped HTML. This is a potentially serious security risk."), +console.warn("https://github.com/highlightjs/highlight.js/wiki/security"), +console.warn("The element with unescaped HTML:"), +console.warn(e)),p.throwUnescapedHTML))throw new V("One of your code blocks includes unescaped HTML.",e.innerHTML) +;n=e;const a=n.textContent,r=t?h(a,{language:t,ignoreIllegals:!0}):E(a) +;e.innerHTML=r.value,e.dataset.highlighted="yes",((e,n,t)=>{const a=n&&i[n]||t +;e.classList.add("hljs"),e.classList.add("language-"+a) +})(e,t,r.language),e.result={language:r.language,re:r.relevance, +relevance:r.relevance},r.secondBest&&(e.secondBest={ +language:r.secondBest.language,relevance:r.secondBest.relevance +}),x("after:highlightElement",{el:e,result:r,text:a})}let N=!1;function w(){ +"loading"!==document.readyState?document.querySelectorAll(p.cssSelector).forEach(y):N=!0 +}function v(e){return e=(e||"").toLowerCase(),a[e]||a[i[e]]} +function O(e,{languageName:n}){"string"==typeof e&&(e=[e]),e.forEach((e=>{ +i[e.toLowerCase()]=n}))}function k(e){const n=v(e) +;return n&&!n.disableAutodetect}function x(e,n){const t=e;r.forEach((e=>{ +e[t]&&e[t](n)}))} +"undefined"!=typeof window&&window.addEventListener&&window.addEventListener("DOMContentLoaded",(()=>{ 
+N&&w()}),!1),Object.assign(t,{highlight:h,highlightAuto:E,highlightAll:w, +highlightElement:y, +highlightBlock:e=>(q("10.7.0","highlightBlock will be removed entirely in v12.0"), +q("10.7.0","Please use highlightElement now."),y(e)),configure:e=>{p=Y(p,e)}, +initHighlighting:()=>{ +w(),q("10.6.0","initHighlighting() deprecated. Use highlightAll() now.")}, +initHighlightingOnLoad:()=>{ +w(),q("10.6.0","initHighlightingOnLoad() deprecated. Use highlightAll() now.") +},registerLanguage:(e,n)=>{let i=null;try{i=n(t)}catch(n){ +if(K("Language definition for '{}' could not be registered.".replace("{}",e)), +!s)throw n;K(n),i=c} +i.name||(i.name=e),a[e]=i,i.rawDefinition=n.bind(null,t),i.aliases&&O(i.aliases,{ +languageName:e})},unregisterLanguage:e=>{delete a[e] +;for(const n of Object.keys(i))i[n]===e&&delete i[n]}, +listLanguages:()=>Object.keys(a),getLanguage:v,registerAliases:O, +autoDetection:k,inherit:Y,addPlugin:e=>{(e=>{ +e["before:highlightBlock"]&&!e["before:highlightElement"]&&(e["before:highlightElement"]=n=>{ +e["before:highlightBlock"](Object.assign({block:n.el},n)) +}),e["after:highlightBlock"]&&!e["after:highlightElement"]&&(e["after:highlightElement"]=n=>{ +e["after:highlightBlock"](Object.assign({block:n.el},n))})})(e),r.push(e)}, +removePlugin:e=>{const n=r.indexOf(e);-1!==n&&r.splice(n,1)}}),t.debugMode=()=>{ +s=!1},t.safeMode=()=>{s=!0},t.versionString="11.9.0",t.regex={concat:b, +lookahead:d,either:m,optional:u,anyNumberOfTimes:g} +;for(const n in C)"object"==typeof C[n]&&e(C[n]);return Object.assign(t,C),t +},te=ne({});te.newInstance=()=>ne({});var ae=te;const ie=e=>({IMPORTANT:{ +scope:"meta",begin:"!important"},BLOCK_COMMENT:e.C_BLOCK_COMMENT_MODE,HEXCOLOR:{ +scope:"number",begin:/#(([0-9a-fA-F]{3,4})|(([0-9a-fA-F]{2}){3,4}))\b/}, +FUNCTION_DISPATCH:{className:"built_in",begin:/[\w-]+(?=\()/}, +ATTRIBUTE_SELECTOR_MODE:{scope:"selector-attr",begin:/\[/,end:/\]/,illegal:"$", +contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},CSS_NUMBER_MODE:{ 
+scope:"number", +begin:e.NUMBER_RE+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?", +relevance:0},CSS_VARIABLE:{className:"attr",begin:/--[A-Za-z_][A-Za-z0-9_-]*/} +}),re=["a","abbr","address","article","aside","audio","b","blockquote","body","button","canvas","caption","cite","code","dd","del","details","dfn","div","dl","dt","em","fieldset","figcaption","figure","footer","form","h1","h2","h3","h4","h5","h6","header","hgroup","html","i","iframe","img","input","ins","kbd","label","legend","li","main","mark","menu","nav","object","ol","p","q","quote","samp","section","span","strong","summary","sup","table","tbody","td","textarea","tfoot","th","thead","time","tr","ul","var","video"],se=["any-hover","any-pointer","aspect-ratio","color","color-gamut","color-index","device-aspect-ratio","device-height","device-width","display-mode","forced-colors","grid","height","hover","inverted-colors","monochrome","orientation","overflow-block","overflow-inline","pointer","prefers-color-scheme","prefers-contrast","prefers-reduced-motion","prefers-reduced-transparency","resolution","scan","scripting","update","width","min-width","max-width","min-height","max-height"],oe=["active","any-link","blank","checked","current","default","defined","dir","disabled","drop","empty","enabled","first","first-child","first-of-type","fullscreen","future","focus","focus-visible","focus-within","has","host","host-context","hover","indeterminate","in-range","invalid","is","lang","last-child","last-of-type","left","link","local-link","not","nth-child","nth-col","nth-last-child","nth-last-col","nth-last-of-type","nth-of-type","only-child","only-of-type","optional","out-of-range","past","placeholder-shown","read-only","read-write","required","right","root","scope","target","target-within","user-invalid","valid","visited","where"],le=["after","backdrop","before","cue","cue-region","first-letter","first-line","grammar-error","marker","part","placeholder","sele
ction","slotted","spelling-error"],ce=["align-content","align-items","align-self","all","animation","animation-delay","animation-direction","animation-duration","animation-fill-mode","animation-iteration-count","animation-name","animation-play-state","animation-timing-function","backface-visibility","background","background-attachment","background-blend-mode","background-clip","background-color","background-image","background-origin","background-position","background-repeat","background-size","block-size","border","border-block","border-block-color","border-block-end","border-block-end-color","border-block-end-style","border-block-end-width","border-block-start","border-block-start-color","border-block-start-style","border-block-start-width","border-block-style","border-block-width","border-bottom","border-bottom-color","border-bottom-left-radius","border-bottom-right-radius","border-bottom-style","border-bottom-width","border-collapse","border-color","border-image","border-image-outset","border-image-repeat","border-image-slice","border-image-source","border-image-width","border-inline","border-inline-color","border-inline-end","border-inline-end-color","border-inline-end-style","border-inline-end-width","border-inline-start","border-inline-start-color","border-inline-start-style","border-inline-start-width","border-inline-style","border-inline-width","border-left","border-left-color","border-left-style","border-left-width","border-radius","border-right","border-right-color","border-right-style","border-right-width","border-spacing","border-style","border-top","border-top-color","border-top-left-radius","border-top-right-radius","border-top-style","border-top-width","border-width","bottom","box-decoration-break","box-shadow","box-sizing","break-after","break-before","break-inside","caption-side","caret-color","clear","clip","clip-path","clip-rule","color","column-count","column-fill","column-gap","column-rule","column-rule-color","column-rule-style","column-rule-wi
dth","column-span","column-width","columns","contain","content","content-visibility","counter-increment","counter-reset","cue","cue-after","cue-before","cursor","direction","display","empty-cells","filter","flex","flex-basis","flex-direction","flex-flow","flex-grow","flex-shrink","flex-wrap","float","flow","font","font-display","font-family","font-feature-settings","font-kerning","font-language-override","font-size","font-size-adjust","font-smoothing","font-stretch","font-style","font-synthesis","font-variant","font-variant-caps","font-variant-east-asian","font-variant-ligatures","font-variant-numeric","font-variant-position","font-variation-settings","font-weight","gap","glyph-orientation-vertical","grid","grid-area","grid-auto-columns","grid-auto-flow","grid-auto-rows","grid-column","grid-column-end","grid-column-start","grid-gap","grid-row","grid-row-end","grid-row-start","grid-template","grid-template-areas","grid-template-columns","grid-template-rows","hanging-punctuation","height","hyphens","icon","image-orientation","image-rendering","image-resolution","ime-mode","inline-size","isolation","justify-content","left","letter-spacing","line-break","line-height","list-style","list-style-image","list-style-position","list-style-type","margin","margin-block","margin-block-end","margin-block-start","margin-bottom","margin-inline","margin-inline-end","margin-inline-start","margin-left","margin-right","margin-top","marks","mask","mask-border","mask-border-mode","mask-border-outset","mask-border-repeat","mask-border-slice","mask-border-source","mask-border-width","mask-clip","mask-composite","mask-image","mask-mode","mask-origin","mask-position","mask-repeat","mask-size","mask-type","max-block-size","max-height","max-inline-size","max-width","min-block-size","min-height","min-inline-size","min-width","mix-blend-mode","nav-down","nav-index","nav-left","nav-right","nav-up","none","normal","object-fit","object-position","opacity","order","orphans","outline","outline-color",
"outline-offset","outline-style","outline-width","overflow","overflow-wrap","overflow-x","overflow-y","padding","padding-block","padding-block-end","padding-block-start","padding-bottom","padding-inline","padding-inline-end","padding-inline-start","padding-left","padding-right","padding-top","page-break-after","page-break-before","page-break-inside","pause","pause-after","pause-before","perspective","perspective-origin","pointer-events","position","quotes","resize","rest","rest-after","rest-before","right","row-gap","scroll-margin","scroll-margin-block","scroll-margin-block-end","scroll-margin-block-start","scroll-margin-bottom","scroll-margin-inline","scroll-margin-inline-end","scroll-margin-inline-start","scroll-margin-left","scroll-margin-right","scroll-margin-top","scroll-padding","scroll-padding-block","scroll-padding-block-end","scroll-padding-block-start","scroll-padding-bottom","scroll-padding-inline","scroll-padding-inline-end","scroll-padding-inline-start","scroll-padding-left","scroll-padding-right","scroll-padding-top","scroll-snap-align","scroll-snap-stop","scroll-snap-type","scrollbar-color","scrollbar-gutter","scrollbar-width","shape-image-threshold","shape-margin","shape-outside","speak","speak-as","src","tab-size","table-layout","text-align","text-align-all","text-align-last","text-combine-upright","text-decoration","text-decoration-color","text-decoration-line","text-decoration-style","text-emphasis","text-emphasis-color","text-emphasis-position","text-emphasis-style","text-indent","text-justify","text-orientation","text-overflow","text-rendering","text-shadow","text-transform","text-underline-position","top","transform","transform-box","transform-origin","transform-style","transition","transition-delay","transition-duration","transition-property","transition-timing-function","unicode-bidi","vertical-align","visibility","voice-balance","voice-duration","voice-family","voice-pitch","voice-range","voice-rate","voice-stress","voice-volume","white-spac
e","widows","width","will-change","word-break","word-spacing","word-wrap","writing-mode","z-index"].reverse(),de=oe.concat(le) +;var ge="[0-9](_*[0-9])*",ue=`\\.(${ge})`,be="[0-9a-fA-F](_*[0-9a-fA-F])*",me={ +className:"number",variants:[{ +begin:`(\\b(${ge})((${ue})|\\.)?|(${ue}))[eE][+-]?(${ge})[fFdD]?\\b`},{ +begin:`\\b(${ge})((${ue})[fFdD]?\\b|\\.([fFdD]\\b)?)`},{ +begin:`(${ue})[fFdD]?\\b`},{begin:`\\b(${ge})[fFdD]\\b`},{ +begin:`\\b0[xX]((${be})\\.?|(${be})?\\.(${be}))[pP][+-]?(${ge})[fFdD]?\\b`},{ +begin:"\\b(0|[1-9](_*[0-9])*)[lL]?\\b"},{begin:`\\b0[xX](${be})[lL]?\\b`},{ +begin:"\\b0(_*[0-7])*[lL]?\\b"},{begin:"\\b0[bB][01](_*[01])*[lL]?\\b"}], +relevance:0};function pe(e,n,t){return-1===t?"":e.replace(n,(a=>pe(e,n,t-1)))} +const _e="[A-Za-z$_][0-9A-Za-z$_]*",he=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],fe=["true","false","null","undefined","NaN","Infinity"],Ee=["Object","Function","Boolean","Symbol","Math","Date","Number","BigInt","String","RegExp","Array","Float32Array","Float64Array","Int8Array","Uint8Array","Uint8ClampedArray","Int16Array","Int32Array","Uint16Array","Uint32Array","BigInt64Array","BigUint64Array","Set","Map","WeakSet","WeakMap","ArrayBuffer","SharedArrayBuffer","Atomics","DataView","JSON","Promise","Generator","GeneratorFunction","AsyncFunction","Reflect","Proxy","Intl","WebAssembly"],ye=["Error","EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"],Ne=["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],we=["arguments","this","super","console","window","document","localStorage","
sessionStorage","module","global"],ve=[].concat(Ne,Ee,ye) +;function Oe(e){const n=e.regex,t=_e,a={begin:/<[A-Za-z0-9\\._:-]+/, +end:/\/[A-Za-z0-9\\._:-]+>|\/>/,isTrulyOpeningTag:(e,n)=>{ +const t=e[0].length+e.index,a=e.input[t] +;if("<"===a||","===a)return void n.ignoreMatch();let i +;">"===a&&(((e,{after:n})=>{const t="",M={ +match:[/const|var|let/,/\s+/,t,/\s*/,/=\s*/,/(async\s*)?/,n.lookahead(x)], +keywords:"async",className:{1:"keyword",3:"title.function"},contains:[f]} +;return{name:"JavaScript",aliases:["js","jsx","mjs","cjs"],keywords:i,exports:{ +PARAMS_CONTAINS:h,CLASS_REFERENCE:y},illegal:/#(?![$_A-z])/, +contains:[e.SHEBANG({label:"shebang",binary:"node",relevance:5}),{ +label:"use_strict",className:"meta",relevance:10, +begin:/^\s*['"]use (strict|asm)['"]/ +},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,d,g,u,b,m,{match:/\$\d+/},l,y,{ +className:"attr",begin:t+n.lookahead(":"),relevance:0},M,{ +begin:"("+e.RE_STARTERS_RE+"|\\b(case|return|throw)\\b)\\s*", +keywords:"return throw case",relevance:0,contains:[m,e.REGEXP_MODE,{ +className:"function",begin:x,returnBegin:!0,end:"\\s*=>",contains:[{ +className:"params",variants:[{begin:e.UNDERSCORE_IDENT_RE,relevance:0},{ +className:null,begin:/\(\s*\)/,skip:!0},{begin:/\(/,end:/\)/,excludeBegin:!0, +excludeEnd:!0,keywords:i,contains:h}]}]},{begin:/,/,relevance:0},{match:/\s+/, +relevance:0},{variants:[{begin:"<>",end:""},{ +match:/<[A-Za-z0-9\\._:-]+\s*\/>/},{begin:a.begin, +"on:begin":a.isTrulyOpeningTag,end:a.end}],subLanguage:"xml",contains:[{ +begin:a.begin,end:a.end,skip:!0,contains:["self"]}]}]},N,{ +beginKeywords:"while if switch catch for"},{ +begin:"\\b(?!function)"+e.UNDERSCORE_IDENT_RE+"\\([^()]*(\\([^()]*(\\([^()]*\\)[^()]*)*\\)[^()]*)*\\)\\s*\\{", +returnBegin:!0,label:"func.def",contains:[f,e.inherit(e.TITLE_MODE,{begin:t, +className:"title.function"})]},{match:/\.\.\./,relevance:0},O,{match:"\\$"+t, +relevance:0},{match:[/\bconstructor(?=\s*\()/],className:{1:"title.function"}, 
+contains:[f]},w,{relevance:0,match:/\b[A-Z][A-Z_0-9]+\b/, +className:"variable.constant"},E,k,{match:/\$[(.]/}]}} +const ke=e=>b(/\b/,e,/\w$/.test(e)?/\b/:/\B/),xe=["Protocol","Type"].map(ke),Me=["init","self"].map(ke),Se=["Any","Self"],Ae=["actor","any","associatedtype","async","await",/as\?/,/as!/,"as","borrowing","break","case","catch","class","consume","consuming","continue","convenience","copy","default","defer","deinit","didSet","distributed","do","dynamic","each","else","enum","extension","fallthrough",/fileprivate\(set\)/,"fileprivate","final","for","func","get","guard","if","import","indirect","infix",/init\?/,/init!/,"inout",/internal\(set\)/,"internal","in","is","isolated","nonisolated","lazy","let","macro","mutating","nonmutating",/open\(set\)/,"open","operator","optional","override","postfix","precedencegroup","prefix",/private\(set\)/,"private","protocol",/public\(set\)/,"public","repeat","required","rethrows","return","set","some","static","struct","subscript","super","switch","throws","throw",/try\?/,/try!/,"try","typealias",/unowned\(safe\)/,/unowned\(unsafe\)/,"unowned","var","weak","where","while","willSet"],Ce=["false","nil","true"],Te=["assignment","associativity","higherThan","left","lowerThan","none","right"],Re=["#colorLiteral","#column","#dsohandle","#else","#elseif","#endif","#error","#file","#fileID","#fileLiteral","#filePath","#function","#if","#imageLiteral","#keyPath","#line","#selector","#sourceLocation","#warning"],De=["abs","all","any","assert","assertionFailure","debugPrint","dump","fatalError","getVaList","isKnownUniquelyReferenced","max","min","numericCast","pointwiseMax","pointwiseMin","precondition","preconditionFailure","print","readLine","repeatElement","sequence","stride","swap","swift_unboxFromSwiftValueWithType","transcode","type","unsafeBitCast","unsafeDowncast","withExtendedLifetime","withUnsafeMutablePointer","withUnsafePointer","withVaList","withoutActuallyEscaping","zip"],Ie=m(/[/=\-+!*%<>&|^~?]/,/[\u00A1-\u00A7]/,/[\
u00A9\u00AB]/,/[\u00AC\u00AE]/,/[\u00B0\u00B1]/,/[\u00B6\u00BB\u00BF\u00D7\u00F7]/,/[\u2016-\u2017]/,/[\u2020-\u2027]/,/[\u2030-\u203E]/,/[\u2041-\u2053]/,/[\u2055-\u205E]/,/[\u2190-\u23FF]/,/[\u2500-\u2775]/,/[\u2794-\u2BFF]/,/[\u2E00-\u2E7F]/,/[\u3001-\u3003]/,/[\u3008-\u3020]/,/[\u3030]/),Le=m(Ie,/[\u0300-\u036F]/,/[\u1DC0-\u1DFF]/,/[\u20D0-\u20FF]/,/[\uFE00-\uFE0F]/,/[\uFE20-\uFE2F]/),Be=b(Ie,Le,"*"),$e=m(/[a-zA-Z_]/,/[\u00A8\u00AA\u00AD\u00AF\u00B2-\u00B5\u00B7-\u00BA]/,/[\u00BC-\u00BE\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u00FF]/,/[\u0100-\u02FF\u0370-\u167F\u1681-\u180D\u180F-\u1DBF]/,/[\u1E00-\u1FFF]/,/[\u200B-\u200D\u202A-\u202E\u203F-\u2040\u2054\u2060-\u206F]/,/[\u2070-\u20CF\u2100-\u218F\u2460-\u24FF\u2776-\u2793]/,/[\u2C00-\u2DFF\u2E80-\u2FFF]/,/[\u3004-\u3007\u3021-\u302F\u3031-\u303F\u3040-\uD7FF]/,/[\uF900-\uFD3D\uFD40-\uFDCF\uFDF0-\uFE1F\uFE30-\uFE44]/,/[\uFE47-\uFEFE\uFF00-\uFFFD]/),ze=m($e,/\d/,/[\u0300-\u036F\u1DC0-\u1DFF\u20D0-\u20FF\uFE20-\uFE2F]/),Fe=b($e,ze,"*"),Ue=b(/[A-Z]/,ze,"*"),je=["attached","autoclosure",b(/convention\(/,m("swift","block","c"),/\)/),"discardableResult","dynamicCallable","dynamicMemberLookup","escaping","freestanding","frozen","GKInspectable","IBAction","IBDesignable","IBInspectable","IBOutlet","IBSegueAction","inlinable","main","nonobjc","NSApplicationMain","NSCopying","NSManaged",b(/objc\(/,Fe,/\)/),"objc","objcMembers","propertyWrapper","requires_stored_property_inits","resultBuilder","Sendable","testable","UIApplicationMain","unchecked","unknown","usableFromInline","warn_unqualified_access"],Pe=["iOS","iOSApplicationExtension","macOS","macOSApplicationExtension","macCatalyst","macCatalystApplicationExtension","watchOS","watchOSApplicationExtension","tvOS","tvOSApplicationExtension","swift"] +;var Ke=Object.freeze({__proto__:null,grmr_bash:e=>{const n=e.regex,t={},a={ +begin:/\$\{/,end:/\}/,contains:["self",{begin:/:-/,contains:[t]}]} +;Object.assign(t,{className:"variable",variants:[{ 
+begin:n.concat(/\$[\w\d#@][\w\d_]*/,"(?![\\w\\d])(?![$])")},a]});const i={ +className:"subst",begin:/\$\(/,end:/\)/,contains:[e.BACKSLASH_ESCAPE]},r={ +begin:/<<-?\s*(?=\w+)/,starts:{contains:[e.END_SAME_AS_BEGIN({begin:/(\w+)/, +end:/(\w+)/,className:"string"})]}},s={className:"string",begin:/"/,end:/"/, +contains:[e.BACKSLASH_ESCAPE,t,i]};i.contains.push(s);const o={begin:/\$?\(\(/, +end:/\)\)/,contains:[{begin:/\d+#[0-9a-f]+/,className:"number"},e.NUMBER_MODE,t] +},l=e.SHEBANG({binary:"(fish|bash|zsh|sh|csh|ksh|tcsh|dash|scsh)",relevance:10 +}),c={className:"function",begin:/\w[\w\d_]*\s*\(\s*\)\s*\{/,returnBegin:!0, +contains:[e.inherit(e.TITLE_MODE,{begin:/\w[\w\d_]*/})],relevance:0};return{ +name:"Bash",aliases:["sh"],keywords:{$pattern:/\b[a-z][a-z0-9._-]+\b/, +keyword:["if","then","else","elif","fi","for","while","until","in","do","done","case","esac","function","select"], +literal:["true","false"], +built_in:["break","cd","continue","eval","exec","exit","export","getopts","hash","pwd","readonly","return","shift","test","times","trap","umask","unset","alias","bind","builtin","caller","command","declare","echo","enable","help","let","local","logout","mapfile","printf","read","readarray","source","type","typeset","ulimit","unalias","set","shopt","autoload","bg","bindkey","bye","cap","chdir","clone","comparguments","compcall","compctl","compdescribe","compfiles","compgroups","compquote","comptags","comptry","compvalues","dirs","disable","disown","echotc","echoti","emulate","fc","fg","float","functions","getcap","getln","history","integer","jobs","kill","limit","log","noglob","popd","print","pushd","pushln","rehash","sched","setcap","setopt","stat","suspend","ttyctl","unfunction","unhash","unlimit","unsetopt","vared","wait","whence","where","which","zcompile","zformat","zftp","zle","zmodload","zparseopts","zprof","zpty","zregexparse","zsocket","zstyle","ztcp","chcon","chgrp","chown","chmod","cp","dd","df","dir","dircolors","ln","ls","mkdir","mkfifo","mknod","mk
temp","mv","realpath","rm","rmdir","shred","sync","touch","truncate","vdir","b2sum","base32","base64","cat","cksum","comm","csplit","cut","expand","fmt","fold","head","join","md5sum","nl","numfmt","od","paste","ptx","pr","sha1sum","sha224sum","sha256sum","sha384sum","sha512sum","shuf","sort","split","sum","tac","tail","tr","tsort","unexpand","uniq","wc","arch","basename","chroot","date","dirname","du","echo","env","expr","factor","groups","hostid","id","link","logname","nice","nohup","nproc","pathchk","pinky","printenv","printf","pwd","readlink","runcon","seq","sleep","stat","stdbuf","stty","tee","test","timeout","tty","uname","unlink","uptime","users","who","whoami","yes"] +},contains:[l,e.SHEBANG(),c,o,e.HASH_COMMENT_MODE,r,{match:/(\/[a-z._-]+)+/},s,{ +match:/\\"/},{className:"string",begin:/'/,end:/'/},{match:/\\'/},t]}}, +grmr_c:e=>{const n=e.regex,t=e.COMMENT("//","$",{contains:[{begin:/\\\n/}] +}),a="decltype\\(auto\\)",i="[a-zA-Z_]\\w*::",r="("+a+"|"+n.optional(i)+"[a-zA-Z_]\\w*"+n.optional("<[^<>]+>")+")",s={ +className:"type",variants:[{begin:"\\b[a-z\\d_]*_t\\b"},{ +match:/\batomic_[a-z]{3,6}\b/}]},o={className:"string",variants:[{ +begin:'(u8?|U|L)?"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{ +begin:"(u8?|U|L)?'(\\\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4,8}|[0-7]{3}|\\S)|.)", +end:"'",illegal:"."},e.END_SAME_AS_BEGIN({ +begin:/(?:u8?|U|L)?R"([^()\\ ]{0,16})\(/,end:/\)([^()\\ ]{0,16})"/})]},l={ +className:"number",variants:[{begin:"\\b(0b[01']+)"},{ +begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)((ll|LL|l|L)(u|U)?|(u|U)(ll|LL|l|L)?|f|F|b|B)" +},{ +begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)" +}],relevance:0},c={className:"meta",begin:/#\s*[a-z]+\b/,end:/$/,keywords:{ +keyword:"if else elif endif define undef warning error line pragma _Pragma ifdef ifndef include" +},contains:[{begin:/\\\n/,relevance:0},e.inherit(o,{className:"string"}),{ 
+className:"string",begin:/<.*?>/},t,e.C_BLOCK_COMMENT_MODE]},d={ +className:"title",begin:n.optional(i)+e.IDENT_RE,relevance:0 +},g=n.optional(i)+e.IDENT_RE+"\\s*\\(",u={ +keyword:["asm","auto","break","case","continue","default","do","else","enum","extern","for","fortran","goto","if","inline","register","restrict","return","sizeof","struct","switch","typedef","union","volatile","while","_Alignas","_Alignof","_Atomic","_Generic","_Noreturn","_Static_assert","_Thread_local","alignas","alignof","noreturn","static_assert","thread_local","_Pragma"], +type:["float","double","signed","unsigned","int","short","long","char","void","_Bool","_Complex","_Imaginary","_Decimal32","_Decimal64","_Decimal128","const","static","complex","bool","imaginary"], +literal:"true false NULL", +built_in:"std string wstring cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream auto_ptr deque list queue stack vector map set pair bitset multiset multimap unordered_set unordered_map unordered_multiset unordered_multimap priority_queue make_pair array shared_ptr abort terminate abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf future isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf endl initializer_list unique_ptr" +},b=[c,s,t,e.C_BLOCK_COMMENT_MODE,l,o],m={variants:[{begin:/=/,end:/;/},{ +begin:/\(/,end:/\)/},{beginKeywords:"new throw return else",end:/;/}], +keywords:u,contains:b.concat([{begin:/\(/,end:/\)/,keywords:u, +contains:b.concat(["self"]),relevance:0}]),relevance:0},p={ +begin:"("+r+"[\\*&\\s]+)+"+g,returnBegin:!0,end:/[{;=]/,excludeEnd:!0, 
+keywords:u,illegal:/[^\w\s\*&:<>.]/,contains:[{begin:a,keywords:u,relevance:0},{ +begin:g,returnBegin:!0,contains:[e.inherit(d,{className:"title.function"})], +relevance:0},{relevance:0,match:/,/},{className:"params",begin:/\(/,end:/\)/, +keywords:u,relevance:0,contains:[t,e.C_BLOCK_COMMENT_MODE,o,l,s,{begin:/\(/, +end:/\)/,keywords:u,relevance:0,contains:["self",t,e.C_BLOCK_COMMENT_MODE,o,l,s] +}]},s,t,e.C_BLOCK_COMMENT_MODE,c]};return{name:"C",aliases:["h"],keywords:u, +disableAutodetect:!0,illegal:"=]/,contains:[{ +beginKeywords:"final class struct"},e.TITLE_MODE]}]),exports:{preprocessor:c, +strings:o,keywords:u}}},grmr_cpp:e=>{const n=e.regex,t=e.COMMENT("//","$",{ +contains:[{begin:/\\\n/}] +}),a="decltype\\(auto\\)",i="[a-zA-Z_]\\w*::",r="(?!struct)("+a+"|"+n.optional(i)+"[a-zA-Z_]\\w*"+n.optional("<[^<>]+>")+")",s={ +className:"type",begin:"\\b[a-z\\d_]*_t\\b"},o={className:"string",variants:[{ +begin:'(u8?|U|L)?"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{ +begin:"(u8?|U|L)?'(\\\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4,8}|[0-7]{3}|\\S)|.)", +end:"'",illegal:"."},e.END_SAME_AS_BEGIN({ +begin:/(?:u8?|U|L)?R"([^()\\ ]{0,16})\(/,end:/\)([^()\\ ]{0,16})"/})]},l={ +className:"number",variants:[{begin:"\\b(0b[01']+)"},{ +begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)((ll|LL|l|L)(u|U)?|(u|U)(ll|LL|l|L)?|f|F|b|B)" +},{ +begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)" +}],relevance:0},c={className:"meta",begin:/#\s*[a-z]+\b/,end:/$/,keywords:{ +keyword:"if else elif endif define undef warning error line pragma _Pragma ifdef ifndef include" +},contains:[{begin:/\\\n/,relevance:0},e.inherit(o,{className:"string"}),{ +className:"string",begin:/<.*?>/},t,e.C_BLOCK_COMMENT_MODE]},d={ +className:"title",begin:n.optional(i)+e.IDENT_RE,relevance:0 +},g=n.optional(i)+e.IDENT_RE+"\\s*\\(",u={ 
+type:["bool","char","char16_t","char32_t","char8_t","double","float","int","long","short","void","wchar_t","unsigned","signed","const","static"], +keyword:["alignas","alignof","and","and_eq","asm","atomic_cancel","atomic_commit","atomic_noexcept","auto","bitand","bitor","break","case","catch","class","co_await","co_return","co_yield","compl","concept","const_cast|10","consteval","constexpr","constinit","continue","decltype","default","delete","do","dynamic_cast|10","else","enum","explicit","export","extern","false","final","for","friend","goto","if","import","inline","module","mutable","namespace","new","noexcept","not","not_eq","nullptr","operator","or","or_eq","override","private","protected","public","reflexpr","register","reinterpret_cast|10","requires","return","sizeof","static_assert","static_cast|10","struct","switch","synchronized","template","this","thread_local","throw","transaction_safe","transaction_safe_dynamic","true","try","typedef","typeid","typename","union","using","virtual","volatile","while","xor","xor_eq"], +literal:["NULL","false","nullopt","nullptr","true"],built_in:["_Pragma"], +_type_hints:["any","auto_ptr","barrier","binary_semaphore","bitset","complex","condition_variable","condition_variable_any","counting_semaphore","deque","false_type","future","imaginary","initializer_list","istringstream","jthread","latch","lock_guard","multimap","multiset","mutex","optional","ostringstream","packaged_task","pair","promise","priority_queue","queue","recursive_mutex","recursive_timed_mutex","scoped_lock","set","shared_future","shared_lock","shared_mutex","shared_timed_mutex","shared_ptr","stack","string_view","stringstream","timed_mutex","thread","true_type","tuple","unique_lock","unique_ptr","unordered_map","unordered_multimap","unordered_multiset","unordered_set","variant","vector","weak_ptr","wstring","wstring_view"] +},b={className:"function.dispatch",relevance:0,keywords:{ 
+_hint:["abort","abs","acos","apply","as_const","asin","atan","atan2","calloc","ceil","cerr","cin","clog","cos","cosh","cout","declval","endl","exchange","exit","exp","fabs","floor","fmod","forward","fprintf","fputs","free","frexp","fscanf","future","invoke","isalnum","isalpha","iscntrl","isdigit","isgraph","islower","isprint","ispunct","isspace","isupper","isxdigit","labs","launder","ldexp","log","log10","make_pair","make_shared","make_shared_for_overwrite","make_tuple","make_unique","malloc","memchr","memcmp","memcpy","memset","modf","move","pow","printf","putchar","puts","realloc","scanf","sin","sinh","snprintf","sprintf","sqrt","sscanf","std","stderr","stdin","stdout","strcat","strchr","strcmp","strcpy","strcspn","strlen","strncat","strncmp","strncpy","strpbrk","strrchr","strspn","strstr","swap","tan","tanh","terminate","to_underlying","tolower","toupper","vfprintf","visit","vprintf","vsprintf"] +}, +begin:n.concat(/\b/,/(?!decltype)/,/(?!if)/,/(?!for)/,/(?!switch)/,/(?!while)/,e.IDENT_RE,n.lookahead(/(<[^<>]+>|)\s*\(/)) +},m=[b,c,s,t,e.C_BLOCK_COMMENT_MODE,l,o],p={variants:[{begin:/=/,end:/;/},{ +begin:/\(/,end:/\)/},{beginKeywords:"new throw return else",end:/;/}], +keywords:u,contains:m.concat([{begin:/\(/,end:/\)/,keywords:u, +contains:m.concat(["self"]),relevance:0}]),relevance:0},_={className:"function", +begin:"("+r+"[\\*&\\s]+)+"+g,returnBegin:!0,end:/[{;=]/,excludeEnd:!0, +keywords:u,illegal:/[^\w\s\*&:<>.]/,contains:[{begin:a,keywords:u,relevance:0},{ +begin:g,returnBegin:!0,contains:[d],relevance:0},{begin:/::/,relevance:0},{ +begin:/:/,endsWithParent:!0,contains:[o,l]},{relevance:0,match:/,/},{ +className:"params",begin:/\(/,end:/\)/,keywords:u,relevance:0, +contains:[t,e.C_BLOCK_COMMENT_MODE,o,l,s,{begin:/\(/,end:/\)/,keywords:u, +relevance:0,contains:["self",t,e.C_BLOCK_COMMENT_MODE,o,l,s]}] +},s,t,e.C_BLOCK_COMMENT_MODE,c]};return{name:"C++", 
+aliases:["cc","c++","h++","hpp","hh","hxx","cxx"],keywords:u,illegal:"",keywords:u,contains:["self",s]},{begin:e.IDENT_RE+"::",keywords:u},{ +match:[/\b(?:enum(?:\s+(?:class|struct))?|class|struct|union)/,/\s+/,/\w+/], +className:{1:"keyword",3:"title.class"}}])}},grmr_csharp:e=>{const n={ +keyword:["abstract","as","base","break","case","catch","class","const","continue","do","else","event","explicit","extern","finally","fixed","for","foreach","goto","if","implicit","in","interface","internal","is","lock","namespace","new","operator","out","override","params","private","protected","public","readonly","record","ref","return","scoped","sealed","sizeof","stackalloc","static","struct","switch","this","throw","try","typeof","unchecked","unsafe","using","virtual","void","volatile","while"].concat(["add","alias","and","ascending","async","await","by","descending","equals","from","get","global","group","init","into","join","let","nameof","not","notnull","on","or","orderby","partial","remove","select","set","unmanaged","value|0","var","when","where","with","yield"]), +built_in:["bool","byte","char","decimal","delegate","double","dynamic","enum","float","int","long","nint","nuint","object","sbyte","short","string","ulong","uint","ushort"], +literal:["default","false","null","true"]},t=e.inherit(e.TITLE_MODE,{ +begin:"[a-zA-Z](\\.?\\w)*"}),a={className:"number",variants:[{ +begin:"\\b(0b[01']+)"},{ +begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{ +begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)" +}],relevance:0},i={className:"string",begin:'@"',end:'"',contains:[{begin:'""'}] +},r=e.inherit(i,{illegal:/\n/}),s={className:"subst",begin:/\{/,end:/\}/, +keywords:n},o=e.inherit(s,{illegal:/\n/}),l={className:"string",begin:/\$"/, +end:'"',illegal:/\n/,contains:[{begin:/\{\{/},{begin:/\}\}/ +},e.BACKSLASH_ESCAPE,o]},c={className:"string",begin:/\$@"/,end:'"',contains:[{ 
+begin:/\{\{/},{begin:/\}\}/},{begin:'""'},s]},d=e.inherit(c,{illegal:/\n/, +contains:[{begin:/\{\{/},{begin:/\}\}/},{begin:'""'},o]}) +;s.contains=[c,l,i,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.C_BLOCK_COMMENT_MODE], +o.contains=[d,l,r,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.inherit(e.C_BLOCK_COMMENT_MODE,{ +illegal:/\n/})];const g={variants:[c,l,i,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE] +},u={begin:"<",end:">",contains:[{beginKeywords:"in out"},t] +},b=e.IDENT_RE+"(<"+e.IDENT_RE+"(\\s*,\\s*"+e.IDENT_RE+")*>)?(\\[\\])?",m={ +begin:"@"+e.IDENT_RE,relevance:0};return{name:"C#",aliases:["cs","c#"], +keywords:n,illegal:/::/,contains:[e.COMMENT("///","$",{returnBegin:!0, +contains:[{className:"doctag",variants:[{begin:"///",relevance:0},{ +begin:"\x3c!--|--\x3e"},{begin:""}]}] +}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:"meta",begin:"#", +end:"$",keywords:{ +keyword:"if else elif endif define undef warning error line region endregion pragma checksum" +}},g,a,{beginKeywords:"class interface",relevance:0,end:/[{;=]/, +illegal:/[^\s:,]/,contains:[{beginKeywords:"where class" +},t,u,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{beginKeywords:"namespace", +relevance:0,end:/[{;=]/,illegal:/[^\s:]/, +contains:[t,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{ +beginKeywords:"record",relevance:0,end:/[{;=]/,illegal:/[^\s:]/, +contains:[t,u,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"meta", +begin:"^\\s*\\[(?=[\\w])",excludeBegin:!0,end:"\\]",excludeEnd:!0,contains:[{ +className:"string",begin:/"/,end:/"/}]},{ +beginKeywords:"new return throw await else",relevance:0},{className:"function", +begin:"("+b+"\\s+)+"+e.IDENT_RE+"\\s*(<[^=]+>\\s*)?\\(",returnBegin:!0, +end:/\s*[{;=]/,excludeEnd:!0,keywords:n,contains:[{ +beginKeywords:"public private protected static internal protected abstract async extern override unsafe virtual new sealed partial", +relevance:0},{begin:e.IDENT_RE+"\\s*(<[^=]+>\\s*)?\\(",returnBegin:!0, 
+contains:[e.TITLE_MODE,u],relevance:0},{match:/\(\)/},{className:"params", +begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:n,relevance:0, +contains:[g,a,e.C_BLOCK_COMMENT_MODE] +},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},m]}},grmr_css:e=>{ +const n=e.regex,t=ie(e),a=[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE];return{ +name:"CSS",case_insensitive:!0,illegal:/[=|'\$]/,keywords:{ +keyframePosition:"from to"},classNameAliases:{keyframePosition:"selector-tag"}, +contains:[t.BLOCK_COMMENT,{begin:/-(webkit|moz|ms|o)-(?=[a-z])/ +},t.CSS_NUMBER_MODE,{className:"selector-id",begin:/#[A-Za-z0-9_-]+/,relevance:0 +},{className:"selector-class",begin:"\\.[a-zA-Z-][a-zA-Z0-9_-]*",relevance:0 +},t.ATTRIBUTE_SELECTOR_MODE,{className:"selector-pseudo",variants:[{ +begin:":("+oe.join("|")+")"},{begin:":(:)?("+le.join("|")+")"}] +},t.CSS_VARIABLE,{className:"attribute",begin:"\\b("+ce.join("|")+")\\b"},{ +begin:/:/,end:/[;}{]/, +contains:[t.BLOCK_COMMENT,t.HEXCOLOR,t.IMPORTANT,t.CSS_NUMBER_MODE,...a,{ +begin:/(url|data-uri)\(/,end:/\)/,relevance:0,keywords:{built_in:"url data-uri" +},contains:[...a,{className:"string",begin:/[^)]/,endsWithParent:!0, +excludeEnd:!0}]},t.FUNCTION_DISPATCH]},{begin:n.lookahead(/@/),end:"[{;]", +relevance:0,illegal:/:/,contains:[{className:"keyword",begin:/@-?\w[\w]*(-\w+)*/ +},{begin:/\s/,endsWithParent:!0,excludeEnd:!0,relevance:0,keywords:{ +$pattern:/[a-z-]+/,keyword:"and or not only",attribute:se.join(" ")},contains:[{ +begin:/[a-z-]+(?=:)/,className:"attribute"},...a,t.CSS_NUMBER_MODE]}]},{ +className:"selector-tag",begin:"\\b("+re.join("|")+")\\b"}]}},grmr_diff:e=>{ +const n=e.regex;return{name:"Diff",aliases:["patch"],contains:[{ +className:"meta",relevance:10, +match:n.either(/^@@ +-\d+,\d+ +\+\d+,\d+ +@@/,/^\*\*\* +\d+,\d+ +\*\*\*\*$/,/^--- +\d+,\d+ +----$/) +},{className:"comment",variants:[{ +begin:n.either(/Index: /,/^index/,/={3,}/,/^-{3}/,/^\*{3} /,/^\+{3}/,/^diff --git/), 
+end:/$/},{match:/^\*{15}$/}]},{className:"addition",begin:/^\+/,end:/$/},{ +className:"deletion",begin:/^-/,end:/$/},{className:"addition",begin:/^!/, +end:/$/}]}},grmr_go:e=>{const n={ +keyword:["break","case","chan","const","continue","default","defer","else","fallthrough","for","func","go","goto","if","import","interface","map","package","range","return","select","struct","switch","type","var"], +type:["bool","byte","complex64","complex128","error","float32","float64","int8","int16","int32","int64","string","uint8","uint16","uint32","uint64","int","uint","uintptr","rune"], +literal:["true","false","iota","nil"], +built_in:["append","cap","close","complex","copy","imag","len","make","new","panic","print","println","real","recover","delete"] +};return{name:"Go",aliases:["golang"],keywords:n,illegal:"{const n=e.regex;return{name:"GraphQL",aliases:["gql"], +case_insensitive:!0,disableAutodetect:!1,keywords:{ +keyword:["query","mutation","subscription","type","input","schema","directive","interface","union","scalar","fragment","enum","on"], +literal:["true","false","null"]}, +contains:[e.HASH_COMMENT_MODE,e.QUOTE_STRING_MODE,e.NUMBER_MODE,{ +scope:"punctuation",match:/[.]{3}/,relevance:0},{scope:"punctuation", +begin:/[\!\(\)\:\=\[\]\{\|\}]{1}/,relevance:0},{scope:"variable",begin:/\$/, +end:/\W/,excludeEnd:!0,relevance:0},{scope:"meta",match:/@\w+/,excludeEnd:!0},{ +scope:"symbol",begin:n.concat(/[_A-Za-z][_0-9A-Za-z]*/,n.lookahead(/\s*:/)), +relevance:0}],illegal:[/[;<']/,/BEGIN/]}},grmr_ini:e=>{const n=e.regex,t={ +className:"number",relevance:0,variants:[{begin:/([+-]+)?[\d]+_[\d_]+/},{ +begin:e.NUMBER_RE}]},a=e.COMMENT();a.variants=[{begin:/;/,end:/$/},{begin:/#/, +end:/$/}];const i={className:"variable",variants:[{begin:/\$[\w\d"][\w\d_]*/},{ +begin:/\$\{(.*?)\}/}]},r={className:"literal", +begin:/\bon|off|true|false|yes|no\b/},s={className:"string", +contains:[e.BACKSLASH_ESCAPE],variants:[{begin:"'''",end:"'''",relevance:10},{ 
+begin:'"""',end:'"""',relevance:10},{begin:'"',end:'"'},{begin:"'",end:"'"}] +},o={begin:/\[/,end:/\]/,contains:[a,r,i,s,t,"self"],relevance:0 +},l=n.either(/[A-Za-z0-9_-]+/,/"(\\"|[^"])*"/,/'[^']*'/);return{ +name:"TOML, also INI",aliases:["toml"],case_insensitive:!0,illegal:/\S/, +contains:[a,{className:"section",begin:/\[+/,end:/\]+/},{ +begin:n.concat(l,"(\\s*\\.\\s*",l,")*",n.lookahead(/\s*=\s*[^#\s]/)), +className:"attr",starts:{end:/$/,contains:[a,o,r,i,s,t]}}]}},grmr_java:e=>{ +const n=e.regex,t="[\xc0-\u02b8a-zA-Z_$][\xc0-\u02b8a-zA-Z_$0-9]*",a=t+pe("(?:<"+t+"~~~(?:\\s*,\\s*"+t+"~~~)*>)?",/~~~/g,2),i={ +keyword:["synchronized","abstract","private","var","static","if","const ","for","while","strictfp","finally","protected","import","native","final","void","enum","else","break","transient","catch","instanceof","volatile","case","assert","package","default","public","try","switch","continue","throws","protected","public","private","module","requires","exports","do","sealed","yield","permits"], +literal:["false","true","null"], +type:["char","boolean","long","float","int","byte","short","double"], +built_in:["super","this"]},r={className:"meta",begin:"@"+t,contains:[{ +begin:/\(/,end:/\)/,contains:["self"]}]},s={className:"params",begin:/\(/, +end:/\)/,keywords:i,relevance:0,contains:[e.C_BLOCK_COMMENT_MODE],endsParent:!0} +;return{name:"Java",aliases:["jsp"],keywords:i,illegal:/<\/|#/, +contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{begin:/\w+@/, +relevance:0},{className:"doctag",begin:"@[A-Za-z]+"}]}),{ +begin:/import java\.[a-z]+\./,keywords:"import",relevance:2 +},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{begin:/"""/,end:/"""/, +className:"string",contains:[e.BACKSLASH_ESCAPE] +},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{ +match:[/\b(?:class|interface|enum|extends|implements|new)/,/\s+/,t],className:{ +1:"keyword",3:"title.class"}},{match:/non-sealed/,scope:"keyword"},{ 
+begin:[n.concat(/(?!else)/,t),/\s+/,t,/\s+/,/=(?!=)/],className:{1:"type", +3:"variable",5:"operator"}},{begin:[/record/,/\s+/,t],className:{1:"keyword", +3:"title.class"},contains:[s,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{ +beginKeywords:"new throw return else",relevance:0},{ +begin:["(?:"+a+"\\s+)",e.UNDERSCORE_IDENT_RE,/\s*(?=\()/],className:{ +2:"title.function"},keywords:i,contains:[{className:"params",begin:/\(/, +end:/\)/,keywords:i,relevance:0, +contains:[r,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,me,e.C_BLOCK_COMMENT_MODE] +},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},me,r]}},grmr_javascript:Oe, +grmr_json:e=>{const n=["true","false","null"],t={scope:"literal", +beginKeywords:n.join(" ")};return{name:"JSON",keywords:{literal:n},contains:[{ +className:"attr",begin:/"(\\.|[^\\"\r\n])*"(?=\s*:)/,relevance:1.01},{ +match:/[{}[\],:]/,className:"punctuation",relevance:0 +},e.QUOTE_STRING_MODE,t,e.C_NUMBER_MODE,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE], +illegal:"\\S"}},grmr_kotlin:e=>{const n={ +keyword:"abstract as val var vararg get set class object open private protected public noinline crossinline dynamic final enum if else do while for when throw try catch finally import package is in fun override companion reified inline lateinit init interface annotation data sealed internal infix operator out by constructor super tailrec where const inner suspend typealias external expect actual", +built_in:"Byte Short Char Int Long Boolean Float Double Void Unit Nothing", +literal:"true false null"},t={className:"symbol",begin:e.UNDERSCORE_IDENT_RE+"@" +},a={className:"subst",begin:/\$\{/,end:/\}/,contains:[e.C_NUMBER_MODE]},i={ +className:"variable",begin:"\\$"+e.UNDERSCORE_IDENT_RE},r={className:"string", +variants:[{begin:'"""',end:'"""(?=[^"])',contains:[i,a]},{begin:"'",end:"'", +illegal:/\n/,contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"',illegal:/\n/, +contains:[e.BACKSLASH_ESCAPE,i,a]}]};a.contains.push(r);const s={ +className:"meta", 
+begin:"@(?:file|property|field|get|set|receiver|param|setparam|delegate)\\s*:(?:\\s*"+e.UNDERSCORE_IDENT_RE+")?" +},o={className:"meta",begin:"@"+e.UNDERSCORE_IDENT_RE,contains:[{begin:/\(/, +end:/\)/,contains:[e.inherit(r,{className:"string"}),"self"]}] +},l=me,c=e.COMMENT("/\\*","\\*/",{contains:[e.C_BLOCK_COMMENT_MODE]}),d={ +variants:[{className:"type",begin:e.UNDERSCORE_IDENT_RE},{begin:/\(/,end:/\)/, +contains:[]}]},g=d;return g.variants[1].contains=[d],d.variants[1].contains=[g], +{name:"Kotlin",aliases:["kt","kts"],keywords:n, +contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{className:"doctag", +begin:"@[A-Za-z]+"}]}),e.C_LINE_COMMENT_MODE,c,{className:"keyword", +begin:/\b(break|continue|return|this)\b/,starts:{contains:[{className:"symbol", +begin:/@\w+/}]}},t,s,o,{className:"function",beginKeywords:"fun",end:"[(]|$", +returnBegin:!0,excludeEnd:!0,keywords:n,relevance:5,contains:[{ +begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0, +contains:[e.UNDERSCORE_TITLE_MODE]},{className:"type",begin://, +keywords:"reified",relevance:0},{className:"params",begin:/\(/,end:/\)/, +endsParent:!0,keywords:n,relevance:0,contains:[{begin:/:/,end:/[=,\/]/, +endsWithParent:!0,contains:[d,e.C_LINE_COMMENT_MODE,c],relevance:0 +},e.C_LINE_COMMENT_MODE,c,s,o,r,e.C_NUMBER_MODE]},c]},{ +begin:[/class|interface|trait/,/\s+/,e.UNDERSCORE_IDENT_RE],beginScope:{ +3:"title.class"},keywords:"class interface trait",end:/[:\{(]|$/,excludeEnd:!0, +illegal:"extends implements",contains:[{ +beginKeywords:"public protected internal private constructor" +},e.UNDERSCORE_TITLE_MODE,{className:"type",begin://,excludeBegin:!0, +excludeEnd:!0,relevance:0},{className:"type",begin:/[,:]\s*/,end:/[<\(,){\s]|$/, +excludeBegin:!0,returnEnd:!0},s,o]},r,{className:"meta",begin:"^#!/usr/bin/env", +end:"$",illegal:"\n"},l]}},grmr_less:e=>{ +const n=ie(e),t=de,a="[\\w-]+",i="("+a+"|@\\{"+a+"\\})",r=[],s=[],o=e=>({ 
+className:"string",begin:"~?"+e+".*?"+e}),l=(e,n,t)=>({className:e,begin:n, +relevance:t}),c={$pattern:/[a-z-]+/,keyword:"and or not only", +attribute:se.join(" ")},d={begin:"\\(",end:"\\)",contains:s,keywords:c, +relevance:0} +;s.push(e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,o("'"),o('"'),n.CSS_NUMBER_MODE,{ +begin:"(url|data-uri)\\(",starts:{className:"string",end:"[\\)\\n]", +excludeEnd:!0} +},n.HEXCOLOR,d,l("variable","@@?"+a,10),l("variable","@\\{"+a+"\\}"),l("built_in","~?`[^`]*?`"),{ +className:"attribute",begin:a+"\\s*:",end:":",returnBegin:!0,excludeEnd:!0 +},n.IMPORTANT,{beginKeywords:"and not"},n.FUNCTION_DISPATCH);const g=s.concat({ +begin:/\{/,end:/\}/,contains:r}),u={beginKeywords:"when",endsWithParent:!0, +contains:[{beginKeywords:"and not"}].concat(s)},b={begin:i+"\\s*:", +returnBegin:!0,end:/[;}]/,relevance:0,contains:[{begin:/-(webkit|moz|ms|o)-/ +},n.CSS_VARIABLE,{className:"attribute",begin:"\\b("+ce.join("|")+")\\b", +end:/(?=:)/,starts:{endsWithParent:!0,illegal:"[<=$]",relevance:0,contains:s}}] +},m={className:"keyword", +begin:"@(import|media|charset|font-face|(-[a-z]+-)?keyframes|supports|document|namespace|page|viewport|host)\\b", +starts:{end:"[;{}]",keywords:c,returnEnd:!0,contains:s,relevance:0}},p={ +className:"variable",variants:[{begin:"@"+a+"\\s*:",relevance:15},{begin:"@"+a +}],starts:{end:"[;}]",returnEnd:!0,contains:g}},_={variants:[{ +begin:"[\\.#:&\\[>]",end:"[;{}]"},{begin:i,end:/\{/}],returnBegin:!0, +returnEnd:!0,illegal:"[<='$\"]",relevance:0, +contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,u,l("keyword","all\\b"),l("variable","@\\{"+a+"\\}"),{ +begin:"\\b("+re.join("|")+")\\b",className:"selector-tag" +},n.CSS_NUMBER_MODE,l("selector-tag",i,0),l("selector-id","#"+i),l("selector-class","\\."+i,0),l("selector-tag","&",0),n.ATTRIBUTE_SELECTOR_MODE,{ +className:"selector-pseudo",begin:":("+oe.join("|")+")"},{ +className:"selector-pseudo",begin:":(:)?("+le.join("|")+")"},{begin:/\(/, 
+end:/\)/,relevance:0,contains:g},{begin:"!important"},n.FUNCTION_DISPATCH]},h={ +begin:a+":(:)?"+`(${t.join("|")})`,returnBegin:!0,contains:[_]} +;return r.push(e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,m,p,h,b,_,u,n.FUNCTION_DISPATCH), +{name:"Less",case_insensitive:!0,illegal:"[=>'/<($\"]",contains:r}}, +grmr_lua:e=>{const n="\\[=*\\[",t="\\]=*\\]",a={begin:n,end:t,contains:["self"] +},i=[e.COMMENT("--(?!"+n+")","$"),e.COMMENT("--"+n,t,{contains:[a],relevance:10 +})];return{name:"Lua",keywords:{$pattern:e.UNDERSCORE_IDENT_RE, +literal:"true false nil", +keyword:"and break do else elseif end for goto if in local not or repeat return then until while", +built_in:"_G _ENV _VERSION __index __newindex __mode __call __metatable __tostring __len __gc __add __sub __mul __div __mod __pow __concat __unm __eq __lt __le assert collectgarbage dofile error getfenv getmetatable ipairs load loadfile loadstring module next pairs pcall print rawequal rawget rawset require select setfenv setmetatable tonumber tostring type unpack xpcall arg self coroutine resume yield status wrap create running debug getupvalue debug sethook getmetatable gethook setmetatable setlocal traceback setfenv getinfo setupvalue getlocal getregistry getfenv io lines write close flush open output type read stderr stdin input stdout popen tmpfile math log max acos huge ldexp pi cos tanh pow deg tan cosh sinh random randomseed frexp ceil floor rad abs sqrt modf asin min mod fmod log10 atan2 exp sin atan os exit setlocale date getenv difftime remove time clock tmpname rename execute package preload loadlib loaded loaders cpath config path seeall string sub upper len gfind rep find match char dump gmatch reverse byte format gsub lower table setn insert getn foreachi maxn foreach concat sort remove" +},contains:i.concat([{className:"function",beginKeywords:"function",end:"\\)", +contains:[e.inherit(e.TITLE_MODE,{ +begin:"([_a-zA-Z]\\w*\\.)*([_a-zA-Z]\\w*:)?[_a-zA-Z]\\w*"}),{className:"params", 
+begin:"\\(",endsWithParent:!0,contains:i}].concat(i) +},e.C_NUMBER_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:"string", +begin:n,end:t,contains:[a],relevance:5}])}},grmr_makefile:e=>{const n={ +className:"variable",variants:[{begin:"\\$\\("+e.UNDERSCORE_IDENT_RE+"\\)", +contains:[e.BACKSLASH_ESCAPE]},{begin:/\$[@%{ +const n={begin:/<\/?[A-Za-z_]/,end:">",subLanguage:"xml",relevance:0},t={ +variants:[{begin:/\[.+?\]\[.*?\]/,relevance:0},{ +begin:/\[.+?\]\(((data|javascript|mailto):|(?:http|ftp)s?:\/\/).*?\)/, +relevance:2},{ +begin:e.regex.concat(/\[.+?\]\(/,/[A-Za-z][A-Za-z0-9+.-]*/,/:\/\/.*?\)/), +relevance:2},{begin:/\[.+?\]\([./?&#].*?\)/,relevance:1},{ +begin:/\[.*?\]\(.*?\)/,relevance:0}],returnBegin:!0,contains:[{match:/\[(?=\])/ +},{className:"string",relevance:0,begin:"\\[",end:"\\]",excludeBegin:!0, +returnEnd:!0},{className:"link",relevance:0,begin:"\\]\\(",end:"\\)", +excludeBegin:!0,excludeEnd:!0},{className:"symbol",relevance:0,begin:"\\]\\[", +end:"\\]",excludeBegin:!0,excludeEnd:!0}]},a={className:"strong",contains:[], +variants:[{begin:/_{2}(?!\s)/,end:/_{2}/},{begin:/\*{2}(?!\s)/,end:/\*{2}/}] +},i={className:"emphasis",contains:[],variants:[{begin:/\*(?![*\s])/,end:/\*/},{ +begin:/_(?![_\s])/,end:/_/,relevance:0}]},r=e.inherit(a,{contains:[] +}),s=e.inherit(i,{contains:[]});a.contains.push(s),i.contains.push(r) +;let o=[n,t];return[a,i,r,s].forEach((e=>{e.contains=e.contains.concat(o) +})),o=o.concat(a,i),{name:"Markdown",aliases:["md","mkdown","mkd"],contains:[{ +className:"section",variants:[{begin:"^#{1,6}",end:"$",contains:o},{ +begin:"(?=^.+?\\n[=-]{2,}$)",contains:[{begin:"^[=-]*$"},{begin:"^",end:"\\n", +contains:o}]}]},n,{className:"bullet",begin:"^[ \t]*([*+-]|(\\d+\\.))(?=\\s+)", +end:"\\s+",excludeEnd:!0},a,i,{className:"quote",begin:"^>\\s+",contains:o, +end:"$"},{className:"code",variants:[{begin:"(`{3,})[^`](.|\\n)*?\\1`*[ ]*"},{ +begin:"(~{3,})[^~](.|\\n)*?\\1~*[ ]*"},{begin:"```",end:"```+[ ]*$"},{ 
+begin:"~~~",end:"~~~+[ ]*$"},{begin:"`.+?`"},{begin:"(?=^( {4}|\\t))", +contains:[{begin:"^( {4}|\\t)",end:"(\\n)$"}],relevance:0}]},{ +begin:"^[-\\*]{3,}",end:"$"},t,{begin:/^\[[^\n]+\]:/,returnBegin:!0,contains:[{ +className:"symbol",begin:/\[/,end:/\]/,excludeBegin:!0,excludeEnd:!0},{ +className:"link",begin:/:\s*/,end:/$/,excludeBegin:!0}]}]}},grmr_objectivec:e=>{ +const n=/[a-zA-Z@][a-zA-Z0-9_]*/,t={$pattern:n, +keyword:["@interface","@class","@protocol","@implementation"]};return{ +name:"Objective-C",aliases:["mm","objc","obj-c","obj-c++","objective-c++"], +keywords:{"variable.language":["this","super"],$pattern:n, +keyword:["while","export","sizeof","typedef","const","struct","for","union","volatile","static","mutable","if","do","return","goto","enum","else","break","extern","asm","case","default","register","explicit","typename","switch","continue","inline","readonly","assign","readwrite","self","@synchronized","id","typeof","nonatomic","IBOutlet","IBAction","strong","weak","copy","in","out","inout","bycopy","byref","oneway","__strong","__weak","__block","__autoreleasing","@private","@protected","@public","@try","@property","@end","@throw","@catch","@finally","@autoreleasepool","@synthesize","@dynamic","@selector","@optional","@required","@encode","@package","@import","@defs","@compatibility_alias","__bridge","__bridge_transfer","__bridge_retained","__bridge_retain","__covariant","__contravariant","__kindof","_Nonnull","_Nullable","_Null_unspecified","__FUNCTION__","__PRETTY_FUNCTION__","__attribute__","getter","setter","retain","unsafe_unretained","nonnull","nullable","null_unspecified","null_resettable","class","instancetype","NS_DESIGNATED_INITIALIZER","NS_UNAVAILABLE","NS_REQUIRES_SUPER","NS_RETURNS_INNER_POINTER","NS_INLINE","NS_AVAILABLE","NS_DEPRECATED","NS_ENUM","NS_OPTIONS","NS_SWIFT_UNAVAILABLE","NS_ASSUME_NONNULL_BEGIN","NS_ASSUME_NONNULL_END","NS_REFINED_FOR_SWIFT","NS_SWIFT_NAME","NS_SWIFT_NOTHROW","NS_DURING","NS_HANDLER","NS_ENDHANDLER","NS_V
ALUERETURN","NS_VOIDRETURN"], +literal:["false","true","FALSE","TRUE","nil","YES","NO","NULL"], +built_in:["dispatch_once_t","dispatch_queue_t","dispatch_sync","dispatch_async","dispatch_once"], +type:["int","float","char","unsigned","signed","short","long","double","wchar_t","unichar","void","bool","BOOL","id|0","_Bool"] +},illegal:"/,end:/$/,illegal:"\\n" +},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"class", +begin:"("+t.keyword.join("|")+")\\b",end:/(\{|$)/,excludeEnd:!0,keywords:t, +contains:[e.UNDERSCORE_TITLE_MODE]},{begin:"\\."+e.UNDERSCORE_IDENT_RE, +relevance:0}]}},grmr_perl:e=>{const n=e.regex,t=/[dualxmsipngr]{0,12}/,a={ +$pattern:/[\w.]+/, +keyword:"abs accept alarm and atan2 bind binmode bless break caller chdir chmod chomp chop chown chr chroot close closedir connect continue cos crypt dbmclose dbmopen defined delete die do dump each else elsif endgrent endhostent endnetent endprotoent endpwent endservent eof eval exec exists exit exp fcntl fileno flock for foreach fork format formline getc getgrent getgrgid getgrnam gethostbyaddr gethostbyname gethostent getlogin getnetbyaddr getnetbyname getnetent getpeername getpgrp getpriority getprotobyname getprotobynumber getprotoent getpwent getpwnam getpwuid getservbyname getservbyport getservent getsockname getsockopt given glob gmtime goto grep gt hex if index int ioctl join keys kill last lc lcfirst length link listen local localtime log lstat lt ma map mkdir msgctl msgget msgrcv msgsnd my ne next no not oct open opendir or ord our pack package pipe pop pos print printf prototype push q|0 qq quotemeta qw qx rand read readdir readline readlink readpipe recv redo ref rename require reset return reverse rewinddir rindex rmdir say scalar seek seekdir select semctl semget semop send setgrent sethostent setnetent setpgrp setpriority setprotoent setpwent setservent setsockopt shift shmctl shmget shmread shmwrite shutdown sin sleep socket socketpair sort splice split sprintf sqrt srand stat state 
study sub substr symlink syscall sysopen sysread sysseek system syswrite tell telldir tie tied time times tr truncate uc ucfirst umask undef unless unlink unpack unshift untie until use utime values vec wait waitpid wantarray warn when while write x|0 xor y|0" +},i={className:"subst",begin:"[$@]\\{",end:"\\}",keywords:a},r={begin:/->\{/, +end:/\}/},s={variants:[{begin:/\$\d/},{ +begin:n.concat(/[$%@](\^\w\b|#\w+(::\w+)*|\{\w+\}|\w+(::\w*)*)/,"(?![A-Za-z])(?![@$%])") +},{begin:/[$%@][^\s\w{]/,relevance:0}] +},o=[e.BACKSLASH_ESCAPE,i,s],l=[/!/,/\//,/\|/,/\?/,/'/,/"/,/#/],c=(e,a,i="\\1")=>{ +const r="\\1"===i?i:n.concat(i,a) +;return n.concat(n.concat("(?:",e,")"),a,/(?:\\.|[^\\\/])*?/,r,/(?:\\.|[^\\\/])*?/,i,t) +},d=(e,a,i)=>n.concat(n.concat("(?:",e,")"),a,/(?:\\.|[^\\\/])*?/,i,t),g=[s,e.HASH_COMMENT_MODE,e.COMMENT(/^=\w/,/=cut/,{ +endsWithParent:!0}),r,{className:"string",contains:o,variants:[{ +begin:"q[qwxr]?\\s*\\(",end:"\\)",relevance:5},{begin:"q[qwxr]?\\s*\\[", +end:"\\]",relevance:5},{begin:"q[qwxr]?\\s*\\{",end:"\\}",relevance:5},{ +begin:"q[qwxr]?\\s*\\|",end:"\\|",relevance:5},{begin:"q[qwxr]?\\s*<",end:">", +relevance:5},{begin:"qw\\s+q",end:"q",relevance:5},{begin:"'",end:"'", +contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"'},{begin:"`",end:"`", +contains:[e.BACKSLASH_ESCAPE]},{begin:/\{\w+\}/,relevance:0},{ +begin:"-?\\w+\\s*=>",relevance:0}]},{className:"number", +begin:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b", +relevance:0},{ +begin:"(\\/\\/|"+e.RE_STARTERS_RE+"|\\b(split|return|print|reverse|grep)\\b)\\s*", +keywords:"split return print reverse grep",relevance:0, +contains:[e.HASH_COMMENT_MODE,{className:"regexp",variants:[{ +begin:c("s|tr|y",n.either(...l,{capture:!0}))},{begin:c("s|tr|y","\\(","\\)")},{ +begin:c("s|tr|y","\\[","\\]")},{begin:c("s|tr|y","\\{","\\}")}],relevance:2},{ +className:"regexp",variants:[{begin:/(m|qr)\/\//,relevance:0},{ 
+begin:d("(?:m|qr)?",/\//,/\//)},{begin:d("m|qr",n.either(...l,{capture:!0 +}),/\1/)},{begin:d("m|qr",/\(/,/\)/)},{begin:d("m|qr",/\[/,/\]/)},{ +begin:d("m|qr",/\{/,/\}/)}]}]},{className:"function",beginKeywords:"sub", +end:"(\\s*\\(.*?\\))?[;{]",excludeEnd:!0,relevance:5,contains:[e.TITLE_MODE]},{ +begin:"-\\w\\b",relevance:0},{begin:"^__DATA__$",end:"^__END__$", +subLanguage:"mojolicious",contains:[{begin:"^@@.*",end:"$",className:"comment"}] +}];return i.contains=g,r.contains=g,{name:"Perl",aliases:["pl","pm"],keywords:a, +contains:g}},grmr_php:e=>{ +const n=e.regex,t=/(?![A-Za-z0-9])(?![$])/,a=n.concat(/[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*/,t),i=n.concat(/(\\?[A-Z][a-z0-9_\x7f-\xff]+|\\?[A-Z]+(?=[A-Z][a-z0-9_\x7f-\xff])){1,}/,t),r={ +scope:"variable",match:"\\$+"+a},s={scope:"subst",variants:[{begin:/\$\w+/},{ +begin:/\{\$/,end:/\}/}]},o=e.inherit(e.APOS_STRING_MODE,{illegal:null +}),l="[ \t\n]",c={scope:"string",variants:[e.inherit(e.QUOTE_STRING_MODE,{ +illegal:null,contains:e.QUOTE_STRING_MODE.contains.concat(s)}),o,{ +begin:/<<<[ \t]*(?:(\w+)|"(\w+)")\n/,end:/[ \t]*(\w+)\b/, +contains:e.QUOTE_STRING_MODE.contains.concat(s),"on:begin":(e,n)=>{ +n.data._beginMatch=e[1]||e[2]},"on:end":(e,n)=>{ +n.data._beginMatch!==e[1]&&n.ignoreMatch()}},e.END_SAME_AS_BEGIN({ +begin:/<<<[ \t]*'(\w+)'\n/,end:/[ \t]*(\w+)\b/})]},d={scope:"number",variants:[{ +begin:"\\b0[bB][01]+(?:_[01]+)*\\b"},{begin:"\\b0[oO][0-7]+(?:_[0-7]+)*\\b"},{ +begin:"\\b0[xX][\\da-fA-F]+(?:_[\\da-fA-F]+)*\\b"},{ +begin:"(?:\\b\\d+(?:_\\d+)*(\\.(?:\\d+(?:_\\d+)*))?|\\B\\.\\d+)(?:[eE][+-]?\\d+)?" 
+}],relevance:0 +},g=["false","null","true"],u=["__CLASS__","__DIR__","__FILE__","__FUNCTION__","__COMPILER_HALT_OFFSET__","__LINE__","__METHOD__","__NAMESPACE__","__TRAIT__","die","echo","exit","include","include_once","print","require","require_once","array","abstract","and","as","binary","bool","boolean","break","callable","case","catch","class","clone","const","continue","declare","default","do","double","else","elseif","empty","enddeclare","endfor","endforeach","endif","endswitch","endwhile","enum","eval","extends","final","finally","float","for","foreach","from","global","goto","if","implements","instanceof","insteadof","int","integer","interface","isset","iterable","list","match|0","mixed","new","never","object","or","private","protected","public","readonly","real","return","string","switch","throw","trait","try","unset","use","var","void","while","xor","yield"],b=["Error|0","AppendIterator","ArgumentCountError","ArithmeticError","ArrayIterator","ArrayObject","AssertionError","BadFunctionCallException","BadMethodCallException","CachingIterator","CallbackFilterIterator","CompileError","Countable","DirectoryIterator","DivisionByZeroError","DomainException","EmptyIterator","ErrorException","Exception","FilesystemIterator","FilterIterator","GlobIterator","InfiniteIterator","InvalidArgumentException","IteratorIterator","LengthException","LimitIterator","LogicException","MultipleIterator","NoRewindIterator","OutOfBoundsException","OutOfRangeException","OuterIterator","OverflowException","ParentIterator","ParseError","RangeException","RecursiveArrayIterator","RecursiveCachingIterator","RecursiveCallbackFilterIterator","RecursiveDirectoryIterator","RecursiveFilterIterator","RecursiveIterator","RecursiveIteratorIterator","RecursiveRegexIterator","RecursiveTreeIterator","RegexIterator","RuntimeException","SeekableIterator","SplDoublyLinkedList","SplFileInfo","SplFileObject","SplFixedArray","SplHeap","SplMaxHeap","SplMinHeap","SplObjectStorage","SplObserver","SplPriorit
yQueue","SplQueue","SplStack","SplSubject","SplTempFileObject","TypeError","UnderflowException","UnexpectedValueException","UnhandledMatchError","ArrayAccess","BackedEnum","Closure","Fiber","Generator","Iterator","IteratorAggregate","Serializable","Stringable","Throwable","Traversable","UnitEnum","WeakReference","WeakMap","Directory","__PHP_Incomplete_Class","parent","php_user_filter","self","static","stdClass"],m={ +keyword:u,literal:(e=>{const n=[];return e.forEach((e=>{ +n.push(e),e.toLowerCase()===e?n.push(e.toUpperCase()):n.push(e.toLowerCase()) +})),n})(g),built_in:b},p=e=>e.map((e=>e.replace(/\|\d+$/,""))),_={variants:[{ +match:[/new/,n.concat(l,"+"),n.concat("(?!",p(b).join("\\b|"),"\\b)"),i],scope:{ +1:"keyword",4:"title.class"}}]},h=n.concat(a,"\\b(?!\\()"),f={variants:[{ +match:[n.concat(/::/,n.lookahead(/(?!class\b)/)),h],scope:{2:"variable.constant" +}},{match:[/::/,/class/],scope:{2:"variable.language"}},{ +match:[i,n.concat(/::/,n.lookahead(/(?!class\b)/)),h],scope:{1:"title.class", +3:"variable.constant"}},{match:[i,n.concat("::",n.lookahead(/(?!class\b)/))], +scope:{1:"title.class"}},{match:[i,/::/,/class/],scope:{1:"title.class", +3:"variable.language"}}]},E={scope:"attr", +match:n.concat(a,n.lookahead(":"),n.lookahead(/(?!::)/))},y={relevance:0, +begin:/\(/,end:/\)/,keywords:m,contains:[E,r,f,e.C_BLOCK_COMMENT_MODE,c,d,_] +},N={relevance:0, +match:[/\b/,n.concat("(?!fn\\b|function\\b|",p(u).join("\\b|"),"|",p(b).join("\\b|"),"\\b)"),a,n.concat(l,"*"),n.lookahead(/(?=\()/)], +scope:{3:"title.function.invoke"},contains:[y]};y.contains.push(N) +;const w=[E,f,e.C_BLOCK_COMMENT_MODE,c,d,_];return{case_insensitive:!1, +keywords:m,contains:[{begin:n.concat(/#\[\s*/,i),beginScope:"meta",end:/]/, +endScope:"meta",keywords:{literal:g,keyword:["new","array"]},contains:[{ +begin:/\[/,end:/]/,keywords:{literal:g,keyword:["new","array"]}, +contains:["self",...w]},...w,{scope:"meta",match:i}] 
+},e.HASH_COMMENT_MODE,e.COMMENT("//","$"),e.COMMENT("/\\*","\\*/",{contains:[{ +scope:"doctag",match:"@[A-Za-z]+"}]}),{match:/__halt_compiler\(\);/, +keywords:"__halt_compiler",starts:{scope:"comment",end:e.MATCH_NOTHING_RE, +contains:[{match:/\?>/,scope:"meta",endsParent:!0}]}},{scope:"meta",variants:[{ +begin:/<\?php/,relevance:10},{begin:/<\?=/},{begin:/<\?/,relevance:.1},{ +begin:/\?>/}]},{scope:"variable.language",match:/\$this\b/},r,N,f,{ +match:[/const/,/\s/,a],scope:{1:"keyword",3:"variable.constant"}},_,{ +scope:"function",relevance:0,beginKeywords:"fn function",end:/[;{]/, +excludeEnd:!0,illegal:"[$%\\[]",contains:[{beginKeywords:"use" +},e.UNDERSCORE_TITLE_MODE,{begin:"=>",endsParent:!0},{scope:"params", +begin:"\\(",end:"\\)",excludeBegin:!0,excludeEnd:!0,keywords:m, +contains:["self",r,f,e.C_BLOCK_COMMENT_MODE,c,d]}]},{scope:"class",variants:[{ +beginKeywords:"enum",illegal:/[($"]/},{beginKeywords:"class interface trait", +illegal:/[:($"]/}],relevance:0,end:/\{/,excludeEnd:!0,contains:[{ +beginKeywords:"extends implements"},e.UNDERSCORE_TITLE_MODE]},{ +beginKeywords:"namespace",relevance:0,end:";",illegal:/[.']/, +contains:[e.inherit(e.UNDERSCORE_TITLE_MODE,{scope:"title.class"})]},{ +beginKeywords:"use",relevance:0,end:";",contains:[{ +match:/\b(as|const|function)\b/,scope:"keyword"},e.UNDERSCORE_TITLE_MODE]},c,d]} +},grmr_php_template:e=>({name:"PHP template",subLanguage:"xml",contains:[{ +begin:/<\?(php|=)?/,end:/\?>/,subLanguage:"php",contains:[{begin:"/\\*", +end:"\\*/",skip:!0},{begin:'b"',end:'"',skip:!0},{begin:"b'",end:"'",skip:!0 +},e.inherit(e.APOS_STRING_MODE,{illegal:null,className:null,contains:null, +skip:!0}),e.inherit(e.QUOTE_STRING_MODE,{illegal:null,className:null, +contains:null,skip:!0})]}]}),grmr_plaintext:e=>({name:"Plain text", +aliases:["text","txt"],disableAutodetect:!0}),grmr_python:e=>{ +const 
n=e.regex,t=/[\p{XID_Start}_]\p{XID_Continue}*/u,a=["and","as","assert","async","await","break","case","class","continue","def","del","elif","else","except","finally","for","from","global","if","import","in","is","lambda","match","nonlocal|10","not","or","pass","raise","return","try","while","with","yield"],i={ +$pattern:/[A-Za-z]\w+|__\w+__/,keyword:a, +built_in:["__import__","abs","all","any","ascii","bin","bool","breakpoint","bytearray","bytes","callable","chr","classmethod","compile","complex","delattr","dict","dir","divmod","enumerate","eval","exec","filter","float","format","frozenset","getattr","globals","hasattr","hash","help","hex","id","input","int","isinstance","issubclass","iter","len","list","locals","map","max","memoryview","min","next","object","oct","open","ord","pow","print","property","range","repr","reversed","round","set","setattr","slice","sorted","staticmethod","str","sum","super","tuple","type","vars","zip"], +literal:["__debug__","Ellipsis","False","None","NotImplemented","True"], +type:["Any","Callable","Coroutine","Dict","List","Literal","Generic","Optional","Sequence","Set","Tuple","Type","Union"] +},r={className:"meta",begin:/^(>>>|\.\.\.) 
/},s={className:"subst",begin:/\{/, +end:/\}/,keywords:i,illegal:/#/},o={begin:/\{\{/,relevance:0},l={ +className:"string",contains:[e.BACKSLASH_ESCAPE],variants:[{ +begin:/([uU]|[bB]|[rR]|[bB][rR]|[rR][bB])?'''/,end:/'''/, +contains:[e.BACKSLASH_ESCAPE,r],relevance:10},{ +begin:/([uU]|[bB]|[rR]|[bB][rR]|[rR][bB])?"""/,end:/"""/, +contains:[e.BACKSLASH_ESCAPE,r],relevance:10},{ +begin:/([fF][rR]|[rR][fF]|[fF])'''/,end:/'''/, +contains:[e.BACKSLASH_ESCAPE,r,o,s]},{begin:/([fF][rR]|[rR][fF]|[fF])"""/, +end:/"""/,contains:[e.BACKSLASH_ESCAPE,r,o,s]},{begin:/([uU]|[rR])'/,end:/'/, +relevance:10},{begin:/([uU]|[rR])"/,end:/"/,relevance:10},{ +begin:/([bB]|[bB][rR]|[rR][bB])'/,end:/'/},{begin:/([bB]|[bB][rR]|[rR][bB])"/, +end:/"/},{begin:/([fF][rR]|[rR][fF]|[fF])'/,end:/'/, +contains:[e.BACKSLASH_ESCAPE,o,s]},{begin:/([fF][rR]|[rR][fF]|[fF])"/,end:/"/, +contains:[e.BACKSLASH_ESCAPE,o,s]},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE] +},c="[0-9](_?[0-9])*",d=`(\\b(${c}))?\\.(${c})|\\b(${c})\\.`,g="\\b|"+a.join("|"),u={ +className:"number",relevance:0,variants:[{ +begin:`(\\b(${c})|(${d}))[eE][+-]?(${c})[jJ]?(?=${g})`},{begin:`(${d})[jJ]?`},{ +begin:`\\b([1-9](_?[0-9])*|0+(_?0)*)[lLjJ]?(?=${g})`},{ +begin:`\\b0[bB](_?[01])+[lL]?(?=${g})`},{begin:`\\b0[oO](_?[0-7])+[lL]?(?=${g})` +},{begin:`\\b0[xX](_?[0-9a-fA-F])+[lL]?(?=${g})`},{begin:`\\b(${c})[jJ](?=${g})` +}]},b={className:"comment",begin:n.lookahead(/# type:/),end:/$/,keywords:i, +contains:[{begin:/# type:/},{begin:/#/,end:/\b\B/,endsWithParent:!0}]},m={ +className:"params",variants:[{className:"",begin:/\(\s*\)/,skip:!0},{begin:/\(/, +end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:i, +contains:["self",r,u,l,e.HASH_COMMENT_MODE]}]};return s.contains=[l,u,r],{ +name:"Python",aliases:["py","gyp","ipython"],unicodeRegex:!0,keywords:i, +illegal:/(<\/|\?)|=>/,contains:[r,u,{begin:/\bself\b/},{beginKeywords:"if", +relevance:0},l,b,e.HASH_COMMENT_MODE,{match:[/\bdef/,/\s+/,t],scope:{ 
+1:"keyword",3:"title.function"},contains:[m]},{variants:[{ +match:[/\bclass/,/\s+/,t,/\s*/,/\(\s*/,t,/\s*\)/]},{match:[/\bclass/,/\s+/,t]}], +scope:{1:"keyword",3:"title.class",6:"title.class.inherited"}},{ +className:"meta",begin:/^[\t ]*@/,end:/(?=#)|$/,contains:[u,m,l]}]}}, +grmr_python_repl:e=>({aliases:["pycon"],contains:[{className:"meta.prompt", +starts:{end:/ |$/,starts:{end:"$",subLanguage:"python"}},variants:[{ +begin:/^>>>(?=[ ]|$)/},{begin:/^\.\.\.(?=[ ]|$)/}]}]}),grmr_r:e=>{ +const n=e.regex,t=/(?:(?:[a-zA-Z]|\.[._a-zA-Z])[._a-zA-Z0-9]*)|\.(?!\d)/,a=n.either(/0[xX][0-9a-fA-F]+\.[0-9a-fA-F]*[pP][+-]?\d+i?/,/0[xX][0-9a-fA-F]+(?:[pP][+-]?\d+)?[Li]?/,/(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][+-]?\d+)?[Li]?/),i=/[=!<>:]=|\|\||&&|:::?|<-|<<-|->>|->|\|>|[-+*\/?!$&|:<=>@^~]|\*\*/,r=n.either(/[()]/,/[{}]/,/\[\[/,/[[\]]/,/\\/,/,/) +;return{name:"R",keywords:{$pattern:t, +keyword:"function if in break next repeat else for while", +literal:"NULL NA TRUE FALSE Inf NaN NA_integer_|10 NA_real_|10 NA_character_|10 NA_complex_|10", +built_in:"LETTERS letters month.abb month.name pi T F abs acos acosh all any anyNA Arg as.call as.character as.complex as.double as.environment as.integer as.logical as.null.default as.numeric as.raw asin asinh atan atanh attr attributes baseenv browser c call ceiling class Conj cos cosh cospi cummax cummin cumprod cumsum digamma dim dimnames emptyenv exp expression floor forceAndCall gamma gc.time globalenv Im interactive invisible is.array is.atomic is.call is.character is.complex is.double is.environment is.expression is.finite is.function is.infinite is.integer is.language is.list is.logical is.matrix is.na is.name is.nan is.null is.numeric is.object is.pairlist is.raw is.recursive is.single is.symbol lazyLoadDBfetch length lgamma list log max min missing Mod names nargs nzchar oldClass on.exit pos.to.env proc.time prod quote range Re rep retracemem return round seq_along seq_len seq.int sign signif sin sinh sinpi sqrt standardGeneric substitute 
sum switch tan tanh tanpi tracemem trigamma trunc unclass untracemem UseMethod xtfrm" +},contains:[e.COMMENT(/#'/,/$/,{contains:[{scope:"doctag",match:/@examples/, +starts:{end:n.lookahead(n.either(/\n^#'\s*(?=@[a-zA-Z]+)/,/\n^(?!#')/)), +endsParent:!0}},{scope:"doctag",begin:"@param",end:/$/,contains:[{ +scope:"variable",variants:[{match:t},{match:/`(?:\\.|[^`\\])+`/}],endsParent:!0 +}]},{scope:"doctag",match:/@[a-zA-Z]+/},{scope:"keyword",match:/\\[a-zA-Z]+/}] +}),e.HASH_COMMENT_MODE,{scope:"string",contains:[e.BACKSLASH_ESCAPE], +variants:[e.END_SAME_AS_BEGIN({begin:/[rR]"(-*)\(/,end:/\)(-*)"/ +}),e.END_SAME_AS_BEGIN({begin:/[rR]"(-*)\{/,end:/\}(-*)"/ +}),e.END_SAME_AS_BEGIN({begin:/[rR]"(-*)\[/,end:/\](-*)"/ +}),e.END_SAME_AS_BEGIN({begin:/[rR]'(-*)\(/,end:/\)(-*)'/ +}),e.END_SAME_AS_BEGIN({begin:/[rR]'(-*)\{/,end:/\}(-*)'/ +}),e.END_SAME_AS_BEGIN({begin:/[rR]'(-*)\[/,end:/\](-*)'/}),{begin:'"',end:'"', +relevance:0},{begin:"'",end:"'",relevance:0}]},{relevance:0,variants:[{scope:{ +1:"operator",2:"number"},match:[i,a]},{scope:{1:"operator",2:"number"}, +match:[/%[^%]*%/,a]},{scope:{1:"punctuation",2:"number"},match:[r,a]},{scope:{ +2:"number"},match:[/[^a-zA-Z0-9._]|^/,a]}]},{scope:{3:"operator"}, +match:[t,/\s+/,/<-/,/\s+/]},{scope:"operator",relevance:0,variants:[{match:i},{ +match:/%[^%]*%/}]},{scope:"punctuation",relevance:0,match:r},{begin:"`",end:"`", +contains:[{begin:/\\./}]}]}},grmr_ruby:e=>{ +const n=e.regex,t="([a-zA-Z_]\\w*[!?=]?|[-+~]@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?)",a=n.either(/\b([A-Z]+[a-z0-9]+)+/,/\b([A-Z]+[a-z0-9]+)+[A-Z]+/),i=n.concat(a,/(::\w+)*/),r={ +"variable.constant":["__FILE__","__LINE__","__ENCODING__"], +"variable.language":["self","super"], 
+keyword:["alias","and","begin","BEGIN","break","case","class","defined","do","else","elsif","end","END","ensure","for","if","in","module","next","not","or","redo","require","rescue","retry","return","then","undef","unless","until","when","while","yield","include","extend","prepend","public","private","protected","raise","throw"], +built_in:["proc","lambda","attr_accessor","attr_reader","attr_writer","define_method","private_constant","module_function"], +literal:["true","false","nil"]},s={className:"doctag",begin:"@[A-Za-z]+"},o={ +begin:"#<",end:">"},l=[e.COMMENT("#","$",{contains:[s] +}),e.COMMENT("^=begin","^=end",{contains:[s],relevance:10 +}),e.COMMENT("^__END__",e.MATCH_NOTHING_RE)],c={className:"subst",begin:/#\{/, +end:/\}/,keywords:r},d={className:"string",contains:[e.BACKSLASH_ESCAPE,c], +variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/`/,end:/`/},{ +begin:/%[qQwWx]?\(/,end:/\)/},{begin:/%[qQwWx]?\[/,end:/\]/},{ +begin:/%[qQwWx]?\{/,end:/\}/},{begin:/%[qQwWx]?/},{begin:/%[qQwWx]?\//, +end:/\//},{begin:/%[qQwWx]?%/,end:/%/},{begin:/%[qQwWx]?-/,end:/-/},{ +begin:/%[qQwWx]?\|/,end:/\|/},{begin:/\B\?(\\\d{1,3})/},{ +begin:/\B\?(\\x[A-Fa-f0-9]{1,2})/},{begin:/\B\?(\\u\{?[A-Fa-f0-9]{1,6}\}?)/},{ +begin:/\B\?(\\M-\\C-|\\M-\\c|\\c\\M-|\\M-|\\C-\\M-)[\x20-\x7e]/},{ +begin:/\B\?\\(c|C-)[\x20-\x7e]/},{begin:/\B\?\\?\S/},{ +begin:n.concat(/<<[-~]?'?/,n.lookahead(/(\w+)(?=\W)[^\n]*\n(?:[^\n]*\n)*?\s*\1\b/)), +contains:[e.END_SAME_AS_BEGIN({begin:/(\w+)/,end:/(\w+)/, +contains:[e.BACKSLASH_ESCAPE,c]})]}]},g="[0-9](_?[0-9])*",u={className:"number", +relevance:0,variants:[{ +begin:`\\b([1-9](_?[0-9])*|0)(\\.(${g}))?([eE][+-]?(${g})|r)?i?\\b`},{ +begin:"\\b0[dD][0-9](_?[0-9])*r?i?\\b"},{begin:"\\b0[bB][0-1](_?[0-1])*r?i?\\b" +},{begin:"\\b0[oO][0-7](_?[0-7])*r?i?\\b"},{ +begin:"\\b0[xX][0-9a-fA-F](_?[0-9a-fA-F])*r?i?\\b"},{ +begin:"\\b0(_?[0-7])+r?i?\\b"}]},b={variants:[{match:/\(\)/},{ +className:"params",begin:/\(/,end:/(?=\))/,excludeBegin:!0,endsParent:!0, 
+keywords:r}]},m=[d,{variants:[{match:[/class\s+/,i,/\s+<\s+/,i]},{ +match:[/\b(class|module)\s+/,i]}],scope:{2:"title.class", +4:"title.class.inherited"},keywords:r},{match:[/(include|extend)\s+/,i],scope:{ +2:"title.class"},keywords:r},{relevance:0,match:[i,/\.new[. (]/],scope:{ +1:"title.class"}},{relevance:0,match:/\b[A-Z][A-Z_0-9]+\b/, +className:"variable.constant"},{relevance:0,match:a,scope:"title.class"},{ +match:[/def/,/\s+/,t],scope:{1:"keyword",3:"title.function"},contains:[b]},{ +begin:e.IDENT_RE+"::"},{className:"symbol", +begin:e.UNDERSCORE_IDENT_RE+"(!|\\?)?:",relevance:0},{className:"symbol", +begin:":(?!\\s)",contains:[d,{begin:t}],relevance:0},u,{className:"variable", +begin:"(\\$\\W)|((\\$|@@?)(\\w+))(?=[^@$?])(?![A-Za-z])(?![@$?'])"},{ +className:"params",begin:/\|/,end:/\|/,excludeBegin:!0,excludeEnd:!0, +relevance:0,keywords:r},{begin:"("+e.RE_STARTERS_RE+"|unless)\\s*", +keywords:"unless",contains:[{className:"regexp",contains:[e.BACKSLASH_ESCAPE,c], +illegal:/\n/,variants:[{begin:"/",end:"/[a-z]*"},{begin:/%r\{/,end:/\}[a-z]*/},{ +begin:"%r\\(",end:"\\)[a-z]*"},{begin:"%r!",end:"![a-z]*"},{begin:"%r\\[", +end:"\\][a-z]*"}]}].concat(o,l),relevance:0}].concat(o,l) +;c.contains=m,b.contains=m;const p=[{begin:/^\s*=>/,starts:{end:"$",contains:m} +},{className:"meta.prompt", +begin:"^([>?]>|[\\w#]+\\(\\w+\\):\\d+:\\d+[>*]|(\\w+-)?\\d+\\.\\d+\\.\\d+(p\\d+)?[^\\d][^>]+>)(?=[ ])", +starts:{end:"$",keywords:r,contains:m}}];return l.unshift(o),{name:"Ruby", +aliases:["rb","gemspec","podspec","thor","irb"],keywords:r,illegal:/\/\*/, +contains:[e.SHEBANG({binary:"ruby"})].concat(p).concat(l).concat(m)}}, +grmr_rust:e=>{const n=e.regex,t={className:"title.function.invoke",relevance:0, +begin:n.concat(/\b/,/(?!let|for|while|if|else|match\b)/,e.IDENT_RE,n.lookahead(/\s*\(/)) +},a="([ui](8|16|32|64|128|size)|f(32|64))?",i=["drop 
","Copy","Send","Sized","Sync","Drop","Fn","FnMut","FnOnce","ToOwned","Clone","Debug","PartialEq","PartialOrd","Eq","Ord","AsRef","AsMut","Into","From","Default","Iterator","Extend","IntoIterator","DoubleEndedIterator","ExactSizeIterator","SliceConcatExt","ToString","assert!","assert_eq!","bitflags!","bytes!","cfg!","col!","concat!","concat_idents!","debug_assert!","debug_assert_eq!","env!","eprintln!","panic!","file!","format!","format_args!","include_bytes!","include_str!","line!","local_data_key!","module_path!","option_env!","print!","println!","select!","stringify!","try!","unimplemented!","unreachable!","vec!","write!","writeln!","macro_rules!","assert_ne!","debug_assert_ne!"],r=["i8","i16","i32","i64","i128","isize","u8","u16","u32","u64","u128","usize","f32","f64","str","char","bool","Box","Option","Result","String","Vec"] +;return{name:"Rust",aliases:["rs"],keywords:{$pattern:e.IDENT_RE+"!?",type:r, +keyword:["abstract","as","async","await","become","box","break","const","continue","crate","do","dyn","else","enum","extern","false","final","fn","for","if","impl","in","let","loop","macro","match","mod","move","mut","override","priv","pub","ref","return","self","Self","static","struct","super","trait","true","try","type","typeof","unsafe","unsized","use","virtual","where","while","yield"], +literal:["true","false","Some","None","Ok","Err"],built_in:i},illegal:""},t]}}, +grmr_scss:e=>{const n=ie(e),t=le,a=oe,i="@[a-z-]+",r={className:"variable", +begin:"(\\$[a-zA-Z-][a-zA-Z0-9_-]*)\\b",relevance:0};return{name:"SCSS", +case_insensitive:!0,illegal:"[=/|']", +contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,n.CSS_NUMBER_MODE,{ +className:"selector-id",begin:"#[A-Za-z0-9_-]+",relevance:0},{ +className:"selector-class",begin:"\\.[A-Za-z0-9_-]+",relevance:0 +},n.ATTRIBUTE_SELECTOR_MODE,{className:"selector-tag", +begin:"\\b("+re.join("|")+")\\b",relevance:0},{className:"selector-pseudo", +begin:":("+a.join("|")+")"},{className:"selector-pseudo", 
+begin:":(:)?("+t.join("|")+")"},r,{begin:/\(/,end:/\)/, +contains:[n.CSS_NUMBER_MODE]},n.CSS_VARIABLE,{className:"attribute", +begin:"\\b("+ce.join("|")+")\\b"},{ +begin:"\\b(whitespace|wait|w-resize|visible|vertical-text|vertical-ideographic|uppercase|upper-roman|upper-alpha|underline|transparent|top|thin|thick|text|text-top|text-bottom|tb-rl|table-header-group|table-footer-group|sw-resize|super|strict|static|square|solid|small-caps|separate|se-resize|scroll|s-resize|rtl|row-resize|ridge|right|repeat|repeat-y|repeat-x|relative|progress|pointer|overline|outside|outset|oblique|nowrap|not-allowed|normal|none|nw-resize|no-repeat|no-drop|newspaper|ne-resize|n-resize|move|middle|medium|ltr|lr-tb|lowercase|lower-roman|lower-alpha|loose|list-item|line|line-through|line-edge|lighter|left|keep-all|justify|italic|inter-word|inter-ideograph|inside|inset|inline|inline-block|inherit|inactive|ideograph-space|ideograph-parenthesis|ideograph-numeric|ideograph-alpha|horizontal|hidden|help|hand|groove|fixed|ellipsis|e-resize|double|dotted|distribute|distribute-space|distribute-letter|distribute-all-lines|disc|disabled|default|decimal|dashed|crosshair|collapse|col-resize|circle|char|center|capitalize|break-word|break-all|bottom|both|bolder|bold|block|bidi-override|below|baseline|auto|always|all-scroll|absolute|table|table-cell)\\b" +},{begin:/:/,end:/[;}{]/,relevance:0, +contains:[n.BLOCK_COMMENT,r,n.HEXCOLOR,n.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,n.IMPORTANT,n.FUNCTION_DISPATCH] +},{begin:"@(page|font-face)",keywords:{$pattern:i,keyword:"@page @font-face"}},{ +begin:"@",end:"[{;]",returnBegin:!0,keywords:{$pattern:/[a-z-]+/, +keyword:"and or not only",attribute:se.join(" ")},contains:[{begin:i, +className:"keyword"},{begin:/[a-z-]+(?=:)/,className:"attribute" +},r,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,n.HEXCOLOR,n.CSS_NUMBER_MODE] +},n.FUNCTION_DISPATCH]}},grmr_shell:e=>({name:"Shell Session", 
+aliases:["console","shellsession"],contains:[{className:"meta.prompt", +begin:/^\s{0,3}[/~\w\d[\]()@-]*[>%$#][ ]?/,starts:{end:/[^\\](?=\s*$)/, +subLanguage:"bash"}}]}),grmr_sql:e=>{ +const n=e.regex,t=e.COMMENT("--","$"),a=["true","false","unknown"],i=["bigint","binary","blob","boolean","char","character","clob","date","dec","decfloat","decimal","float","int","integer","interval","nchar","nclob","national","numeric","real","row","smallint","time","timestamp","varchar","varying","varbinary"],r=["abs","acos","array_agg","asin","atan","avg","cast","ceil","ceiling","coalesce","corr","cos","cosh","count","covar_pop","covar_samp","cume_dist","dense_rank","deref","element","exp","extract","first_value","floor","json_array","json_arrayagg","json_exists","json_object","json_objectagg","json_query","json_table","json_table_primitive","json_value","lag","last_value","lead","listagg","ln","log","log10","lower","max","min","mod","nth_value","ntile","nullif","percent_rank","percentile_cont","percentile_disc","position","position_regex","power","rank","regr_avgx","regr_avgy","regr_count","regr_intercept","regr_r2","regr_slope","regr_sxx","regr_sxy","regr_syy","row_number","sin","sinh","sqrt","stddev_pop","stddev_samp","substring","substring_regex","sum","tan","tanh","translate","translate_regex","treat","trim","trim_array","unnest","upper","value_of","var_pop","var_samp","width_bucket"],s=["create table","insert into","primary key","foreign key","not null","alter table","add constraint","grouping sets","on overflow","character set","respect nulls","ignore nulls","nulls first","nulls last","depth first","breadth 
first"],o=r,l=["abs","acos","all","allocate","alter","and","any","are","array","array_agg","array_max_cardinality","as","asensitive","asin","asymmetric","at","atan","atomic","authorization","avg","begin","begin_frame","begin_partition","between","bigint","binary","blob","boolean","both","by","call","called","cardinality","cascaded","case","cast","ceil","ceiling","char","char_length","character","character_length","check","classifier","clob","close","coalesce","collate","collect","column","commit","condition","connect","constraint","contains","convert","copy","corr","corresponding","cos","cosh","count","covar_pop","covar_samp","create","cross","cube","cume_dist","current","current_catalog","current_date","current_default_transform_group","current_path","current_role","current_row","current_schema","current_time","current_timestamp","current_path","current_role","current_transform_group_for_type","current_user","cursor","cycle","date","day","deallocate","dec","decimal","decfloat","declare","default","define","delete","dense_rank","deref","describe","deterministic","disconnect","distinct","double","drop","dynamic","each","element","else","empty","end","end_frame","end_partition","end-exec","equals","escape","every","except","exec","execute","exists","exp","external","extract","false","fetch","filter","first_value","float","floor","for","foreign","frame_row","free","from","full","function","fusion","get","global","grant","group","grouping","groups","having","hold","hour","identity","in","indicator","initial","inner","inout","insensitive","insert","int","integer","intersect","intersection","interval","into","is","join","json_array","json_arrayagg","json_exists","json_object","json_objectagg","json_query","json_table","json_table_primitive","json_value","lag","language","large","last_value","lateral","lead","leading","left","like","like_regex","listagg","ln","local","localtime","localtimestamp","log","log10","lower","match","match_number","match_recognize","matches","max"
,"member","merge","method","min","minute","mod","modifies","module","month","multiset","national","natural","nchar","nclob","new","no","none","normalize","not","nth_value","ntile","null","nullif","numeric","octet_length","occurrences_regex","of","offset","old","omit","on","one","only","open","or","order","out","outer","over","overlaps","overlay","parameter","partition","pattern","per","percent","percent_rank","percentile_cont","percentile_disc","period","portion","position","position_regex","power","precedes","precision","prepare","primary","procedure","ptf","range","rank","reads","real","recursive","ref","references","referencing","regr_avgx","regr_avgy","regr_count","regr_intercept","regr_r2","regr_slope","regr_sxx","regr_sxy","regr_syy","release","result","return","returns","revoke","right","rollback","rollup","row","row_number","rows","running","savepoint","scope","scroll","search","second","seek","select","sensitive","session_user","set","show","similar","sin","sinh","skip","smallint","some","specific","specifictype","sql","sqlexception","sqlstate","sqlwarning","sqrt","start","static","stddev_pop","stddev_samp","submultiset","subset","substring","substring_regex","succeeds","sum","symmetric","system","system_time","system_user","table","tablesample","tan","tanh","then","time","timestamp","timezone_hour","timezone_minute","to","trailing","translate","translate_regex","translation","treat","trigger","trim","trim_array","true","truncate","uescape","union","unique","unknown","unnest","update","upper","user","using","value","values","value_of","var_pop","var_samp","varbinary","varchar","varying","versioning","when","whenever","where","width_bucket","window","with","within","without","year","add","asc","collation","desc","final","first","last","view"].filter((e=>!r.includes(e))),c={ +begin:n.concat(/\b/,n.either(...o),/\s*\(/),relevance:0,keywords:{built_in:o}} +;return{name:"SQL",case_insensitive:!0,illegal:/[{}]|<\//,keywords:{ 
+$pattern:/\b[\w\.]+/,keyword:((e,{exceptions:n,when:t}={})=>{const a=t +;return n=n||[],e.map((e=>e.match(/\|\d+$/)||n.includes(e)?e:a(e)?e+"|0":e)) +})(l,{when:e=>e.length<3}),literal:a,type:i, +built_in:["current_catalog","current_date","current_default_transform_group","current_path","current_role","current_schema","current_transform_group_for_type","current_user","session_user","system_time","system_user","current_time","localtime","current_timestamp","localtimestamp"] +},contains:[{begin:n.either(...s),relevance:0,keywords:{$pattern:/[\w\.]+/, +keyword:l.concat(s),literal:a,type:i}},{className:"type", +begin:n.either("double precision","large object","with timezone","without timezone") +},c,{className:"variable",begin:/@[a-z0-9][a-z0-9_]*/},{className:"string", +variants:[{begin:/'/,end:/'/,contains:[{begin:/''/}]}]},{begin:/"/,end:/"/, +contains:[{begin:/""/}]},e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE,t,{ +className:"operator",begin:/[-+*/=%^~]|&&?|\|\|?|!=?|<(?:=>?|<|>)?|>[>=]?/, +relevance:0}]}},grmr_swift:e=>{const n={match:/\s+/,relevance:0 +},t=e.COMMENT("/\\*","\\*/",{contains:["self"]}),a=[e.C_LINE_COMMENT_MODE,t],i={ +match:[/\./,m(...xe,...Me)],className:{2:"keyword"}},r={match:b(/\./,m(...Ae)), +relevance:0},s=Ae.filter((e=>"string"==typeof e)).concat(["_|0"]),o={variants:[{ +className:"keyword", +match:m(...Ae.filter((e=>"string"!=typeof e)).concat(Se).map(ke),...Me)}]},l={ +$pattern:m(/\b\w+/,/#\w+/),keyword:s.concat(Re),literal:Ce},c=[i,r,o],g=[{ +match:b(/\./,m(...De)),relevance:0},{className:"built_in", +match:b(/\b/,m(...De),/(?=\()/)}],u={match:/->/,relevance:0},p=[u,{ +className:"operator",relevance:0,variants:[{match:Be},{match:`\\.(\\.|${Le})+`}] +}],_="([0-9]_*)+",h="([0-9a-fA-F]_*)+",f={className:"number",relevance:0, +variants:[{match:`\\b(${_})(\\.(${_}))?([eE][+-]?(${_}))?\\b`},{ +match:`\\b0x(${h})(\\.(${h}))?([pP][+-]?(${_}))?\\b`},{match:/\b0o([0-7]_*)+\b/ +},{match:/\b0b([01]_*)+\b/}]},E=(e="")=>({className:"subst",variants:[{ 
+match:b(/\\/,e,/[0\\tnr"']/)},{match:b(/\\/,e,/u\{[0-9a-fA-F]{1,8}\}/)}] +}),y=(e="")=>({className:"subst",match:b(/\\/,e,/[\t ]*(?:[\r\n]|\r\n)/) +}),N=(e="")=>({className:"subst",label:"interpol",begin:b(/\\/,e,/\(/),end:/\)/ +}),w=(e="")=>({begin:b(e,/"""/),end:b(/"""/,e),contains:[E(e),y(e),N(e)] +}),v=(e="")=>({begin:b(e,/"/),end:b(/"/,e),contains:[E(e),N(e)]}),O={ +className:"string", +variants:[w(),w("#"),w("##"),w("###"),v(),v("#"),v("##"),v("###")] +},k=[e.BACKSLASH_ESCAPE,{begin:/\[/,end:/\]/,relevance:0, +contains:[e.BACKSLASH_ESCAPE]}],x={begin:/\/[^\s](?=[^/\n]*\/)/,end:/\//, +contains:k},M=e=>{const n=b(e,/\//),t=b(/\//,e);return{begin:n,end:t, +contains:[...k,{scope:"comment",begin:`#(?!.*${t})`,end:/$/}]}},S={ +scope:"regexp",variants:[M("###"),M("##"),M("#"),x]},A={match:b(/`/,Fe,/`/) +},C=[A,{className:"variable",match:/\$\d+/},{className:"variable", +match:`\\$${ze}+`}],T=[{match:/(@|#(un)?)available/,scope:"keyword",starts:{ +contains:[{begin:/\(/,end:/\)/,keywords:Pe,contains:[...p,f,O]}]}},{ +scope:"keyword",match:b(/@/,m(...je))},{scope:"meta",match:b(/@/,Fe)}],R={ +match:d(/\b[A-Z]/),relevance:0,contains:[{className:"type", +match:b(/(AV|CA|CF|CG|CI|CL|CM|CN|CT|MK|MP|MTK|MTL|NS|SCN|SK|UI|WK|XC)/,ze,"+") +},{className:"type",match:Ue,relevance:0},{match:/[?!]+/,relevance:0},{ +match:/\.\.\./,relevance:0},{match:b(/\s+&\s+/,d(Ue)),relevance:0}]},D={ +begin://,keywords:l,contains:[...a,...c,...T,u,R]};R.contains.push(D) +;const I={begin:/\(/,end:/\)/,relevance:0,keywords:l,contains:["self",{ +match:b(Fe,/\s*:/),keywords:"_|0",relevance:0 +},...a,S,...c,...g,...p,f,O,...C,...T,R]},L={begin://, +keywords:"repeat each",contains:[...a,R]},B={begin:/\(/,end:/\)/,keywords:l, +contains:[{begin:m(d(b(Fe,/\s*:/)),d(b(Fe,/\s+/,Fe,/\s*:/))),end:/:/, +relevance:0,contains:[{className:"keyword",match:/\b_\b/},{className:"params", +match:Fe}]},...a,...c,...p,f,O,...T,R,I],endsParent:!0,illegal:/["']/},$={ 
+match:[/(func|macro)/,/\s+/,m(A.match,Fe,Be)],className:{1:"keyword", +3:"title.function"},contains:[L,B,n],illegal:[/\[/,/%/]},z={ +match:[/\b(?:subscript|init[?!]?)/,/\s*(?=[<(])/],className:{1:"keyword"}, +contains:[L,B,n],illegal:/\[|%/},F={match:[/operator/,/\s+/,Be],className:{ +1:"keyword",3:"title"}},U={begin:[/precedencegroup/,/\s+/,Ue],className:{ +1:"keyword",3:"title"},contains:[R],keywords:[...Te,...Ce],end:/}/} +;for(const e of O.variants){const n=e.contains.find((e=>"interpol"===e.label)) +;n.keywords=l;const t=[...c,...g,...p,f,O,...C];n.contains=[...t,{begin:/\(/, +end:/\)/,contains:["self",...t]}]}return{name:"Swift",keywords:l, +contains:[...a,$,z,{beginKeywords:"struct protocol class extension enum actor", +end:"\\{",excludeEnd:!0,keywords:l,contains:[e.inherit(e.TITLE_MODE,{ +className:"title.class",begin:/[A-Za-z$_][\u00C0-\u02B80-9A-Za-z$_]*/}),...c] +},F,U,{beginKeywords:"import",end:/$/,contains:[...a],relevance:0 +},S,...c,...g,...p,f,O,...C,...T,R,I]}},grmr_typescript:e=>{ +const n=Oe(e),t=_e,a=["any","void","number","boolean","string","object","never","symbol","bigint","unknown"],i={ +beginKeywords:"namespace",end:/\{/,excludeEnd:!0, +contains:[n.exports.CLASS_REFERENCE]},r={beginKeywords:"interface",end:/\{/, +excludeEnd:!0,keywords:{keyword:"interface extends",built_in:a}, +contains:[n.exports.CLASS_REFERENCE]},s={$pattern:_e, +keyword:he.concat(["type","namespace","interface","public","private","protected","implements","declare","abstract","readonly","enum","override"]), +literal:fe,built_in:ve.concat(a),"variable.language":we},o={className:"meta", +begin:"@"+t},l=(e,n,t)=>{const a=e.contains.findIndex((e=>e.label===n)) +;if(-1===a)throw Error("can not find mode to replace");e.contains.splice(a,1,t)} +;return Object.assign(n.keywords,s), +n.exports.PARAMS_CONTAINS.push(o),n.contains=n.contains.concat([o,i,r]), +l(n,"shebang",e.SHEBANG()),l(n,"use_strict",{className:"meta",relevance:10, +begin:/^\s*['"]use strict['"]/ 
+}),n.contains.find((e=>"func.def"===e.label)).relevance=0,Object.assign(n,{ +name:"TypeScript",aliases:["ts","tsx","mts","cts"]}),n},grmr_vbnet:e=>{ +const n=e.regex,t=/\d{1,2}\/\d{1,2}\/\d{4}/,a=/\d{4}-\d{1,2}-\d{1,2}/,i=/(\d|1[012])(:\d+){0,2} *(AM|PM)/,r=/\d{1,2}(:\d{1,2}){1,2}/,s={ +className:"literal",variants:[{begin:n.concat(/# */,n.either(a,t),/ *#/)},{ +begin:n.concat(/# */,r,/ *#/)},{begin:n.concat(/# */,i,/ *#/)},{ +begin:n.concat(/# */,n.either(a,t),/ +/,n.either(i,r),/ *#/)}] +},o=e.COMMENT(/'''/,/$/,{contains:[{className:"doctag",begin:/<\/?/,end:/>/}] +}),l=e.COMMENT(null,/$/,{variants:[{begin:/'/},{begin:/([\t ]|^)REM(?=\s)/}]}) +;return{name:"Visual Basic .NET",aliases:["vb"],case_insensitive:!0, +classNameAliases:{label:"symbol"},keywords:{ +keyword:"addhandler alias aggregate ansi as async assembly auto binary by byref byval call case catch class compare const continue custom declare default delegate dim distinct do each equals else elseif end enum erase error event exit explicit finally for friend from function get global goto group handles if implements imports in inherits interface into iterator join key let lib loop me mid module mustinherit mustoverride mybase myclass namespace narrowing new next notinheritable notoverridable of off on operator option optional order overloads overridable overrides paramarray partial preserve private property protected public raiseevent readonly redim removehandler resume return select set shadows shared skip static step stop structure strict sub synclock take text then throw to try unicode until using when where while widening with withevents writeonly yield", +built_in:"addressof and andalso await directcast gettype getxmlnamespace is isfalse isnot istrue like mod nameof new not or orelse trycast typeof xor cbool cbyte cchar cdate cdbl cdec cint clng cobj csbyte cshort csng cstr cuint culng cushort", +type:"boolean byte char date decimal double integer long object sbyte short single string uinteger ulong 
ushort", +literal:"true false nothing"}, +illegal:"//|\\{|\\}|endif|gosub|variant|wend|^\\$ ",contains:[{ +className:"string",begin:/"(""|[^/n])"C\b/},{className:"string",begin:/"/, +end:/"/,illegal:/\n/,contains:[{begin:/""/}]},s,{className:"number",relevance:0, +variants:[{begin:/\b\d[\d_]*((\.[\d_]+(E[+-]?[\d_]+)?)|(E[+-]?[\d_]+))[RFD@!#]?/ +},{begin:/\b\d[\d_]*((U?[SIL])|[%&])?/},{begin:/&H[\dA-F_]+((U?[SIL])|[%&])?/},{ +begin:/&O[0-7_]+((U?[SIL])|[%&])?/},{begin:/&B[01_]+((U?[SIL])|[%&])?/}]},{ +className:"label",begin:/^\w+:/},o,l,{className:"meta", +begin:/[\t ]*#(const|disable|else|elseif|enable|end|externalsource|if|region)\b/, +end:/$/,keywords:{ +keyword:"const disable else elseif enable end externalsource if region then"}, +contains:[l]}]}},grmr_wasm:e=>{e.regex;const n=e.COMMENT(/\(;/,/;\)/) +;return n.contains.push("self"),{name:"WebAssembly",keywords:{$pattern:/[\w.]+/, +keyword:["anyfunc","block","br","br_if","br_table","call","call_indirect","data","drop","elem","else","end","export","func","global.get","global.set","local.get","local.set","local.tee","get_global","get_local","global","if","import","local","loop","memory","memory.grow","memory.size","module","mut","nop","offset","param","result","return","select","set_global","set_local","start","table","tee_local","then","type","unreachable"] +},contains:[e.COMMENT(/;;/,/$/),n,{match:[/(?:offset|align)/,/\s*/,/=/], +className:{1:"keyword",3:"operator"}},{className:"variable",begin:/\$[\w_]+/},{ +match:/(\((?!;)|\))+/,className:"punctuation",relevance:0},{ +begin:[/(?:func|call|call_indirect)/,/\s+/,/\$[^\s)]+/],className:{1:"keyword", +3:"title.function"}},e.QUOTE_STRING_MODE,{match:/(i32|i64|f32|f64)(?!\.)/, +className:"type"},{className:"keyword", 
+match:/\b(f32|f64|i32|i64)(?:\.(?:abs|add|and|ceil|clz|const|convert_[su]\/i(?:32|64)|copysign|ctz|demote\/f64|div(?:_[su])?|eqz?|extend_[su]\/i32|floor|ge(?:_[su])?|gt(?:_[su])?|le(?:_[su])?|load(?:(?:8|16|32)_[su])?|lt(?:_[su])?|max|min|mul|nearest|neg?|or|popcnt|promote\/f32|reinterpret\/[fi](?:32|64)|rem_[su]|rot[lr]|shl|shr_[su]|store(?:8|16|32)?|sqrt|sub|trunc(?:_[su]\/f(?:32|64))?|wrap\/i64|xor))\b/ +},{className:"number",relevance:0, +match:/[+-]?\b(?:\d(?:_?\d)*(?:\.\d(?:_?\d)*)?(?:[eE][+-]?\d(?:_?\d)*)?|0x[\da-fA-F](?:_?[\da-fA-F])*(?:\.[\da-fA-F](?:_?[\da-fA-D])*)?(?:[pP][+-]?\d(?:_?\d)*)?)\b|\binf\b|\bnan(?::0x[\da-fA-F](?:_?[\da-fA-D])*)?\b/ +}]}},grmr_xml:e=>{ +const n=e.regex,t=n.concat(/[\p{L}_]/u,n.optional(/[\p{L}0-9_.-]*:/u),/[\p{L}0-9_.-]*/u),a={ +className:"symbol",begin:/&[a-z]+;|&#[0-9]+;|&#x[a-f0-9]+;/},i={begin:/\s/, +contains:[{className:"keyword",begin:/#?[a-z_][a-z1-9_-]+/,illegal:/\n/}] +},r=e.inherit(i,{begin:/\(/,end:/\)/}),s=e.inherit(e.APOS_STRING_MODE,{ +className:"string"}),o=e.inherit(e.QUOTE_STRING_MODE,{className:"string"}),l={ +endsWithParent:!0,illegal:/`]+/}]}]}]};return{ +name:"HTML, XML", +aliases:["html","xhtml","rss","atom","xjb","xsd","xsl","plist","wsf","svg"], +case_insensitive:!0,unicodeRegex:!0,contains:[{className:"meta",begin://,relevance:10,contains:[i,o,s,r,{begin:/\[/,end:/\]/,contains:[{ +className:"meta",begin://,contains:[i,r,o,s]}]}] +},e.COMMENT(//,{relevance:10}),{begin://, +relevance:10},a,{className:"meta",end:/\?>/,variants:[{begin:/<\?xml/, +relevance:10,contains:[o]},{begin:/<\?[a-z][a-z0-9]+/}]},{className:"tag", +begin:/)/,end:/>/,keywords:{name:"style"},contains:[l],starts:{ +end:/<\/style>/,returnEnd:!0,subLanguage:["css","xml"]}},{className:"tag", +begin:/)/,end:/>/,keywords:{name:"script"},contains:[l],starts:{ +end:/<\/script>/,returnEnd:!0,subLanguage:["javascript","handlebars","xml"]}},{ +className:"tag",begin:/<>|<\/>/},{className:"tag", +begin:n.concat(//,/>/,/\s/)))), 
+end:/\/?>/,contains:[{className:"name",begin:t,relevance:0,starts:l}]},{ +className:"tag",begin:n.concat(/<\//,n.lookahead(n.concat(t,/>/))),contains:[{ +className:"name",begin:t,relevance:0},{begin:/>/,relevance:0,endsParent:!0}]}]} +},grmr_yaml:e=>{ +const n="true false yes no null",t="[\\w#;/?:@&=+$,.~*'()[\\]]+",a={ +className:"string",relevance:0,variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/ +},{begin:/\S+/}],contains:[e.BACKSLASH_ESCAPE,{className:"template-variable", +variants:[{begin:/\{\{/,end:/\}\}/},{begin:/%\{/,end:/\}/}]}]},i=e.inherit(a,{ +variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/[^\s,{}[\]]+/}]}),r={ +end:",",endsWithParent:!0,excludeEnd:!0,keywords:n,relevance:0},s={begin:/\{/, +end:/\}/,contains:[r],illegal:"\\n",relevance:0},o={begin:"\\[",end:"\\]", +contains:[r],illegal:"\\n",relevance:0},l=[{className:"attr",variants:[{ +begin:"\\w[\\w :\\/.-]*:(?=[ \t]|$)"},{begin:'"\\w[\\w :\\/.-]*":(?=[ \t]|$)'},{ +begin:"'\\w[\\w :\\/.-]*':(?=[ \t]|$)"}]},{className:"meta",begin:"^---\\s*$", +relevance:10},{className:"string", +begin:"[\\|>]([1-9]?[+-])?[ ]*\\n( +)[^ ][^\\n]*\\n(\\2[^\\n]+\\n?)*"},{ +begin:"<%[%=-]?",end:"[%-]?%>",subLanguage:"ruby",excludeBegin:!0,excludeEnd:!0, +relevance:0},{className:"type",begin:"!\\w+!"+t},{className:"type", +begin:"!<"+t+">"},{className:"type",begin:"!"+t},{className:"type",begin:"!!"+t +},{className:"meta",begin:"&"+e.UNDERSCORE_IDENT_RE+"$"},{className:"meta", +begin:"\\*"+e.UNDERSCORE_IDENT_RE+"$"},{className:"bullet",begin:"-(?=[ ]|$)", +relevance:0},e.HASH_COMMENT_MODE,{beginKeywords:n,keywords:{literal:n}},{ +className:"number", +begin:"\\b[0-9]{4}(-[0-9][0-9]){0,2}([Tt \\t][0-9][0-9]?(:[0-9][0-9]){2})?(\\.[0-9]*)?([ \\t])*(Z|[-+][0-9][0-9]?(:[0-9][0-9])?)?\\b" +},{className:"number",begin:e.C_NUMBER_RE+"\\b",relevance:0},s,o,a],c=[...l] +;return c.pop(),c.push(i),r.contains=c,{name:"YAML",case_insensitive:!0, +aliases:["yml"],contains:l}}});const He=ae;for(const e of Object.keys(Ke)){ 
+const n=e.replace("grmr_","").replace("_","-");He.registerLanguage(n,Ke[e])} +return He}() +;"object"==typeof exports&&"undefined"!=typeof module&&(module.exports=hljs);/*! `erlang` grammar compiled for Highlight.js 11.9.0 */ +(()=>{var e=(()=>{"use strict";return e=>{ +const n="[a-z'][a-zA-Z0-9_']*",r="("+n+":"+n+"|"+n+")",a={ +keyword:"after and andalso|10 band begin bnot bor bsl bzr bxor case catch cond div end fun if let not of orelse|10 query receive rem try when xor", +literal:"false true"},i=e.COMMENT("%","$"),s={className:"number", +begin:"\\b(\\d+(_\\d+)*#[a-fA-F0-9]+(_[a-fA-F0-9]+)*|\\d+(_\\d+)*(\\.\\d+(_\\d+)*)?([eE][-+]?\\d+)?)", +relevance:0},c={begin:"fun\\s+"+n+"/\\d+"},t={begin:r+"\\(",end:"\\)", +returnBegin:!0,relevance:0,contains:[{begin:r,relevance:0},{begin:"\\(", +end:"\\)",endsWithParent:!0,returnEnd:!0,relevance:0}]},d={begin:/\{/,end:/\}/, +relevance:0},o={begin:"\\b_([A-Z][A-Za-z0-9_]*)?",relevance:0},l={ +begin:"[A-Z][a-zA-Z0-9_]*",relevance:0},b={begin:"#"+e.UNDERSCORE_IDENT_RE, +relevance:0,returnBegin:!0,contains:[{begin:"#"+e.UNDERSCORE_IDENT_RE, +relevance:0},{begin:/\{/,end:/\}/,relevance:0}]},g={ +beginKeywords:"fun receive if try case",end:"end",keywords:a} +;g.contains=[i,c,e.inherit(e.APOS_STRING_MODE,{className:"" +}),g,t,e.QUOTE_STRING_MODE,s,d,o,l,b] +;const E=[i,c,g,t,e.QUOTE_STRING_MODE,s,d,o,l,b] +;t.contains[1].contains=E,d.contains=E,b.contains[1].contains=E;const u={ +className:"params",begin:"\\(",end:"\\)",contains:E};return{name:"Erlang", +aliases:["erl"],keywords:a,illegal:"(",returnBegin:!0, +illegal:"\\(|#|//|/\\*|\\\\|:|;",contains:[u,e.inherit(e.TITLE_MODE,{begin:n})], +starts:{end:";|\\.",keywords:a,contains:E}},i,{begin:"^-",end:"\\.",relevance:0, +excludeEnd:!0,returnBegin:!0,keywords:{$pattern:"-"+e.IDENT_RE, 
+keyword:["-module","-record","-undef","-export","-ifdef","-ifndef","-author","-copyright","-doc","-vsn","-import","-include","-include_lib","-compile","-define","-else","-endif","-file","-behaviour","-behavior","-spec"].map((e=>e+"|1.5")).join(" ") +},contains:[u]},s,e.QUOTE_STRING_MODE,b,o,l,d,{begin:/\.$/}]}}})() +;hljs.registerLanguage("erlang",e)})(); \ No newline at end of file From bd77914d66813dff2797df9fb9dd752bd367c8ed Mon Sep 17 00:00:00 2001 From: Dylan Shade <63427984+dpshade@users.noreply.github.com> Date: Fri, 19 Sep 2025 12:28:33 -0400 Subject: [PATCH 10/17] docs: Enhance Erlang documentation extraction and formatting - Update `build-literate-erlang.sh` to improve the extraction of module and function documentation by preserving paragraph breaks. - Introduce a new function `write_code_with_inline_comments` to handle inline comments in Erlang code, ensuring proper formatting and structure in the output. - Refactor existing code to enhance readability and maintainability while processing Erlang documentation. 
--- docs/build-literate-erlang.sh | 115 ++++++++++++++++++++++++++++++---- 1 file changed, 103 insertions(+), 12 deletions(-) diff --git a/docs/build-literate-erlang.sh b/docs/build-literate-erlang.sh index e55589c6d..6bf71fa7f 100755 --- a/docs/build-literate-erlang.sh +++ b/docs/build-literate-erlang.sh @@ -117,7 +117,7 @@ extract_module_doc() { fi done < "$file" - # Clean up @doc prefixes, empty lines, and convert edocs syntax to markdown + # Clean up @doc prefixes and convert edocs syntax to markdown, preserving paragraph breaks echo "$doc_content" | \ sed 's/^@doc$//' | \ sed 's/^@doc //' | \ @@ -125,15 +125,27 @@ extract_module_doc() { sed 's/^@author /**Author:** /' | \ sed 's/^@copyright /**Copyright:** /' | \ sed 's/^---*$//' | \ - sed '/^[[:space:]]*$/d' | \ - sed "s/\`\([^']*\)'/\`\1\`/g" + sed "s/\`\([^']*\)'/\`\1\`/g" | \ + awk ' + BEGIN { prev_empty = 0 } + /^[[:space:]]*$/ { + if (!prev_empty) { + print "" + prev_empty = 1 + } + next + } + { + print $0 + prev_empty = 0 + }' } # --- Function to extract function documentation --- extract_function_doc() { local content="$1" - # Remove leading %% or % and @doc tags, then convert edocs syntax to markdown + # Remove leading %% or % and @doc tags, then convert edocs syntax to markdown, preserving paragraph breaks echo "$content" | \ sed 's/^%% *//' | \ sed 's/^% *//' | \ @@ -143,8 +155,20 @@ extract_function_doc() { sed 's/^@author /**Author:** /' | \ sed 's/^@copyright /**Copyright:** /' | \ sed 's/^---*$//' | \ - sed '/^$/d' | \ - sed "s/\`\([^']*\)'/\`\1\`/g" + sed "s/\`\([^']*\)'/\`\1\`/g" | \ + awk ' + BEGIN { prev_empty = 0 } + /^[[:space:]]*$/ { + if (!prev_empty) { + print "" + prev_empty = 1 + } + next + } + { + print $0 + prev_empty = 0 + }' } # --- Function to process a single Erlang file --- @@ -250,8 +274,9 @@ EOF # Continue collecting function content if [ "$in_function" = true ]; then function_content+="$line"$'\n' - # Check for function end (period at end of line not in string) - if [[ 
"$line" =~ \.[[:space:]]*$ ]] && ! [[ "$line" =~ \" ]]; then + # Check for function end (period at end of line, not in string or comment) + if [[ "$line" =~ ^[[:space:]]*end\.[[:space:]]*$ ]] || + ([[ "$line" =~ \.[[:space:]]*$ ]] && ! [[ "$line" =~ \" ]] && ! [[ "$line" =~ ^[[:space:]]*% ]]); then in_function=false write_clean_function "$output_file" "$current_function" "$spec_content" "$doc_content" "$function_content" "$functions_written" ((functions_written++)) @@ -312,10 +337,76 @@ write_clean_function() { echo "" >> "$output_file" fi - # Add implementation - echo '```erlang' >> "$output_file" - echo -n "$code" | sed '/^[[:space:]]*$/d' >> "$output_file" - echo '```' >> "$output_file" + # Add implementation with inline comment processing + write_code_with_inline_comments "$output_file" "$code" +} + +# --- Function to write code blocks with inline comments breaking them --- +write_code_with_inline_comments() { + local output_file="$1" + local code="$2" + + local in_code_block=false + local in_comment_block=false + local comment_lines=() + + while IFS= read -r line; do + # Check if this is an inline comment + # Match lines that have whitespace followed by a single % (not %% or %%%) + if [[ "$line" =~ ^[[:space:]]*%([[:space:]]|$) ]] && ! 
[[ "$line" =~ ^[[:space:]]*%% ]]; then + # If we were in a code block, close it + if [ "$in_code_block" = true ]; then + echo '```' >> "$output_file" + echo "" >> "$output_file" + in_code_block=false + fi + + # Extract comment text - remove leading whitespace and % + comment_text=$(echo "$line" | sed 's/^[[:space:]]*%//') + + # Remove leading space after % if present + comment_text=$(echo "$comment_text" | sed 's/^[[:space:]]//') + + # Convert backtick-quote pairs to proper markdown backticks + comment_text=$(echo "$comment_text" | sed "s/\`\([^']*\)'/\`\1\`/g") + + # Add to comment lines array + comment_lines+=("$comment_text") + in_comment_block=true + else + # Regular code line (including %% and %%% doc comments) + # If we were collecting comments, write them out first + if [ "$in_comment_block" = true ]; then + # Write each comment line separately to preserve structure + for comment_line in "${comment_lines[@]}"; do + echo "$comment_line" >> "$output_file" + done + echo "" >> "$output_file" + echo '```erlang' >> "$output_file" + in_comment_block=false + comment_lines=() + in_code_block=true + elif [ "$in_code_block" = false ]; then + echo '```erlang' >> "$output_file" + in_code_block=true + fi + echo "$line" >> "$output_file" + fi + done <<< "$code" + + # Handle any remaining comment lines + if [ "$in_comment_block" = true ]; then + for comment_line in "${comment_lines[@]}"; do + echo "$comment_line" >> "$output_file" + done + echo "" >> "$output_file" + fi + + # Close any remaining code block + if [ "$in_code_block" = true ]; then + echo '```' >> "$output_file" + echo "" >> "$output_file" + fi } # --- Main processing loop --- From 17e8e440bfd03d48f19f09bdb2c61f8578989b08 Mon Sep 17 00:00:00 2001 From: Dylan Shade <63427984+dpshade@users.noreply.github.com> Date: Fri, 19 Sep 2025 12:38:58 -0400 Subject: [PATCH 11/17] docs: Update book configuration and enhance theme handling - Change authors in `book.toml` to "HyperBEAM Core Team". 
- Set default theme to "rust" in `custom.js` if no theme is stored, improving user experience with theme management. - Introduce `setDefaultTheme` function to manage theme preferences effectively. --- docs/book/book.toml | 3 ++- docs/book/custom.js | 15 +++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/docs/book/book.toml b/docs/book/book.toml index 86adbc7af..ab213ea81 100644 --- a/docs/book/book.toml +++ b/docs/book/book.toml @@ -1,5 +1,5 @@ [book] -authors = ["Dylan Shade", "HyperBEAM Team"] +authors = ["HyperBEAM Core Team"] language = "en" src = "src" title = "HyperBEAM Book" @@ -12,6 +12,7 @@ build-dir = "dist" edition = "2021" [output.html] +default-theme = "rust" additional-css = ["custom.css", "theme/highlight.css"] additional-js = ["custom.js"] mathjax-support = false diff --git a/docs/book/custom.js b/docs/book/custom.js index a69e63e8a..d3bbf168c 100644 --- a/docs/book/custom.js +++ b/docs/book/custom.js @@ -2,6 +2,9 @@ // Following mdBook best practices: minimal JavaScript additions document.addEventListener('DOMContentLoaded', function() { + // Set default theme to Rust if no theme is stored + setDefaultTheme(); + // Initialize theme change detection initThemeDetection(); @@ -12,6 +15,18 @@ document.addEventListener('DOMContentLoaded', function() { replaceEditWithCopy(); }); +function setDefaultTheme() { + // Check if there's already a stored theme preference + const storedTheme = localStorage.getItem('mdbook-theme'); + + // If no theme is stored, set default to rust + if (!storedTheme) { + localStorage.setItem('mdbook-theme', 'rust'); + document.documentElement.className = document.documentElement.className.replace(/\brust\b|\blight\b|\bnavy\b|\bayu\b/g, '').trim(); + document.documentElement.classList.add('rust'); + } +} + function initThemeDetection() { // Watch for theme changes via class changes on html element const observer = new MutationObserver(function(mutations) { From 5465764d4181389888230d204ee629b2e689103e Mon 
Sep 17 00:00:00 2001 From: Dylan Shade <63427984+dpshade@users.noreply.github.com> Date: Fri, 19 Sep 2025 15:39:55 -0400 Subject: [PATCH 12/17] docs: Add scripts for building and serving HyperBEAM documentation - Introduce `build-and-serve.sh` to automate the generation and local serving of literate documentation. - Add `build-literate-erlang-js.sh` for a comprehensive Erlang documentation generation using JavaScript. - Update `build-literate-erlang.sh` to improve documentation extraction and formatting. - Create `erlang-literate-parser.js` for enhanced parsing of Erlang files, supporting various comment types and annotations. - Add `README.md` in the book source directory to provide an overview of the generated documentation. - Remove outdated `erlang.min.js` and `highlight.js` files to streamline the documentation theme. --- docs/book/src/README.md | 159 ++++ docs/build-and-serve.sh | 29 + docs/build-literate-erlang-js.sh | 140 ++++ docs/build-literate-erlang.sh | 228 ++++- docs/docs/book/theme/erlang.min.js | 27 - docs/docs/book/theme/highlight.js | 1239 ---------------------------- docs/erlang-literate-parser.js | 580 +++++++++++++ 7 files changed, 1107 insertions(+), 1295 deletions(-) create mode 100644 docs/book/src/README.md create mode 100755 docs/build-and-serve.sh create mode 100755 docs/build-literate-erlang-js.sh delete mode 100644 docs/docs/book/theme/erlang.min.js delete mode 100644 docs/docs/book/theme/highlight.js create mode 100755 docs/erlang-literate-parser.js diff --git a/docs/book/src/README.md b/docs/book/src/README.md new file mode 100644 index 000000000..1376130f0 --- /dev/null +++ b/docs/book/src/README.md @@ -0,0 +1,159 @@ +# HyperBEAM Literate Erlang Documentation + +This directory contains literate Erlang documentation generated from the HyperBEAM source code. +Each file combines the source code with embedded documentation in a format optimized for GitHub. 
+ +## Modules + +| Module | Description | +|--------|-------------| +| [ar_bundles](./ar_bundles.erl.md) | Erlang module... | +| [ar_deep_hash](./ar_deep_hash.erl.md) | Erlang module... | +| [ar_rate_limiter](./ar_rate_limiter.erl.md) | Erlang module... | +| [ar_timestamp](./ar_timestamp.erl.md) | Erlang module... | +| [ar_tx](./ar_tx.erl.md) | Erlang module... | +| [ar_wallet](./ar_wallet.erl.md) | Erlang module... | +| [dev_apply](./dev_apply.erl.md) | Erlang module... | +| [dev_arweave_block_cache](./dev_arweave_block_cache.erl.md) | Erlang module... | +| [dev_arweave](./dev_arweave.erl.md) | Erlang module... | +| [dev_auth_hook](./dev_auth_hook.erl.md) | Erlang module... | +| [dev_cache](./dev_cache.erl.md) | Erlang module... | +| [dev_cacheviz](./dev_cacheviz.erl.md) | Erlang module... | +| [dev_codec_ans104_from](./dev_codec_ans104_from.erl.md) | Erlang module... | +| [dev_codec_ans104_to](./dev_codec_ans104_to.erl.md) | Erlang module... | +| [dev_codec_ans104](./dev_codec_ans104.erl.md) | Erlang module... | +| [dev_codec_cookie_auth](./dev_codec_cookie_auth.erl.md) | Erlang module... | +| [dev_codec_cookie_test_vectors](./dev_codec_cookie_test_vectors.erl.md) | Erlang module... | +| [dev_codec_cookie](./dev_codec_cookie.erl.md) | Erlang module... | +| [dev_codec_flat](./dev_codec_flat.erl.md) | Erlang module... | +| [dev_codec_http_auth](./dev_codec_http_auth.erl.md) | Erlang module... | +| [dev_codec_httpsig_conv](./dev_codec_httpsig_conv.erl.md) | Erlang module... | +| [dev_codec_httpsig_keyid](./dev_codec_httpsig_keyid.erl.md) | Erlang module... | +| [dev_codec_httpsig_proxy](./dev_codec_httpsig_proxy.erl.md) | Erlang module... | +| [dev_codec_httpsig_siginfo](./dev_codec_httpsig_siginfo.erl.md) | Erlang module... | +| [dev_codec_httpsig](./dev_codec_httpsig.erl.md) | Erlang module... | +| [dev_codec_json](./dev_codec_json.erl.md) | Erlang module... | +| [dev_codec_structured](./dev_codec_structured.erl.md) | Erlang module... 
| +| [dev_copycat_arweave](./dev_copycat_arweave.erl.md) | Erlang module... | +| [dev_copycat_graphql](./dev_copycat_graphql.erl.md) | Erlang module... | +| [dev_copycat](./dev_copycat.erl.md) | Erlang module... | +| [dev_cron](./dev_cron.erl.md) | Erlang module... | +| [dev_cu](./dev_cu.erl.md) | Erlang module... | +| [dev_dedup](./dev_dedup.erl.md) | Erlang module... | +| [dev_delegated_compute](./dev_delegated_compute.erl.md) | Erlang module... | +| [dev_faff](./dev_faff.erl.md) | Erlang module... | +| [dev_genesis_wasm](./dev_genesis_wasm.erl.md) | Erlang module... | +| [dev_green_zone](./dev_green_zone.erl.md) | Erlang module... | +| [dev_hook](./dev_hook.erl.md) | Erlang module... | +| [dev_hyperbuddy](./dev_hyperbuddy.erl.md) | Erlang module... | +| [dev_json_iface](./dev_json_iface.erl.md) | Erlang module... | +| [dev_local_name](./dev_local_name.erl.md) | Erlang module... | +| [dev_lookup](./dev_lookup.erl.md) | Erlang module... | +| [dev_lua_lib](./dev_lua_lib.erl.md) | Erlang module... | +| [dev_lua_test_ledgers](./dev_lua_test_ledgers.erl.md) | Erlang module... | +| [dev_lua_test](./dev_lua_test.erl.md) | Erlang module... | +| [dev_lua](./dev_lua.erl.md) | Erlang module... | +| [dev_manifest](./dev_manifest.erl.md) | Erlang module... | +| [dev_message](./dev_message.erl.md) | Erlang module... | +| [dev_meta](./dev_meta.erl.md) | Erlang module... | +| [dev_monitor](./dev_monitor.erl.md) | Erlang module... | +| [dev_multipass](./dev_multipass.erl.md) | Erlang module... | +| [dev_name](./dev_name.erl.md) | Erlang module... | +| [dev_node_process](./dev_node_process.erl.md) | Erlang module... | +| [dev_p4](./dev_p4.erl.md) | Erlang module... | +| [dev_patch](./dev_patch.erl.md) | Erlang module... | +| [dev_poda](./dev_poda.erl.md) | Erlang module... | +| [dev_process_cache](./dev_process_cache.erl.md) | Erlang module... | +| [dev_process_worker](./dev_process_worker.erl.md) | Erlang module... | +| [dev_process](./dev_process.erl.md) | Erlang module... 
| +| [dev_profile](./dev_profile.erl.md) | Erlang module... | +| [dev_push](./dev_push.erl.md) | Erlang module... | +| [dev_query_arweave](./dev_query_arweave.erl.md) | Erlang module... | +| [dev_query_graphql](./dev_query_graphql.erl.md) | Erlang module... | +| [dev_query_test_vectors](./dev_query_test_vectors.erl.md) | Erlang module... | +| [dev_query](./dev_query.erl.md) | Erlang module... | +| [dev_relay](./dev_relay.erl.md) | Erlang module... | +| [dev_router](./dev_router.erl.md) | Erlang module... | +| [dev_scheduler_cache](./dev_scheduler_cache.erl.md) | Erlang module... | +| [dev_scheduler_formats](./dev_scheduler_formats.erl.md) | Erlang module... | +| [dev_scheduler_registry](./dev_scheduler_registry.erl.md) | Erlang module... | +| [dev_scheduler_server](./dev_scheduler_server.erl.md) | Erlang module... | +| [dev_scheduler](./dev_scheduler.erl.md) | Erlang module... | +| [dev_secret](./dev_secret.erl.md) | Erlang module... | +| [dev_simple_pay](./dev_simple_pay.erl.md) | Erlang module... | +| [dev_snp_nif](./dev_snp_nif.erl.md) | Erlang module... | +| [dev_snp](./dev_snp.erl.md) | Erlang module... | +| [dev_stack](./dev_stack.erl.md) | Erlang module... | +| [dev_test](./dev_test.erl.md) | Erlang module... | +| [dev_volume](./dev_volume.erl.md) | Erlang module... | +| [dev_wasi](./dev_wasi.erl.md) | Erlang module... | +| [dev_wasm](./dev_wasm.erl.md) | Erlang module... | +| [dev_whois](./dev_whois.erl.md) | Erlang module... | +| [hb_ao_test_vectors](./hb_ao_test_vectors.erl.md) | Erlang module... | +| [hb_ao](./hb_ao.erl.md) | Erlang module... | +| [hb_app](./hb_app.erl.md) | Erlang module... | +| [hb_beamr_io](./hb_beamr_io.erl.md) | Erlang module... | +| [hb_beamr](./hb_beamr.erl.md) | Erlang module... | +| [hb_cache_control](./hb_cache_control.erl.md) | Erlang module... | +| [hb_cache_render](./hb_cache_render.erl.md) | Erlang module... | +| [hb_cache](./hb_cache.erl.md) | Erlang module... | +| [hb_client](./hb_client.erl.md) | Erlang module... 
| +| [hb_crypto](./hb_crypto.erl.md) | Erlang module... | +| [hb_debugger](./hb_debugger.erl.md) | Erlang module... | +| [hb_escape](./hb_escape.erl.md) | Erlang module... | +| [hb_event](./hb_event.erl.md) | Erlang module... | +| [hb_examples](./hb_examples.erl.md) | Erlang module... | +| [hb_features](./hb_features.erl.md) | Erlang module... | +| [hb_format](./hb_format.erl.md) | Erlang module... | +| [hb_gateway_client](./hb_gateway_client.erl.md) | Erlang module... | +| [hb_http_benchmark_tests](./hb_http_benchmark_tests.erl.md) | Erlang module... | +| [hb_http_client_sup](./hb_http_client_sup.erl.md) | Erlang module... | +| [hb_http_client](./hb_http_client.erl.md) | Erlang module... | +| [hb_http_multi](./hb_http_multi.erl.md) | Erlang module... | +| [hb_http_server](./hb_http_server.erl.md) | Erlang module... | +| [hb_http](./hb_http.erl.md) | Erlang module... | +| [hb_json](./hb_json.erl.md) | Erlang module... | +| [hb_keccak](./hb_keccak.erl.md) | Erlang module... | +| [hb_link](./hb_link.erl.md) | Erlang module... | +| [hb_logger](./hb_logger.erl.md) | Erlang module... | +| [hb_maps](./hb_maps.erl.md) | Erlang module... | +| [hb_message_test_vectors](./hb_message_test_vectors.erl.md) | Erlang module... | +| [hb_message](./hb_message.erl.md) | Erlang module... | +| [hb_metrics_collector](./hb_metrics_collector.erl.md) | Erlang module... | +| [hb_name](./hb_name.erl.md) | Erlang module... | +| [hb_opts](./hb_opts.erl.md) | Erlang module... | +| [hb_path](./hb_path.erl.md) | Erlang module... | +| [hb_persistent](./hb_persistent.erl.md) | Erlang module... | +| [hb_private](./hb_private.erl.md) | Erlang module... | +| [hb_process_monitor](./hb_process_monitor.erl.md) | Erlang module... | +| [hb_router](./hb_router.erl.md) | Erlang module... | +| [hb_singleton](./hb_singleton.erl.md) | Erlang module... | +| [hb_store_fs](./hb_store_fs.erl.md) | Erlang module... | +| [hb_store_gateway](./hb_store_gateway.erl.md) | Erlang module... 
| +| [hb_store_lmdb](./hb_store_lmdb.erl.md) | Erlang module... | +| [hb_store_lru](./hb_store_lru.erl.md) | Erlang module... | +| [hb_store_opts](./hb_store_opts.erl.md) | Erlang module... | +| [hb_store_remote_node](./hb_store_remote_node.erl.md) | Erlang module... | +| [hb_store_rocksdb](./hb_store_rocksdb.erl.md) | Erlang module... | +| [hb_store](./hb_store.erl.md) | Erlang module... | +| [hb_structured_fields](./hb_structured_fields.erl.md) | Erlang module... | +| [hb_sup](./hb_sup.erl.md) | Erlang module... | +| [hb_test_utils](./hb_test_utils.erl.md) | Erlang module... | +| [hb_tracer](./hb_tracer.erl.md) | Erlang module... | +| [hb_util](./hb_util.erl.md) | Erlang module... | +| [hb_volume](./hb_volume.erl.md) | Erlang module... | +| [hb](./hb.erl.md) | Erlang module... | +| [rsa_pss](./rsa_pss.erl.md) | Erlang module... | + +## About Literate Programming + +Literate programming is a methodology that combines a programming language with a documentation language, +making programs more robust, more portable, and more easily maintained than programs written only in a +high-level language. + +These files present the HyperBEAM source code in a narrative format, with documentation and code +interwoven to provide better understanding of the implementation. 
+ +--- + +*Generated on 2025-09-19 17:15:25 UTC* diff --git a/docs/build-and-serve.sh b/docs/build-and-serve.sh new file mode 100755 index 000000000..c590e7c8d --- /dev/null +++ b/docs/build-and-serve.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +# HyperBEAM Documentation Build and Serve Script +# This script generates literate documentation and serves it locally + +set -e + +# Colors for output +GREEN='\033[0;32m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +echo -e "${BLUE}🚀 Building and serving HyperBEAM documentation...${NC}" + +# Generate literate documentation +echo -e "${GREEN}📚 Generating literate documentation...${NC}" +./build-literate-erlang-js.sh + +# Build and serve mdBook +echo -e "${GREEN}📖 Building mdBook...${NC}" +cd book +mdbook build + +echo -e "${GREEN}🌐 Starting development server...${NC}" +echo -e "${BLUE}📖 Documentation will be available at: http://localhost:3033${NC}" +echo -e "${BLUE}🔄 Auto-reload enabled - changes will be reflected automatically${NC}" +echo -e "${BLUE}⏹️ Press Ctrl+C to stop the server${NC}" + +mdbook serve --port 3033 \ No newline at end of file diff --git a/docs/build-literate-erlang-js.sh b/docs/build-literate-erlang-js.sh new file mode 100755 index 000000000..91b5545ce --- /dev/null +++ b/docs/build-literate-erlang-js.sh @@ -0,0 +1,140 @@ +#!/bin/bash + +# Comprehensive Erlang Literate Documentation Builder (JavaScript Implementation) +# This script wraps the JavaScript parser for seamless integration + +# Colors for output +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +# Get script directory and root +SCRIPT_DIR="$(dirname "$(realpath "$0")")" +ROOT_DIR="$(dirname "$SCRIPT_DIR")" +cd "$ROOT_DIR" + +# Configuration +SRC_DIR="${SRC_DIR:-$ROOT_DIR/src}" +OUTPUT_DIR="${OUTPUT_DIR:-$ROOT_DIR/docs/literate-erlang}" +PARSER_SCRIPT="$SCRIPT_DIR/erlang-literate-parser.js" + +# Parse arguments +VERBOSE=false +if [[ "$@" == *"-v"* ]] || [[ "$@" == *"--verbose"* ]]; then + VERBOSE=true +fi + +echo 
-e "${GREEN}HyperBEAM Literate Erlang Documentation Generator (JavaScript)${NC}" +echo "========================================================" + +# Check for Node.js +if ! command -v node &> /dev/null; then + echo -e "${RED}Error: Node.js is required but not installed.${NC}" + echo "Please install Node.js (version 14 or later) to use this parser." + echo "" + echo "On macOS with Homebrew: brew install node" + echo "On Ubuntu: curl -fsSL https://deb.nodesource.com/setup_lts.x | sudo -E bash - && sudo apt-get install -y nodejs" + exit 1 +fi + +# Check Node.js version (need ES modules support) +NODE_VERSION=$(node --version | cut -d'v' -f2 | cut -d'.' -f1) +if [ "$NODE_VERSION" -lt 14 ]; then + echo -e "${RED}Error: Node.js version 14 or later is required for ES modules support.${NC}" + echo "Current version: $(node --version)" + exit 1 +fi + +# Verify source directory exists +if [ ! -d "$SRC_DIR" ]; then + echo -e "${RED}Error: Source directory not found: $SRC_DIR${NC}" + exit 1 +fi + +# Verify parser script exists +if [ ! -f "$PARSER_SCRIPT" ]; then + echo -e "${RED}Error: Parser script not found: $PARSER_SCRIPT${NC}" + exit 1 +fi + +# Make parser executable +chmod +x "$PARSER_SCRIPT" + +# Count Erlang files +ERL_COUNT=$(find "$SRC_DIR" -name "*.erl" -type f | wc -l) +if [ "$ERL_COUNT" -eq 0 ]; then + echo -e "${YELLOW}Warning: No .erl files found in $SRC_DIR${NC}" + exit 0 +fi + +echo "Source directory: $SRC_DIR" +echo "Output directory: $OUTPUT_DIR" +echo "Found $ERL_COUNT Erlang files" +echo "" + +# Create output directory if it doesn't exist +mkdir -p "$OUTPUT_DIR" + +# Run the JavaScript parser +echo -e "${GREEN}Generating literate documentation...${NC}" + +# Set environment variables and run parser +export SRC_DIR="$SRC_DIR" +export OUTPUT_DIR="$OUTPUT_DIR" + +if [ "$VERBOSE" = true ]; then + node "$PARSER_SCRIPT" --verbose +else + node "$PARSER_SCRIPT" +fi + +PARSER_EXIT_CODE=$? 
+ +if [ $PARSER_EXIT_CODE -eq 0 ]; then + echo "" + echo -e "${GREEN}✓ Literate documentation generated successfully${NC}" + + # List generated files + if [ "$VERBOSE" = true ]; then + echo "" + echo "Generated files:" + ls -la "$OUTPUT_DIR"/*.md 2>/dev/null | while read -r line; do + echo " $line" + done + fi + + # Copy to mdBook if it exists + if [ -d "$ROOT_DIR/docs/book/src" ]; then + echo -e "${GREEN}Copying documentation to mdBook...${NC}" + cp "$OUTPUT_DIR"/*.md "$ROOT_DIR/docs/book/src/" 2>/dev/null + if [ $? -eq 0 ]; then + echo -e "${GREEN}✓ Documentation copied to mdBook${NC}" + else + echo -e "${YELLOW}Warning: Could not copy to mdBook (no files generated?)${NC}" + fi + fi + + # Build mdBook if available + if command -v mdbook &> /dev/null && [ -f "$ROOT_DIR/docs/book/book.toml" ]; then + echo -e "${GREEN}Building mdBook...${NC}" + cd "$ROOT_DIR/docs/book" + mdbook build + if [ $? -eq 0 ]; then + echo -e "${GREEN}✓ mdBook built successfully${NC}" + echo "View at: file://$ROOT_DIR/docs/book/book/index.html" + else + echo -e "${YELLOW}Warning: mdBook build failed${NC}" + fi + cd "$ROOT_DIR" + fi + + echo "" + echo -e "${GREEN}Documentation generation complete!${NC}" + echo "Output directory: $OUTPUT_DIR" + +else + echo "" + echo -e "${RED}✗ Documentation generation failed (exit code: $PARSER_EXIT_CODE)${NC}" + exit $PARSER_EXIT_CODE +fi \ No newline at end of file diff --git a/docs/build-literate-erlang.sh b/docs/build-literate-erlang.sh index 6bf71fa7f..f1cf0262d 100755 --- a/docs/build-literate-erlang.sh +++ b/docs/build-literate-erlang.sh @@ -2,6 +2,10 @@ # Script to generate literate Erlang documentation from HyperBEAM source files # +# ⚠️ DEPRECATED: This bash parser has been superseded by the JavaScript version +# For superior results with comprehensive comment parsing, use: +# ./docs/build-literate-erlang-js.sh +# # This creates .erl.md files that combine source code with documentation # in a format optimized for GitHub rendering with cleaner 
appearance # @@ -88,6 +92,14 @@ ${GRAY} =${GRAY}%%${NEON_GREEN}*+${BRIGHT_RED}=-:::::::::${GRAY} LITERAT display_logo log_step "LITERATE ERLANG DOCUMENTATION GENERATION" +# Display deprecation notice +echo -e "${YELLOW}${BOLD}⚠️ DEPRECATION NOTICE${NC}" +echo -e "${YELLOW}This bash parser has been superseded by a superior JavaScript implementation.${NC}" +echo -e "${YELLOW}For comprehensive comment parsing and true literate programming, use:${NC}" +echo -e "${GREEN}${BOLD} ./docs/build-literate-erlang-js.sh${NC}" +echo -e "${GRAY}Continuing with legacy bash parser...${NC}" +echo "" + # Ensure we're in the root directory ROOT_DIR="$(dirname "$(realpath "$0")")/.." cd "$ROOT_DIR" || { log_error "Failed to change to root directory"; exit 1; } @@ -145,30 +157,171 @@ extract_module_doc() { extract_function_doc() { local content="$1" - # Remove leading %% or % and @doc tags, then convert edocs syntax to markdown, preserving paragraph breaks - echo "$content" | \ + # Clean the content and separate into sections + local cleaned_content=$(echo "$content" | \ sed 's/^%% *//' | \ sed 's/^% *//' | \ sed 's/^@doc$//' | \ sed 's/^@doc //' | \ sed 's/^@end$//' | \ - sed 's/^@author /**Author:** /' | \ - sed 's/^@copyright /**Copyright:** /' | \ - sed 's/^---*$//' | \ - sed "s/\`\([^']*\)'/\`\1\`/g" | \ - awk ' - BEGIN { prev_empty = 0 } - /^[[:space:]]*$/ { - if (!prev_empty) { - print "" - prev_empty = 1 + sed "s/\`\([^']*\)'/\`\1\`/g") + + # Initialize variables for different sections + local description="" + local params="" + local returns="" + local in_params=false + local in_returns=false + local current_param="" + + while IFS= read -r line; do + if [[ "$line" =~ ^@param[[:space:]]+([^[:space:]]+)[[:space:]]+(.*)$ ]]; then + # Save any current param before starting new one + if [ -n "$current_param" ]; then + params+="- ${current_param}"$'\n' + fi + # Start new param with code-formatted name + current_param="\`${BASH_REMATCH[1]}\` - ${BASH_REMATCH[2]}" + in_params=true + 
in_returns=false + elif [[ "$line" =~ ^@returns?[[:space:]]+(.*)$ ]]; then + # Save any current param before starting returns + if [ -n "$current_param" ]; then + params+="- ${current_param}"$'\n' + current_param="" + fi + returns="${BASH_REMATCH[1]}" + in_params=false + in_returns=true + elif [[ "$line" =~ ^@author[[:space:]]+(.*)$ ]]; then + # Skip author lines for now + continue + elif [[ "$line" =~ ^@copyright[[:space:]]+(.*)$ ]]; then + # Skip copyright lines for now + continue + elif [[ "$line" =~ ^[[:space:]]*$ ]]; then + # Empty line - add to current section + if [ "$in_params" = true ] && [ -n "$current_param" ]; then + current_param+=" " + elif [ "$in_returns" = true ]; then + returns+=" " + elif [ "$in_params" = false ] && [ "$in_returns" = false ]; then + description+="$line"$'\n' + fi + else + # Regular content line + if [ "$in_params" = true ]; then + # Continue current param description - clean up whitespace + local cleaned_line=$(echo "$line" | sed 's/^[[:space:]]*//' | sed 's/[[:space:]]*$//') + current_param+=" $cleaned_line" + elif [ "$in_returns" = true ]; then + # Continue returns description - clean up whitespace + local cleaned_line=$(echo "$line" | sed 's/^[[:space:]]*//' | sed 's/[[:space:]]*$//') + returns+=" $cleaned_line" + else + # Part of main description + description+="$line"$'\n' + fi + fi + done <<< "$cleaned_content" + + # Save any remaining param + if [ -n "$current_param" ]; then + params+="- ${current_param}"$'\n' + fi + + # Build formatted output + local output="" + + # Add description (clean up extra newlines) + if [ -n "$description" ]; then + output+=$(echo "$description" | awk ' + BEGIN { prev_empty = 0 } + /^[[:space:]]*$/ { + if (!prev_empty) { + print "" + prev_empty = 1 + } + next } - next - } - { - print $0 - prev_empty = 0 - }' + { + print $0 + prev_empty = 0 + }') + output+=$'\n' + fi + + # Add parameters section + if [ -n "$params" ]; then + output+=$'\n'"#### Parameters"$'\n'$'\n' + output+="$params"$'\n' + fi + + 
# Add returns section + if [ -n "$returns" ]; then + output+=$'\n'"#### Returns"$'\n'$'\n' + # Clean up returns text and parse return type vs description + local cleaned_returns=$(echo "$returns" | sed 's/[[:space:]]\+/ /g' | sed 's/^[[:space:]]*//' | sed 's/[[:space:]]*$//') + + # Try to extract and format return types - always use bullet format + # Check for multiple return patterns first (most comprehensive) + if [[ "$cleaned_returns" =~ , ]]; then + # Try to format multiple return patterns - handle nested braces properly + # Use a more robust approach for nested structures + local formatted_returns=$(echo "$cleaned_returns" | \ + perl -pe 's/(\{(?:[^{}]++|(?1))*\})/`$1`/g' 2>/dev/null || \ + echo "$cleaned_returns" | sed -E 's/(\{[^{}]*(\{[^}]*\}[^{}]*)*\})/`\1`/g') + formatted_returns=$(echo "$formatted_returns" | sed -E 's/([[:space:]]|^)(not_found|error|ok|true|false)([[:space:]]|$)/\1`\2`\3/g') + output+="- $formatted_returns"$'\n' + elif [[ "$cleaned_returns" =~ ^\{ ]]; then + # Complex return type - use better pattern to handle nested braces + # Extract the complete tuple including nested structures + local temp_string="$cleaned_returns" + local brace_count=0 + local char_pos=0 + local return_type="" + + # Parse character by character to find matching braces + while [ $char_pos -lt ${#temp_string} ]; do + local char="${temp_string:$char_pos:1}" + return_type+="$char" + + if [ "$char" = "{" ]; then + ((brace_count++)) + elif [ "$char" = "}" ]; then + ((brace_count--)) + if [ $brace_count -eq 0 ]; then + break + fi + fi + ((char_pos++)) + done + + # Extract description after the return type + local return_desc="${temp_string:$((char_pos + 1))}" + return_desc=$(echo "$return_desc" | sed 's/^[[:space:]]*//') + + if [ -n "$return_desc" ]; then + output+="- \`$return_type\` $return_desc"$'\n' + else + output+="- \`$return_type\`"$'\n' + fi + elif [[ "$cleaned_returns" =~ ^(true|false)[[:space:]]+(.*) ]]; then + # Boolean return types + local 
return_type="${BASH_REMATCH[1]}" + local return_desc="${BASH_REMATCH[2]}" + output+="- \`$return_type\` $return_desc"$'\n' + elif [[ "$cleaned_returns" =~ ^(ok|error|not_found)[[:space:]]+(.*) ]]; then + # Simple atom return types + local return_type="${BASH_REMATCH[1]}" + local return_desc="${BASH_REMATCH[2]}" + output+="- \`$return_type\` $return_desc"$'\n' + else + output+="- $cleaned_returns"$'\n' + fi + fi + + echo "$output" } # --- Function to process a single Erlang file --- @@ -223,21 +376,28 @@ EOF local function_content="" local spec_content="" local doc_content="" + local previous_doc_content="" local functions_written=0 while IFS= read -r line; do # Check for doc comments (before functions) - if [[ "$line" =~ ^%+[[:space:]]?@doc[[:space:]](.*)$ ]] || - ([[ "$line" =~ ^%+[[:space:]](.*)$ ]] && [ "$in_doc_comment" = true ]); then + if [[ "$line" =~ ^%%[[:space:]]?@doc[[:space:]](.*)$ ]] || + [[ "$line" =~ ^%%[[:space:]]?@doc$ ]]; then in_doc_comment=true if [[ "$line" =~ @doc[[:space:]](.*)$ ]]; then doc_content+="${BASH_REMATCH[1]}"$'\n' - else - doc_content+="${BASH_REMATCH[1]}"$'\n' fi continue fi + # Continue collecting doc comment lines + if [ "$in_doc_comment" = true ] && [[ "$line" =~ ^%% ]]; then + # Remove %% prefix and collect + local cleaned_line=$(echo "$line" | sed 's/^%%[[:space:]]*//') + doc_content+="$cleaned_line"$'\n' + continue + fi + # Check for -spec if [[ "$line" =~ ^-spec[[:space:]] ]]; then in_spec=true @@ -259,15 +419,18 @@ EOF if [[ "$line" =~ ^([a-z][a-z0-9_]*)[[:space:]]*\( ]]; then # If we were already in a function, write it out if [ -n "$current_function" ] && [ -n "$function_content" ]; then - write_clean_function "$output_file" "$current_function" "$spec_content" "$doc_content" "$function_content" "$functions_written" + write_clean_function "$output_file" "$current_function" "$spec_content" "$previous_doc_content" "$function_content" "$functions_written" ((functions_written++)) fi - # Start new function + # Start new 
function - preserve current doc_content for this function current_function="${BASH_REMATCH[1]}" function_content="$line"$'\n' in_function=true in_doc_comment=false + previous_doc_content="$doc_content" + spec_content="" + doc_content="" continue fi @@ -278,22 +441,25 @@ EOF if [[ "$line" =~ ^[[:space:]]*end\.[[:space:]]*$ ]] || ([[ "$line" =~ \.[[:space:]]*$ ]] && ! [[ "$line" =~ \" ]] && ! [[ "$line" =~ ^[[:space:]]*% ]]); then in_function=false - write_clean_function "$output_file" "$current_function" "$spec_content" "$doc_content" "$function_content" "$functions_written" + write_clean_function "$output_file" "$current_function" "$spec_content" "$previous_doc_content" "$function_content" "$functions_written" ((functions_written++)) current_function="" function_content="" spec_content="" + previous_doc_content="" + fi + else + # Only reset doc content if we hit a non-comment, non-spec, non-function line + if ! [[ "$line" =~ ^% ]] && ! [[ "$line" =~ ^-spec ]] && [ "$in_doc_comment" = false ]; then doc_content="" fi - elif [ "$in_doc_comment" = false ]; then - # Reset doc content if we hit a non-comment, non-function line - doc_content="" + in_doc_comment=false fi done < "$src_file" # Write any remaining function if [ -n "$current_function" ] && [ -n "$function_content" ]; then - write_clean_function "$output_file" "$current_function" "$spec_content" "$doc_content" "$function_content" "$functions_written" + write_clean_function "$output_file" "$current_function" "$spec_content" "$previous_doc_content" "$function_content" "$functions_written" fi # Add footer @@ -337,6 +503,10 @@ write_clean_function() { echo "" >> "$output_file" fi + # Add Function subheader before the implementation + echo "#### Function" >> "$output_file" + echo "" >> "$output_file" + # Add implementation with inline comment processing write_code_with_inline_comments "$output_file" "$code" } diff --git a/docs/docs/book/theme/erlang.min.js b/docs/docs/book/theme/erlang.min.js deleted file mode 
100644 index 9b48a797d..000000000 --- a/docs/docs/book/theme/erlang.min.js +++ /dev/null @@ -1,27 +0,0 @@ -/*! `erlang` grammar compiled for Highlight.js 11.9.0 */ -(()=>{var e=(()=>{"use strict";return e=>{ -const n="[a-z'][a-zA-Z0-9_']*",r="("+n+":"+n+"|"+n+")",a={ -keyword:"after and andalso|10 band begin bnot bor bsl bzr bxor case catch cond div end fun if let not of orelse|10 query receive rem try when xor", -literal:"false true"},i=e.COMMENT("%","$"),s={className:"number", -begin:"\\b(\\d+(_\\d+)*#[a-fA-F0-9]+(_[a-fA-F0-9]+)*|\\d+(_\\d+)*(\\.\\d+(_\\d+)*)?([eE][-+]?\\d+)?)", -relevance:0},c={begin:"fun\\s+"+n+"/\\d+"},t={begin:r+"\\(",end:"\\)", -returnBegin:!0,relevance:0,contains:[{begin:r,relevance:0},{begin:"\\(", -end:"\\)",endsWithParent:!0,returnEnd:!0,relevance:0}]},d={begin:/\{/,end:/\}/, -relevance:0},o={begin:"\\b_([A-Z][A-Za-z0-9_]*)?",relevance:0},l={ -begin:"[A-Z][a-zA-Z0-9_]*",relevance:0},b={begin:"#"+e.UNDERSCORE_IDENT_RE, -relevance:0,returnBegin:!0,contains:[{begin:"#"+e.UNDERSCORE_IDENT_RE, -relevance:0},{begin:/\{/,end:/\}/,relevance:0}]},g={ -beginKeywords:"fun receive if try case",end:"end",keywords:a} -;g.contains=[i,c,e.inherit(e.APOS_STRING_MODE,{className:"" -}),g,t,e.QUOTE_STRING_MODE,s,d,o,l,b] -;const E=[i,c,g,t,e.QUOTE_STRING_MODE,s,d,o,l,b] -;t.contains[1].contains=E,d.contains=E,b.contains[1].contains=E;const u={ -className:"params",begin:"\\(",end:"\\)",contains:E};return{name:"Erlang", -aliases:["erl"],keywords:a,illegal:"(",returnBegin:!0, -illegal:"\\(|#|//|/\\*|\\\\|:|;",contains:[u,e.inherit(e.TITLE_MODE,{begin:n})], -starts:{end:";|\\.",keywords:a,contains:E}},i,{begin:"^-",end:"\\.",relevance:0, -excludeEnd:!0,returnBegin:!0,keywords:{$pattern:"-"+e.IDENT_RE, -keyword:["-module","-record","-undef","-export","-ifdef","-ifndef","-author","-copyright","-doc","-vsn","-import","-include","-include_lib","-compile","-define","-else","-endif","-file","-behaviour","-behavior","-spec"].map((e=>e+"|1.5")).join(" ") 
-},contains:[u]},s,e.QUOTE_STRING_MODE,b,o,l,d,{begin:/\.$/}]}}})() -;hljs.registerLanguage("erlang",e)})(); \ No newline at end of file diff --git a/docs/docs/book/theme/highlight.js b/docs/docs/book/theme/highlight.js deleted file mode 100644 index e72ef8c77..000000000 --- a/docs/docs/book/theme/highlight.js +++ /dev/null @@ -1,1239 +0,0 @@ -/*! - Highlight.js v11.9.0 (git: f47103d4f1) - (c) 2006-2023 undefined and other contributors - License: BSD-3-Clause - */ -var hljs=function(){"use strict";function e(n){ -return n instanceof Map?n.clear=n.delete=n.set=()=>{ -throw Error("map is read-only")}:n instanceof Set&&(n.add=n.clear=n.delete=()=>{ -throw Error("set is read-only") -}),Object.freeze(n),Object.getOwnPropertyNames(n).forEach((t=>{ -const a=n[t],i=typeof a;"object"!==i&&"function"!==i||Object.isFrozen(a)||e(a) -})),n}class n{constructor(e){ -void 0===e.data&&(e.data={}),this.data=e.data,this.isMatchIgnored=!1} -ignoreMatch(){this.isMatchIgnored=!0}}function t(e){ -return e.replace(/&/g,"&").replace(//g,">").replace(/"/g,""").replace(/'/g,"'") -}function a(e,...n){const t=Object.create(null);for(const n in e)t[n]=e[n] -;return n.forEach((e=>{for(const n in e)t[n]=e[n]})),t}const i=e=>!!e.scope -;class r{constructor(e,n){ -this.buffer="",this.classPrefix=n.classPrefix,e.walk(this)}addText(e){ -this.buffer+=t(e)}openNode(e){if(!i(e))return;const n=((e,{prefix:n})=>{ -if(e.startsWith("language:"))return e.replace("language:","language-") -;if(e.includes(".")){const t=e.split(".") -;return[`${n}${t.shift()}`,...t.map(((e,n)=>`${e}${"_".repeat(n+1)}`))].join(" ") -}return`${n}${e}`})(e.scope,{prefix:this.classPrefix});this.span(n)} -closeNode(e){i(e)&&(this.buffer+="")}value(){return this.buffer}span(e){ -this.buffer+=``}}const s=(e={})=>{const n={children:[]} -;return Object.assign(n,e),n};class o{constructor(){ -this.rootNode=s(),this.stack=[this.rootNode]}get top(){ -return this.stack[this.stack.length-1]}get root(){return this.rootNode}add(e){ 
-this.top.children.push(e)}openNode(e){const n=s({scope:e}) -;this.add(n),this.stack.push(n)}closeNode(){ -if(this.stack.length>1)return this.stack.pop()}closeAllNodes(){ -for(;this.closeNode(););}toJSON(){return JSON.stringify(this.rootNode,null,4)} -walk(e){return this.constructor._walk(e,this.rootNode)}static _walk(e,n){ -return"string"==typeof n?e.addText(n):n.children&&(e.openNode(n), -n.children.forEach((n=>this._walk(e,n))),e.closeNode(n)),e}static _collapse(e){ -"string"!=typeof e&&e.children&&(e.children.every((e=>"string"==typeof e))?e.children=[e.children.join("")]:e.children.forEach((e=>{ -o._collapse(e)})))}}class l extends o{constructor(e){super(),this.options=e} -addText(e){""!==e&&this.add(e)}startScope(e){this.openNode(e)}endScope(){ -this.closeNode()}__addSublanguage(e,n){const t=e.root -;n&&(t.scope="language:"+n),this.add(t)}toHTML(){ -return new r(this,this.options).value()}finalize(){ -return this.closeAllNodes(),!0}}function c(e){ -return e?"string"==typeof e?e:e.source:null}function d(e){return b("(?=",e,")")} -function g(e){return b("(?:",e,")*")}function u(e){return b("(?:",e,")?")} -function b(...e){return e.map((e=>c(e))).join("")}function m(...e){const n=(e=>{ -const n=e[e.length-1] -;return"object"==typeof n&&n.constructor===Object?(e.splice(e.length-1,1),n):{} -})(e);return"("+(n.capture?"":"?:")+e.map((e=>c(e))).join("|")+")"} -function p(e){return RegExp(e.toString()+"|").exec("").length-1} -const _=/\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./ -;function h(e,{joinWith:n}){let t=0;return e.map((e=>{t+=1;const n=t -;let a=c(e),i="";for(;a.length>0;){const e=_.exec(a);if(!e){i+=a;break} -i+=a.substring(0,e.index), -a=a.substring(e.index+e[0].length),"\\"===e[0][0]&&e[1]?i+="\\"+(Number(e[1])+n):(i+=e[0], -"("===e[0]&&t++)}return i})).map((e=>`(${e})`)).join(n)} -const f="[a-zA-Z]\\w*",E="[a-zA-Z_]\\w*",y="\\b\\d+(\\.\\d+)?",N="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",w="\\b(0b[01]+)",v={ 
-begin:"\\\\[\\s\\S]",relevance:0},O={scope:"string",begin:"'",end:"'", -illegal:"\\n",contains:[v]},k={scope:"string",begin:'"',end:'"',illegal:"\\n", -contains:[v]},x=(e,n,t={})=>{const i=a({scope:"comment",begin:e,end:n, -contains:[]},t);i.contains.push({scope:"doctag", -begin:"[ ]*(?=(TODO|FIXME|NOTE|BUG|OPTIMIZE|HACK|XXX):)", -end:/(TODO|FIXME|NOTE|BUG|OPTIMIZE|HACK|XXX):/,excludeBegin:!0,relevance:0}) -;const r=m("I","a","is","so","us","to","at","if","in","it","on",/[A-Za-z]+['](d|ve|re|ll|t|s|n)/,/[A-Za-z]+[-][a-z]+/,/[A-Za-z][a-z]{2,}/) -;return i.contains.push({begin:b(/[ ]+/,"(",r,/[.]?[:]?([.][ ]|[ ])/,"){3}")}),i -},M=x("//","$"),S=x("/\\*","\\*/"),A=x("#","$");var C=Object.freeze({ -__proto__:null,APOS_STRING_MODE:O,BACKSLASH_ESCAPE:v,BINARY_NUMBER_MODE:{ -scope:"number",begin:w,relevance:0},BINARY_NUMBER_RE:w,COMMENT:x, -C_BLOCK_COMMENT_MODE:S,C_LINE_COMMENT_MODE:M,C_NUMBER_MODE:{scope:"number", -begin:N,relevance:0},C_NUMBER_RE:N,END_SAME_AS_BEGIN:e=>Object.assign(e,{ -"on:begin":(e,n)=>{n.data._beginMatch=e[1]},"on:end":(e,n)=>{ -n.data._beginMatch!==e[1]&&n.ignoreMatch()}}),HASH_COMMENT_MODE:A,IDENT_RE:f, -MATCH_NOTHING_RE:/\b\B/,METHOD_GUARD:{begin:"\\.\\s*"+E,relevance:0}, -NUMBER_MODE:{scope:"number",begin:y,relevance:0},NUMBER_RE:y, -PHRASAL_WORDS_MODE:{ -begin:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/ -},QUOTE_STRING_MODE:k,REGEXP_MODE:{scope:"regexp",begin:/\/(?=[^/\n]*\/)/, -end:/\/[gimuy]*/,contains:[v,{begin:/\[/,end:/\]/,relevance:0,contains:[v]}]}, -RE_STARTERS_RE:"!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~", -SHEBANG:(e={})=>{const n=/^#![ ]*\// -;return e.binary&&(e.begin=b(n,/.*\b/,e.binary,/\b.*/)),a({scope:"meta",begin:n, -end:/$/,relevance:0,"on:begin":(e,n)=>{0!==e.index&&n.ignoreMatch()}},e)}, 
-TITLE_MODE:{scope:"title",begin:f,relevance:0},UNDERSCORE_IDENT_RE:E, -UNDERSCORE_TITLE_MODE:{scope:"title",begin:E,relevance:0}});function T(e,n){ -"."===e.input[e.index-1]&&n.ignoreMatch()}function R(e,n){ -void 0!==e.className&&(e.scope=e.className,delete e.className)}function D(e,n){ -n&&e.beginKeywords&&(e.begin="\\b("+e.beginKeywords.split(" ").join("|")+")(?!\\.)(?=\\b|\\s)", -e.__beforeBegin=T,e.keywords=e.keywords||e.beginKeywords,delete e.beginKeywords, -void 0===e.relevance&&(e.relevance=0))}function I(e,n){ -Array.isArray(e.illegal)&&(e.illegal=m(...e.illegal))}function L(e,n){ -if(e.match){ -if(e.begin||e.end)throw Error("begin & end are not supported with match") -;e.begin=e.match,delete e.match}}function B(e,n){ -void 0===e.relevance&&(e.relevance=1)}const $=(e,n)=>{if(!e.beforeMatch)return -;if(e.starts)throw Error("beforeMatch cannot be used with starts") -;const t=Object.assign({},e);Object.keys(e).forEach((n=>{delete e[n] -})),e.keywords=t.keywords,e.begin=b(t.beforeMatch,d(t.begin)),e.starts={ -relevance:0,contains:[Object.assign(t,{endsParent:!0})] -},e.relevance=0,delete t.beforeMatch -},z=["of","and","for","in","not","or","if","then","parent","list","value"],F="keyword" -;function U(e,n,t=F){const a=Object.create(null) -;return"string"==typeof e?i(t,e.split(" ")):Array.isArray(e)?i(t,e):Object.keys(e).forEach((t=>{ -Object.assign(a,U(e[t],n,t))})),a;function i(e,t){ -n&&(t=t.map((e=>e.toLowerCase()))),t.forEach((n=>{const t=n.split("|") -;a[t[0]]=[e,j(t[0],t[1])]}))}}function j(e,n){ -return n?Number(n):(e=>z.includes(e.toLowerCase()))(e)?0:1}const P={},K=e=>{ -console.error(e)},H=(e,...n)=>{console.log("WARN: "+e,...n)},q=(e,n)=>{ -P[`${e}/${n}`]||(console.log(`Deprecated as of ${e}. 
${n}`),P[`${e}/${n}`]=!0) -},G=Error();function Z(e,n,{key:t}){let a=0;const i=e[t],r={},s={} -;for(let e=1;e<=n.length;e++)s[e+a]=i[e],r[e+a]=!0,a+=p(n[e-1]) -;e[t]=s,e[t]._emit=r,e[t]._multi=!0}function W(e){(e=>{ -e.scope&&"object"==typeof e.scope&&null!==e.scope&&(e.beginScope=e.scope, -delete e.scope)})(e),"string"==typeof e.beginScope&&(e.beginScope={ -_wrap:e.beginScope}),"string"==typeof e.endScope&&(e.endScope={_wrap:e.endScope -}),(e=>{if(Array.isArray(e.begin)){ -if(e.skip||e.excludeBegin||e.returnBegin)throw K("skip, excludeBegin, returnBegin not compatible with beginScope: {}"), -G -;if("object"!=typeof e.beginScope||null===e.beginScope)throw K("beginScope must be object"), -G;Z(e,e.begin,{key:"beginScope"}),e.begin=h(e.begin,{joinWith:""})}})(e),(e=>{ -if(Array.isArray(e.end)){ -if(e.skip||e.excludeEnd||e.returnEnd)throw K("skip, excludeEnd, returnEnd not compatible with endScope: {}"), -G -;if("object"!=typeof e.endScope||null===e.endScope)throw K("endScope must be object"), -G;Z(e,e.end,{key:"endScope"}),e.end=h(e.end,{joinWith:""})}})(e)}function Q(e){ -function n(n,t){ -return RegExp(c(n),"m"+(e.case_insensitive?"i":"")+(e.unicodeRegex?"u":"")+(t?"g":"")) -}class t{constructor(){ -this.matchIndexes={},this.regexes=[],this.matchAt=1,this.position=0} -addRule(e,n){ -n.position=this.position++,this.matchIndexes[this.matchAt]=n,this.regexes.push([n,e]), -this.matchAt+=p(e)+1}compile(){0===this.regexes.length&&(this.exec=()=>null) -;const e=this.regexes.map((e=>e[1]));this.matcherRe=n(h(e,{joinWith:"|" -}),!0),this.lastIndex=0}exec(e){this.matcherRe.lastIndex=this.lastIndex -;const n=this.matcherRe.exec(e);if(!n)return null -;const t=n.findIndex(((e,n)=>n>0&&void 0!==e)),a=this.matchIndexes[t] -;return n.splice(0,t),Object.assign(n,a)}}class i{constructor(){ -this.rules=[],this.multiRegexes=[], -this.count=0,this.lastIndex=0,this.regexIndex=0}getMatcher(e){ -if(this.multiRegexes[e])return this.multiRegexes[e];const n=new t -;return 
this.rules.slice(e).forEach((([e,t])=>n.addRule(e,t))), -n.compile(),this.multiRegexes[e]=n,n}resumingScanAtSamePosition(){ -return 0!==this.regexIndex}considerAll(){this.regexIndex=0}addRule(e,n){ -this.rules.push([e,n]),"begin"===n.type&&this.count++}exec(e){ -const n=this.getMatcher(this.regexIndex);n.lastIndex=this.lastIndex -;let t=n.exec(e) -;if(this.resumingScanAtSamePosition())if(t&&t.index===this.lastIndex);else{ -const n=this.getMatcher(0);n.lastIndex=this.lastIndex+1,t=n.exec(e)} -return t&&(this.regexIndex+=t.position+1, -this.regexIndex===this.count&&this.considerAll()),t}} -if(e.compilerExtensions||(e.compilerExtensions=[]), -e.contains&&e.contains.includes("self"))throw Error("ERR: contains `self` is not supported at the top-level of a language. See documentation.") -;return e.classNameAliases=a(e.classNameAliases||{}),function t(r,s){const o=r -;if(r.isCompiled)return o -;[R,L,W,$].forEach((e=>e(r,s))),e.compilerExtensions.forEach((e=>e(r,s))), -r.__beforeBegin=null,[D,I,B].forEach((e=>e(r,s))),r.isCompiled=!0;let l=null -;return"object"==typeof r.keywords&&r.keywords.$pattern&&(r.keywords=Object.assign({},r.keywords), -l=r.keywords.$pattern, -delete r.keywords.$pattern),l=l||/\w+/,r.keywords&&(r.keywords=U(r.keywords,e.case_insensitive)), -o.keywordPatternRe=n(l,!0), -s&&(r.begin||(r.begin=/\B|\b/),o.beginRe=n(o.begin),r.end||r.endsWithParent||(r.end=/\B|\b/), -r.end&&(o.endRe=n(o.end)), -o.terminatorEnd=c(o.end)||"",r.endsWithParent&&s.terminatorEnd&&(o.terminatorEnd+=(r.end?"|":"")+s.terminatorEnd)), -r.illegal&&(o.illegalRe=n(r.illegal)), -r.contains||(r.contains=[]),r.contains=[].concat(...r.contains.map((e=>(e=>(e.variants&&!e.cachedVariants&&(e.cachedVariants=e.variants.map((n=>a(e,{ -variants:null},n)))),e.cachedVariants?e.cachedVariants:X(e)?a(e,{ -starts:e.starts?a(e.starts):null -}):Object.isFrozen(e)?a(e):e))("self"===e?r:e)))),r.contains.forEach((e=>{t(e,o) -})),r.starts&&t(r.starts,s),o.matcher=(e=>{const n=new i -;return 
e.contains.forEach((e=>n.addRule(e.begin,{rule:e,type:"begin" -}))),e.terminatorEnd&&n.addRule(e.terminatorEnd,{type:"end" -}),e.illegal&&n.addRule(e.illegal,{type:"illegal"}),n})(o),o}(e)}function X(e){ -return!!e&&(e.endsWithParent||X(e.starts))}class V extends Error{ -constructor(e,n){super(e),this.name="HTMLInjectionError",this.html=n}} -const J=t,Y=a,ee=Symbol("nomatch"),ne=t=>{ -const a=Object.create(null),i=Object.create(null),r=[];let s=!0 -;const o="Could not find the language '{}', did you forget to load/include a language module?",c={ -disableAutodetect:!0,name:"Plain text",contains:[]};let p={ -ignoreUnescapedHTML:!1,throwUnescapedHTML:!1,noHighlightRe:/^(no-?highlight)$/i, -languageDetectRe:/\blang(?:uage)?-([\w-]+)\b/i,classPrefix:"hljs-", -cssSelector:"pre code",languages:null,__emitter:l};function _(e){ -return p.noHighlightRe.test(e)}function h(e,n,t){let a="",i="" -;"object"==typeof n?(a=e, -t=n.ignoreIllegals,i=n.language):(q("10.7.0","highlight(lang, code, ...args) has been deprecated."), -q("10.7.0","Please use highlight(code, options) instead.\nhttps://github.com/highlightjs/highlight.js/issues/2277"), -i=e,a=n),void 0===t&&(t=!0);const r={code:a,language:i};x("before:highlight",r) -;const s=r.result?r.result:f(r.language,r.code,t) -;return s.code=r.code,x("after:highlight",s),s}function f(e,t,i,r){ -const l=Object.create(null);function c(){if(!x.keywords)return void S.addText(A) -;let e=0;x.keywordPatternRe.lastIndex=0;let n=x.keywordPatternRe.exec(A),t="" -;for(;n;){t+=A.substring(e,n.index) -;const i=w.case_insensitive?n[0].toLowerCase():n[0],r=(a=i,x.keywords[a]);if(r){ -const[e,a]=r -;if(S.addText(t),t="",l[i]=(l[i]||0)+1,l[i]<=7&&(C+=a),e.startsWith("_"))t+=n[0];else{ -const t=w.classNameAliases[e]||e;g(n[0],t)}}else t+=n[0] -;e=x.keywordPatternRe.lastIndex,n=x.keywordPatternRe.exec(A)}var a -;t+=A.substring(e),S.addText(t)}function d(){null!=x.subLanguage?(()=>{ -if(""===A)return;let e=null;if("string"==typeof x.subLanguage){ 
-if(!a[x.subLanguage])return void S.addText(A) -;e=f(x.subLanguage,A,!0,M[x.subLanguage]),M[x.subLanguage]=e._top -}else e=E(A,x.subLanguage.length?x.subLanguage:null) -;x.relevance>0&&(C+=e.relevance),S.__addSublanguage(e._emitter,e.language) -})():c(),A=""}function g(e,n){ -""!==e&&(S.startScope(n),S.addText(e),S.endScope())}function u(e,n){let t=1 -;const a=n.length-1;for(;t<=a;){if(!e._emit[t]){t++;continue} -const a=w.classNameAliases[e[t]]||e[t],i=n[t];a?g(i,a):(A=i,c(),A=""),t++}} -function b(e,n){ -return e.scope&&"string"==typeof e.scope&&S.openNode(w.classNameAliases[e.scope]||e.scope), -e.beginScope&&(e.beginScope._wrap?(g(A,w.classNameAliases[e.beginScope._wrap]||e.beginScope._wrap), -A=""):e.beginScope._multi&&(u(e.beginScope,n),A="")),x=Object.create(e,{parent:{ -value:x}}),x}function m(e,t,a){let i=((e,n)=>{const t=e&&e.exec(n) -;return t&&0===t.index})(e.endRe,a);if(i){if(e["on:end"]){const a=new n(e) -;e["on:end"](t,a),a.isMatchIgnored&&(i=!1)}if(i){ -for(;e.endsParent&&e.parent;)e=e.parent;return e}} -if(e.endsWithParent)return m(e.parent,t,a)}function _(e){ -return 0===x.matcher.regexIndex?(A+=e[0],1):(D=!0,0)}function h(e){ -const n=e[0],a=t.substring(e.index),i=m(x,e,a);if(!i)return ee;const r=x -;x.endScope&&x.endScope._wrap?(d(), -g(n,x.endScope._wrap)):x.endScope&&x.endScope._multi?(d(), -u(x.endScope,e)):r.skip?A+=n:(r.returnEnd||r.excludeEnd||(A+=n), -d(),r.excludeEnd&&(A=n));do{ -x.scope&&S.closeNode(),x.skip||x.subLanguage||(C+=x.relevance),x=x.parent -}while(x!==i.parent);return i.starts&&b(i.starts,e),r.returnEnd?0:n.length} -let y={};function N(a,r){const o=r&&r[0];if(A+=a,null==o)return d(),0 -;if("begin"===y.type&&"end"===r.type&&y.index===r.index&&""===o){ -if(A+=t.slice(r.index,r.index+1),!s){const n=Error(`0 width match regex (${e})`) -;throw n.languageName=e,n.badRule=y.rule,n}return 1} -if(y=r,"begin"===r.type)return(e=>{ -const t=e[0],a=e.rule,i=new n(a),r=[a.__beforeBegin,a["on:begin"]] -;for(const n of 
r)if(n&&(n(e,i),i.isMatchIgnored))return _(t) -;return a.skip?A+=t:(a.excludeBegin&&(A+=t), -d(),a.returnBegin||a.excludeBegin||(A=t)),b(a,e),a.returnBegin?0:t.length})(r) -;if("illegal"===r.type&&!i){ -const e=Error('Illegal lexeme "'+o+'" for mode "'+(x.scope||"")+'"') -;throw e.mode=x,e}if("end"===r.type){const e=h(r);if(e!==ee)return e} -if("illegal"===r.type&&""===o)return 1 -;if(R>1e5&&R>3*r.index)throw Error("potential infinite loop, way more iterations than matches") -;return A+=o,o.length}const w=v(e) -;if(!w)throw K(o.replace("{}",e)),Error('Unknown language: "'+e+'"') -;const O=Q(w);let k="",x=r||O;const M={},S=new p.__emitter(p);(()=>{const e=[] -;for(let n=x;n!==w;n=n.parent)n.scope&&e.unshift(n.scope) -;e.forEach((e=>S.openNode(e)))})();let A="",C=0,T=0,R=0,D=!1;try{ -if(w.__emitTokens)w.__emitTokens(t,S);else{for(x.matcher.considerAll();;){ -R++,D?D=!1:x.matcher.considerAll(),x.matcher.lastIndex=T -;const e=x.matcher.exec(t);if(!e)break;const n=N(t.substring(T,e.index),e) -;T=e.index+n}N(t.substring(T))}return S.finalize(),k=S.toHTML(),{language:e, -value:k,relevance:C,illegal:!1,_emitter:S,_top:x}}catch(n){ -if(n.message&&n.message.includes("Illegal"))return{language:e,value:J(t), -illegal:!0,relevance:0,_illegalBy:{message:n.message,index:T, -context:t.slice(T-100,T+100),mode:n.mode,resultSoFar:k},_emitter:S};if(s)return{ -language:e,value:J(t),illegal:!1,relevance:0,errorRaised:n,_emitter:S,_top:x} -;throw n}}function E(e,n){n=n||p.languages||Object.keys(a);const t=(e=>{ -const n={value:J(e),illegal:!1,relevance:0,_top:c,_emitter:new p.__emitter(p)} -;return n._emitter.addText(e),n})(e),i=n.filter(v).filter(k).map((n=>f(n,e,!1))) -;i.unshift(t);const r=i.sort(((e,n)=>{ -if(e.relevance!==n.relevance)return n.relevance-e.relevance -;if(e.language&&n.language){if(v(e.language).supersetOf===n.language)return 1 -;if(v(n.language).supersetOf===e.language)return-1}return 0})),[s,o]=r,l=s -;return l.secondBest=o,l}function y(e){let n=null;const t=(e=>{ 
-let n=e.className+" ";n+=e.parentNode?e.parentNode.className:"" -;const t=p.languageDetectRe.exec(n);if(t){const n=v(t[1]) -;return n||(H(o.replace("{}",t[1])), -H("Falling back to no-highlight mode for this block.",e)),n?t[1]:"no-highlight"} -return n.split(/\s+/).find((e=>_(e)||v(e)))})(e);if(_(t))return -;if(x("before:highlightElement",{el:e,language:t -}),e.dataset.highlighted)return void console.log("Element previously highlighted. To highlight again, first unset `dataset.highlighted`.",e) -;if(e.children.length>0&&(p.ignoreUnescapedHTML||(console.warn("One of your code blocks includes unescaped HTML. This is a potentially serious security risk."), -console.warn("https://github.com/highlightjs/highlight.js/wiki/security"), -console.warn("The element with unescaped HTML:"), -console.warn(e)),p.throwUnescapedHTML))throw new V("One of your code blocks includes unescaped HTML.",e.innerHTML) -;n=e;const a=n.textContent,r=t?h(a,{language:t,ignoreIllegals:!0}):E(a) -;e.innerHTML=r.value,e.dataset.highlighted="yes",((e,n,t)=>{const a=n&&i[n]||t -;e.classList.add("hljs"),e.classList.add("language-"+a) -})(e,t,r.language),e.result={language:r.language,re:r.relevance, -relevance:r.relevance},r.secondBest&&(e.secondBest={ -language:r.secondBest.language,relevance:r.secondBest.relevance -}),x("after:highlightElement",{el:e,result:r,text:a})}let N=!1;function w(){ -"loading"!==document.readyState?document.querySelectorAll(p.cssSelector).forEach(y):N=!0 -}function v(e){return e=(e||"").toLowerCase(),a[e]||a[i[e]]} -function O(e,{languageName:n}){"string"==typeof e&&(e=[e]),e.forEach((e=>{ -i[e.toLowerCase()]=n}))}function k(e){const n=v(e) -;return n&&!n.disableAutodetect}function x(e,n){const t=e;r.forEach((e=>{ -e[t]&&e[t](n)}))} -"undefined"!=typeof window&&window.addEventListener&&window.addEventListener("DOMContentLoaded",(()=>{ -N&&w()}),!1),Object.assign(t,{highlight:h,highlightAuto:E,highlightAll:w, -highlightElement:y, -highlightBlock:e=>(q("10.7.0","highlightBlock 
will be removed entirely in v12.0"), -q("10.7.0","Please use highlightElement now."),y(e)),configure:e=>{p=Y(p,e)}, -initHighlighting:()=>{ -w(),q("10.6.0","initHighlighting() deprecated. Use highlightAll() now.")}, -initHighlightingOnLoad:()=>{ -w(),q("10.6.0","initHighlightingOnLoad() deprecated. Use highlightAll() now.") -},registerLanguage:(e,n)=>{let i=null;try{i=n(t)}catch(n){ -if(K("Language definition for '{}' could not be registered.".replace("{}",e)), -!s)throw n;K(n),i=c} -i.name||(i.name=e),a[e]=i,i.rawDefinition=n.bind(null,t),i.aliases&&O(i.aliases,{ -languageName:e})},unregisterLanguage:e=>{delete a[e] -;for(const n of Object.keys(i))i[n]===e&&delete i[n]}, -listLanguages:()=>Object.keys(a),getLanguage:v,registerAliases:O, -autoDetection:k,inherit:Y,addPlugin:e=>{(e=>{ -e["before:highlightBlock"]&&!e["before:highlightElement"]&&(e["before:highlightElement"]=n=>{ -e["before:highlightBlock"](Object.assign({block:n.el},n)) -}),e["after:highlightBlock"]&&!e["after:highlightElement"]&&(e["after:highlightElement"]=n=>{ -e["after:highlightBlock"](Object.assign({block:n.el},n))})})(e),r.push(e)}, -removePlugin:e=>{const n=r.indexOf(e);-1!==n&&r.splice(n,1)}}),t.debugMode=()=>{ -s=!1},t.safeMode=()=>{s=!0},t.versionString="11.9.0",t.regex={concat:b, -lookahead:d,either:m,optional:u,anyNumberOfTimes:g} -;for(const n in C)"object"==typeof C[n]&&e(C[n]);return Object.assign(t,C),t -},te=ne({});te.newInstance=()=>ne({});var ae=te;const ie=e=>({IMPORTANT:{ -scope:"meta",begin:"!important"},BLOCK_COMMENT:e.C_BLOCK_COMMENT_MODE,HEXCOLOR:{ -scope:"number",begin:/#(([0-9a-fA-F]{3,4})|(([0-9a-fA-F]{2}){3,4}))\b/}, -FUNCTION_DISPATCH:{className:"built_in",begin:/[\w-]+(?=\()/}, -ATTRIBUTE_SELECTOR_MODE:{scope:"selector-attr",begin:/\[/,end:/\]/,illegal:"$", -contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},CSS_NUMBER_MODE:{ -scope:"number", -begin:e.NUMBER_RE+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?", 
-relevance:0},CSS_VARIABLE:{className:"attr",begin:/--[A-Za-z_][A-Za-z0-9_-]*/} -}),re=["a","abbr","address","article","aside","audio","b","blockquote","body","button","canvas","caption","cite","code","dd","del","details","dfn","div","dl","dt","em","fieldset","figcaption","figure","footer","form","h1","h2","h3","h4","h5","h6","header","hgroup","html","i","iframe","img","input","ins","kbd","label","legend","li","main","mark","menu","nav","object","ol","p","q","quote","samp","section","span","strong","summary","sup","table","tbody","td","textarea","tfoot","th","thead","time","tr","ul","var","video"],se=["any-hover","any-pointer","aspect-ratio","color","color-gamut","color-index","device-aspect-ratio","device-height","device-width","display-mode","forced-colors","grid","height","hover","inverted-colors","monochrome","orientation","overflow-block","overflow-inline","pointer","prefers-color-scheme","prefers-contrast","prefers-reduced-motion","prefers-reduced-transparency","resolution","scan","scripting","update","width","min-width","max-width","min-height","max-height"],oe=["active","any-link","blank","checked","current","default","defined","dir","disabled","drop","empty","enabled","first","first-child","first-of-type","fullscreen","future","focus","focus-visible","focus-within","has","host","host-context","hover","indeterminate","in-range","invalid","is","lang","last-child","last-of-type","left","link","local-link","not","nth-child","nth-col","nth-last-child","nth-last-col","nth-last-of-type","nth-of-type","only-child","only-of-type","optional","out-of-range","past","placeholder-shown","read-only","read-write","required","right","root","scope","target","target-within","user-invalid","valid","visited","where"],le=["after","backdrop","before","cue","cue-region","first-letter","first-line","grammar-error","marker","part","placeholder","selection","slotted","spelling-error"],ce=["align-content","align-items","align-self","all","animation","animation-delay","animation-direct
ion","animation-duration","animation-fill-mode","animation-iteration-count","animation-name","animation-play-state","animation-timing-function","backface-visibility","background","background-attachment","background-blend-mode","background-clip","background-color","background-image","background-origin","background-position","background-repeat","background-size","block-size","border","border-block","border-block-color","border-block-end","border-block-end-color","border-block-end-style","border-block-end-width","border-block-start","border-block-start-color","border-block-start-style","border-block-start-width","border-block-style","border-block-width","border-bottom","border-bottom-color","border-bottom-left-radius","border-bottom-right-radius","border-bottom-style","border-bottom-width","border-collapse","border-color","border-image","border-image-outset","border-image-repeat","border-image-slice","border-image-source","border-image-width","border-inline","border-inline-color","border-inline-end","border-inline-end-color","border-inline-end-style","border-inline-end-width","border-inline-start","border-inline-start-color","border-inline-start-style","border-inline-start-width","border-inline-style","border-inline-width","border-left","border-left-color","border-left-style","border-left-width","border-radius","border-right","border-right-color","border-right-style","border-right-width","border-spacing","border-style","border-top","border-top-color","border-top-left-radius","border-top-right-radius","border-top-style","border-top-width","border-width","bottom","box-decoration-break","box-shadow","box-sizing","break-after","break-before","break-inside","caption-side","caret-color","clear","clip","clip-path","clip-rule","color","column-count","column-fill","column-gap","column-rule","column-rule-color","column-rule-style","column-rule-width","column-span","column-width","columns","contain","content","content-visibility","counter-increment","counter-reset","cue","cue-aft
er","cue-before","cursor","direction","display","empty-cells","filter","flex","flex-basis","flex-direction","flex-flow","flex-grow","flex-shrink","flex-wrap","float","flow","font","font-display","font-family","font-feature-settings","font-kerning","font-language-override","font-size","font-size-adjust","font-smoothing","font-stretch","font-style","font-synthesis","font-variant","font-variant-caps","font-variant-east-asian","font-variant-ligatures","font-variant-numeric","font-variant-position","font-variation-settings","font-weight","gap","glyph-orientation-vertical","grid","grid-area","grid-auto-columns","grid-auto-flow","grid-auto-rows","grid-column","grid-column-end","grid-column-start","grid-gap","grid-row","grid-row-end","grid-row-start","grid-template","grid-template-areas","grid-template-columns","grid-template-rows","hanging-punctuation","height","hyphens","icon","image-orientation","image-rendering","image-resolution","ime-mode","inline-size","isolation","justify-content","left","letter-spacing","line-break","line-height","list-style","list-style-image","list-style-position","list-style-type","margin","margin-block","margin-block-end","margin-block-start","margin-bottom","margin-inline","margin-inline-end","margin-inline-start","margin-left","margin-right","margin-top","marks","mask","mask-border","mask-border-mode","mask-border-outset","mask-border-repeat","mask-border-slice","mask-border-source","mask-border-width","mask-clip","mask-composite","mask-image","mask-mode","mask-origin","mask-position","mask-repeat","mask-size","mask-type","max-block-size","max-height","max-inline-size","max-width","min-block-size","min-height","min-inline-size","min-width","mix-blend-mode","nav-down","nav-index","nav-left","nav-right","nav-up","none","normal","object-fit","object-position","opacity","order","orphans","outline","outline-color","outline-offset","outline-style","outline-width","overflow","overflow-wrap","overflow-x","overflow-y","padding","padding-block","paddin
g-block-end","padding-block-start","padding-bottom","padding-inline","padding-inline-end","padding-inline-start","padding-left","padding-right","padding-top","page-break-after","page-break-before","page-break-inside","pause","pause-after","pause-before","perspective","perspective-origin","pointer-events","position","quotes","resize","rest","rest-after","rest-before","right","row-gap","scroll-margin","scroll-margin-block","scroll-margin-block-end","scroll-margin-block-start","scroll-margin-bottom","scroll-margin-inline","scroll-margin-inline-end","scroll-margin-inline-start","scroll-margin-left","scroll-margin-right","scroll-margin-top","scroll-padding","scroll-padding-block","scroll-padding-block-end","scroll-padding-block-start","scroll-padding-bottom","scroll-padding-inline","scroll-padding-inline-end","scroll-padding-inline-start","scroll-padding-left","scroll-padding-right","scroll-padding-top","scroll-snap-align","scroll-snap-stop","scroll-snap-type","scrollbar-color","scrollbar-gutter","scrollbar-width","shape-image-threshold","shape-margin","shape-outside","speak","speak-as","src","tab-size","table-layout","text-align","text-align-all","text-align-last","text-combine-upright","text-decoration","text-decoration-color","text-decoration-line","text-decoration-style","text-emphasis","text-emphasis-color","text-emphasis-position","text-emphasis-style","text-indent","text-justify","text-orientation","text-overflow","text-rendering","text-shadow","text-transform","text-underline-position","top","transform","transform-box","transform-origin","transform-style","transition","transition-delay","transition-duration","transition-property","transition-timing-function","unicode-bidi","vertical-align","visibility","voice-balance","voice-duration","voice-family","voice-pitch","voice-range","voice-rate","voice-stress","voice-volume","white-space","widows","width","will-change","word-break","word-spacing","word-wrap","writing-mode","z-index"].reverse(),de=oe.concat(le) -;var 
ge="[0-9](_*[0-9])*",ue=`\\.(${ge})`,be="[0-9a-fA-F](_*[0-9a-fA-F])*",me={ -className:"number",variants:[{ -begin:`(\\b(${ge})((${ue})|\\.)?|(${ue}))[eE][+-]?(${ge})[fFdD]?\\b`},{ -begin:`\\b(${ge})((${ue})[fFdD]?\\b|\\.([fFdD]\\b)?)`},{ -begin:`(${ue})[fFdD]?\\b`},{begin:`\\b(${ge})[fFdD]\\b`},{ -begin:`\\b0[xX]((${be})\\.?|(${be})?\\.(${be}))[pP][+-]?(${ge})[fFdD]?\\b`},{ -begin:"\\b(0|[1-9](_*[0-9])*)[lL]?\\b"},{begin:`\\b0[xX](${be})[lL]?\\b`},{ -begin:"\\b0(_*[0-7])*[lL]?\\b"},{begin:"\\b0[bB][01](_*[01])*[lL]?\\b"}], -relevance:0};function pe(e,n,t){return-1===t?"":e.replace(n,(a=>pe(e,n,t-1)))} -const _e="[A-Za-z$_][0-9A-Za-z$_]*",he=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],fe=["true","false","null","undefined","NaN","Infinity"],Ee=["Object","Function","Boolean","Symbol","Math","Date","Number","BigInt","String","RegExp","Array","Float32Array","Float64Array","Int8Array","Uint8Array","Uint8ClampedArray","Int16Array","Int32Array","Uint16Array","Uint32Array","BigInt64Array","BigUint64Array","Set","Map","WeakSet","WeakMap","ArrayBuffer","SharedArrayBuffer","Atomics","DataView","JSON","Promise","Generator","GeneratorFunction","AsyncFunction","Reflect","Proxy","Intl","WebAssembly"],ye=["Error","EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"],Ne=["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],we=["arguments","this","super","console","window","document","localStorage","sessionStorage","module","global"],ve=[].concat(Ne,Ee,ye) -;function Oe(e){const n=e.regex,t=_e,a={begin:/<[A-Za-z0-9\\._:-]+/, 
-end:/\/[A-Za-z0-9\\._:-]+>|\/>/,isTrulyOpeningTag:(e,n)=>{ -const t=e[0].length+e.index,a=e.input[t] -;if("<"===a||","===a)return void n.ignoreMatch();let i -;">"===a&&(((e,{after:n})=>{const t="",M={ -match:[/const|var|let/,/\s+/,t,/\s*/,/=\s*/,/(async\s*)?/,n.lookahead(x)], -keywords:"async",className:{1:"keyword",3:"title.function"},contains:[f]} -;return{name:"JavaScript",aliases:["js","jsx","mjs","cjs"],keywords:i,exports:{ -PARAMS_CONTAINS:h,CLASS_REFERENCE:y},illegal:/#(?![$_A-z])/, -contains:[e.SHEBANG({label:"shebang",binary:"node",relevance:5}),{ -label:"use_strict",className:"meta",relevance:10, -begin:/^\s*['"]use (strict|asm)['"]/ -},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,d,g,u,b,m,{match:/\$\d+/},l,y,{ -className:"attr",begin:t+n.lookahead(":"),relevance:0},M,{ -begin:"("+e.RE_STARTERS_RE+"|\\b(case|return|throw)\\b)\\s*", -keywords:"return throw case",relevance:0,contains:[m,e.REGEXP_MODE,{ -className:"function",begin:x,returnBegin:!0,end:"\\s*=>",contains:[{ -className:"params",variants:[{begin:e.UNDERSCORE_IDENT_RE,relevance:0},{ -className:null,begin:/\(\s*\)/,skip:!0},{begin:/\(/,end:/\)/,excludeBegin:!0, -excludeEnd:!0,keywords:i,contains:h}]}]},{begin:/,/,relevance:0},{match:/\s+/, -relevance:0},{variants:[{begin:"<>",end:""},{ -match:/<[A-Za-z0-9\\._:-]+\s*\/>/},{begin:a.begin, -"on:begin":a.isTrulyOpeningTag,end:a.end}],subLanguage:"xml",contains:[{ -begin:a.begin,end:a.end,skip:!0,contains:["self"]}]}]},N,{ -beginKeywords:"while if switch catch for"},{ -begin:"\\b(?!function)"+e.UNDERSCORE_IDENT_RE+"\\([^()]*(\\([^()]*(\\([^()]*\\)[^()]*)*\\)[^()]*)*\\)\\s*\\{", -returnBegin:!0,label:"func.def",contains:[f,e.inherit(e.TITLE_MODE,{begin:t, -className:"title.function"})]},{match:/\.\.\./,relevance:0},O,{match:"\\$"+t, -relevance:0},{match:[/\bconstructor(?=\s*\()/],className:{1:"title.function"}, -contains:[f]},w,{relevance:0,match:/\b[A-Z][A-Z_0-9]+\b/, -className:"variable.constant"},E,k,{match:/\$[(.]/}]}} -const 
ke=e=>b(/\b/,e,/\w$/.test(e)?/\b/:/\B/),xe=["Protocol","Type"].map(ke),Me=["init","self"].map(ke),Se=["Any","Self"],Ae=["actor","any","associatedtype","async","await",/as\?/,/as!/,"as","borrowing","break","case","catch","class","consume","consuming","continue","convenience","copy","default","defer","deinit","didSet","distributed","do","dynamic","each","else","enum","extension","fallthrough",/fileprivate\(set\)/,"fileprivate","final","for","func","get","guard","if","import","indirect","infix",/init\?/,/init!/,"inout",/internal\(set\)/,"internal","in","is","isolated","nonisolated","lazy","let","macro","mutating","nonmutating",/open\(set\)/,"open","operator","optional","override","postfix","precedencegroup","prefix",/private\(set\)/,"private","protocol",/public\(set\)/,"public","repeat","required","rethrows","return","set","some","static","struct","subscript","super","switch","throws","throw",/try\?/,/try!/,"try","typealias",/unowned\(safe\)/,/unowned\(unsafe\)/,"unowned","var","weak","where","while","willSet"],Ce=["false","nil","true"],Te=["assignment","associativity","higherThan","left","lowerThan","none","right"],Re=["#colorLiteral","#column","#dsohandle","#else","#elseif","#endif","#error","#file","#fileID","#fileLiteral","#filePath","#function","#if","#imageLiteral","#keyPath","#line","#selector","#sourceLocation","#warning"],De=["abs","all","any","assert","assertionFailure","debugPrint","dump","fatalError","getVaList","isKnownUniquelyReferenced","max","min","numericCast","pointwiseMax","pointwiseMin","precondition","preconditionFailure","print","readLine","repeatElement","sequence","stride","swap","swift_unboxFromSwiftValueWithType","transcode","type","unsafeBitCast","unsafeDowncast","withExtendedLifetime","withUnsafeMutablePointer","withUnsafePointer","withVaList","withoutActuallyEscaping","zip"],Ie=m(/[/=\-+!*%<>&|^~?]/,/[\u00A1-\u00A7]/,/[\u00A9\u00AB]/,/[\u00AC\u00AE]/,/[\u00B0\u00B1]/,/[\u00B6\u00BB\u00BF\u00D7\u00F7]/,/[\u2016-\u2017]/,/[\u2020-\u2027]/,/[\
u2030-\u203E]/,/[\u2041-\u2053]/,/[\u2055-\u205E]/,/[\u2190-\u23FF]/,/[\u2500-\u2775]/,/[\u2794-\u2BFF]/,/[\u2E00-\u2E7F]/,/[\u3001-\u3003]/,/[\u3008-\u3020]/,/[\u3030]/),Le=m(Ie,/[\u0300-\u036F]/,/[\u1DC0-\u1DFF]/,/[\u20D0-\u20FF]/,/[\uFE00-\uFE0F]/,/[\uFE20-\uFE2F]/),Be=b(Ie,Le,"*"),$e=m(/[a-zA-Z_]/,/[\u00A8\u00AA\u00AD\u00AF\u00B2-\u00B5\u00B7-\u00BA]/,/[\u00BC-\u00BE\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u00FF]/,/[\u0100-\u02FF\u0370-\u167F\u1681-\u180D\u180F-\u1DBF]/,/[\u1E00-\u1FFF]/,/[\u200B-\u200D\u202A-\u202E\u203F-\u2040\u2054\u2060-\u206F]/,/[\u2070-\u20CF\u2100-\u218F\u2460-\u24FF\u2776-\u2793]/,/[\u2C00-\u2DFF\u2E80-\u2FFF]/,/[\u3004-\u3007\u3021-\u302F\u3031-\u303F\u3040-\uD7FF]/,/[\uF900-\uFD3D\uFD40-\uFDCF\uFDF0-\uFE1F\uFE30-\uFE44]/,/[\uFE47-\uFEFE\uFF00-\uFFFD]/),ze=m($e,/\d/,/[\u0300-\u036F\u1DC0-\u1DFF\u20D0-\u20FF\uFE20-\uFE2F]/),Fe=b($e,ze,"*"),Ue=b(/[A-Z]/,ze,"*"),je=["attached","autoclosure",b(/convention\(/,m("swift","block","c"),/\)/),"discardableResult","dynamicCallable","dynamicMemberLookup","escaping","freestanding","frozen","GKInspectable","IBAction","IBDesignable","IBInspectable","IBOutlet","IBSegueAction","inlinable","main","nonobjc","NSApplicationMain","NSCopying","NSManaged",b(/objc\(/,Fe,/\)/),"objc","objcMembers","propertyWrapper","requires_stored_property_inits","resultBuilder","Sendable","testable","UIApplicationMain","unchecked","unknown","usableFromInline","warn_unqualified_access"],Pe=["iOS","iOSApplicationExtension","macOS","macOSApplicationExtension","macCatalyst","macCatalystApplicationExtension","watchOS","watchOSApplicationExtension","tvOS","tvOSApplicationExtension","swift"] -;var Ke=Object.freeze({__proto__:null,grmr_bash:e=>{const n=e.regex,t={},a={ -begin:/\$\{/,end:/\}/,contains:["self",{begin:/:-/,contains:[t]}]} -;Object.assign(t,{className:"variable",variants:[{ -begin:n.concat(/\$[\w\d#@][\w\d_]*/,"(?![\\w\\d])(?![$])")},a]});const i={ -className:"subst",begin:/\$\(/,end:/\)/,contains:[e.BACKSLASH_ESCAPE]},r={ 
-begin:/<<-?\s*(?=\w+)/,starts:{contains:[e.END_SAME_AS_BEGIN({begin:/(\w+)/, -end:/(\w+)/,className:"string"})]}},s={className:"string",begin:/"/,end:/"/, -contains:[e.BACKSLASH_ESCAPE,t,i]};i.contains.push(s);const o={begin:/\$?\(\(/, -end:/\)\)/,contains:[{begin:/\d+#[0-9a-f]+/,className:"number"},e.NUMBER_MODE,t] -},l=e.SHEBANG({binary:"(fish|bash|zsh|sh|csh|ksh|tcsh|dash|scsh)",relevance:10 -}),c={className:"function",begin:/\w[\w\d_]*\s*\(\s*\)\s*\{/,returnBegin:!0, -contains:[e.inherit(e.TITLE_MODE,{begin:/\w[\w\d_]*/})],relevance:0};return{ -name:"Bash",aliases:["sh"],keywords:{$pattern:/\b[a-z][a-z0-9._-]+\b/, -keyword:["if","then","else","elif","fi","for","while","until","in","do","done","case","esac","function","select"], -literal:["true","false"], -built_in:["break","cd","continue","eval","exec","exit","export","getopts","hash","pwd","readonly","return","shift","test","times","trap","umask","unset","alias","bind","builtin","caller","command","declare","echo","enable","help","let","local","logout","mapfile","printf","read","readarray","source","type","typeset","ulimit","unalias","set","shopt","autoload","bg","bindkey","bye","cap","chdir","clone","comparguments","compcall","compctl","compdescribe","compfiles","compgroups","compquote","comptags","comptry","compvalues","dirs","disable","disown","echotc","echoti","emulate","fc","fg","float","functions","getcap","getln","history","integer","jobs","kill","limit","log","noglob","popd","print","pushd","pushln","rehash","sched","setcap","setopt","stat","suspend","ttyctl","unfunction","unhash","unlimit","unsetopt","vared","wait","whence","where","which","zcompile","zformat","zftp","zle","zmodload","zparseopts","zprof","zpty","zregexparse","zsocket","zstyle","ztcp","chcon","chgrp","chown","chmod","cp","dd","df","dir","dircolors","ln","ls","mkdir","mkfifo","mknod","mktemp","mv","realpath","rm","rmdir","shred","sync","touch","truncate","vdir","b2sum","base32","base64","cat","cksum","comm","csplit","cut","expand","fmt"
,"fold","head","join","md5sum","nl","numfmt","od","paste","ptx","pr","sha1sum","sha224sum","sha256sum","sha384sum","sha512sum","shuf","sort","split","sum","tac","tail","tr","tsort","unexpand","uniq","wc","arch","basename","chroot","date","dirname","du","echo","env","expr","factor","groups","hostid","id","link","logname","nice","nohup","nproc","pathchk","pinky","printenv","printf","pwd","readlink","runcon","seq","sleep","stat","stdbuf","stty","tee","test","timeout","tty","uname","unlink","uptime","users","who","whoami","yes"] -},contains:[l,e.SHEBANG(),c,o,e.HASH_COMMENT_MODE,r,{match:/(\/[a-z._-]+)+/},s,{ -match:/\\"/},{className:"string",begin:/'/,end:/'/},{match:/\\'/},t]}}, -grmr_c:e=>{const n=e.regex,t=e.COMMENT("//","$",{contains:[{begin:/\\\n/}] -}),a="decltype\\(auto\\)",i="[a-zA-Z_]\\w*::",r="("+a+"|"+n.optional(i)+"[a-zA-Z_]\\w*"+n.optional("<[^<>]+>")+")",s={ -className:"type",variants:[{begin:"\\b[a-z\\d_]*_t\\b"},{ -match:/\batomic_[a-z]{3,6}\b/}]},o={className:"string",variants:[{ -begin:'(u8?|U|L)?"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{ -begin:"(u8?|U|L)?'(\\\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4,8}|[0-7]{3}|\\S)|.)", -end:"'",illegal:"."},e.END_SAME_AS_BEGIN({ -begin:/(?:u8?|U|L)?R"([^()\\ ]{0,16})\(/,end:/\)([^()\\ ]{0,16})"/})]},l={ -className:"number",variants:[{begin:"\\b(0b[01']+)"},{ -begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)((ll|LL|l|L)(u|U)?|(u|U)(ll|LL|l|L)?|f|F|b|B)" -},{ -begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)" -}],relevance:0},c={className:"meta",begin:/#\s*[a-z]+\b/,end:/$/,keywords:{ -keyword:"if else elif endif define undef warning error line pragma _Pragma ifdef ifndef include" -},contains:[{begin:/\\\n/,relevance:0},e.inherit(o,{className:"string"}),{ -className:"string",begin:/<.*?>/},t,e.C_BLOCK_COMMENT_MODE]},d={ -className:"title",begin:n.optional(i)+e.IDENT_RE,relevance:0 -},g=n.optional(i)+e.IDENT_RE+"\\s*\\(",u={ 
-keyword:["asm","auto","break","case","continue","default","do","else","enum","extern","for","fortran","goto","if","inline","register","restrict","return","sizeof","struct","switch","typedef","union","volatile","while","_Alignas","_Alignof","_Atomic","_Generic","_Noreturn","_Static_assert","_Thread_local","alignas","alignof","noreturn","static_assert","thread_local","_Pragma"], -type:["float","double","signed","unsigned","int","short","long","char","void","_Bool","_Complex","_Imaginary","_Decimal32","_Decimal64","_Decimal128","const","static","complex","bool","imaginary"], -literal:"true false NULL", -built_in:"std string wstring cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream auto_ptr deque list queue stack vector map set pair bitset multiset multimap unordered_set unordered_map unordered_multiset unordered_multimap priority_queue make_pair array shared_ptr abort terminate abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf future isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf endl initializer_list unique_ptr" -},b=[c,s,t,e.C_BLOCK_COMMENT_MODE,l,o],m={variants:[{begin:/=/,end:/;/},{ -begin:/\(/,end:/\)/},{beginKeywords:"new throw return else",end:/;/}], -keywords:u,contains:b.concat([{begin:/\(/,end:/\)/,keywords:u, -contains:b.concat(["self"]),relevance:0}]),relevance:0},p={ -begin:"("+r+"[\\*&\\s]+)+"+g,returnBegin:!0,end:/[{;=]/,excludeEnd:!0, -keywords:u,illegal:/[^\w\s\*&:<>.]/,contains:[{begin:a,keywords:u,relevance:0},{ -begin:g,returnBegin:!0,contains:[e.inherit(d,{className:"title.function"})], 
-relevance:0},{relevance:0,match:/,/},{className:"params",begin:/\(/,end:/\)/, -keywords:u,relevance:0,contains:[t,e.C_BLOCK_COMMENT_MODE,o,l,s,{begin:/\(/, -end:/\)/,keywords:u,relevance:0,contains:["self",t,e.C_BLOCK_COMMENT_MODE,o,l,s] -}]},s,t,e.C_BLOCK_COMMENT_MODE,c]};return{name:"C",aliases:["h"],keywords:u, -disableAutodetect:!0,illegal:"=]/,contains:[{ -beginKeywords:"final class struct"},e.TITLE_MODE]}]),exports:{preprocessor:c, -strings:o,keywords:u}}},grmr_cpp:e=>{const n=e.regex,t=e.COMMENT("//","$",{ -contains:[{begin:/\\\n/}] -}),a="decltype\\(auto\\)",i="[a-zA-Z_]\\w*::",r="(?!struct)("+a+"|"+n.optional(i)+"[a-zA-Z_]\\w*"+n.optional("<[^<>]+>")+")",s={ -className:"type",begin:"\\b[a-z\\d_]*_t\\b"},o={className:"string",variants:[{ -begin:'(u8?|U|L)?"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{ -begin:"(u8?|U|L)?'(\\\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4,8}|[0-7]{3}|\\S)|.)", -end:"'",illegal:"."},e.END_SAME_AS_BEGIN({ -begin:/(?:u8?|U|L)?R"([^()\\ ]{0,16})\(/,end:/\)([^()\\ ]{0,16})"/})]},l={ -className:"number",variants:[{begin:"\\b(0b[01']+)"},{ -begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)((ll|LL|l|L)(u|U)?|(u|U)(ll|LL|l|L)?|f|F|b|B)" -},{ -begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)" -}],relevance:0},c={className:"meta",begin:/#\s*[a-z]+\b/,end:/$/,keywords:{ -keyword:"if else elif endif define undef warning error line pragma _Pragma ifdef ifndef include" -},contains:[{begin:/\\\n/,relevance:0},e.inherit(o,{className:"string"}),{ -className:"string",begin:/<.*?>/},t,e.C_BLOCK_COMMENT_MODE]},d={ -className:"title",begin:n.optional(i)+e.IDENT_RE,relevance:0 -},g=n.optional(i)+e.IDENT_RE+"\\s*\\(",u={ -type:["bool","char","char16_t","char32_t","char8_t","double","float","int","long","short","void","wchar_t","unsigned","signed","const","static"], 
-keyword:["alignas","alignof","and","and_eq","asm","atomic_cancel","atomic_commit","atomic_noexcept","auto","bitand","bitor","break","case","catch","class","co_await","co_return","co_yield","compl","concept","const_cast|10","consteval","constexpr","constinit","continue","decltype","default","delete","do","dynamic_cast|10","else","enum","explicit","export","extern","false","final","for","friend","goto","if","import","inline","module","mutable","namespace","new","noexcept","not","not_eq","nullptr","operator","or","or_eq","override","private","protected","public","reflexpr","register","reinterpret_cast|10","requires","return","sizeof","static_assert","static_cast|10","struct","switch","synchronized","template","this","thread_local","throw","transaction_safe","transaction_safe_dynamic","true","try","typedef","typeid","typename","union","using","virtual","volatile","while","xor","xor_eq"], -literal:["NULL","false","nullopt","nullptr","true"],built_in:["_Pragma"], -_type_hints:["any","auto_ptr","barrier","binary_semaphore","bitset","complex","condition_variable","condition_variable_any","counting_semaphore","deque","false_type","future","imaginary","initializer_list","istringstream","jthread","latch","lock_guard","multimap","multiset","mutex","optional","ostringstream","packaged_task","pair","promise","priority_queue","queue","recursive_mutex","recursive_timed_mutex","scoped_lock","set","shared_future","shared_lock","shared_mutex","shared_timed_mutex","shared_ptr","stack","string_view","stringstream","timed_mutex","thread","true_type","tuple","unique_lock","unique_ptr","unordered_map","unordered_multimap","unordered_multiset","unordered_set","variant","vector","weak_ptr","wstring","wstring_view"] -},b={className:"function.dispatch",relevance:0,keywords:{ 
-_hint:["abort","abs","acos","apply","as_const","asin","atan","atan2","calloc","ceil","cerr","cin","clog","cos","cosh","cout","declval","endl","exchange","exit","exp","fabs","floor","fmod","forward","fprintf","fputs","free","frexp","fscanf","future","invoke","isalnum","isalpha","iscntrl","isdigit","isgraph","islower","isprint","ispunct","isspace","isupper","isxdigit","labs","launder","ldexp","log","log10","make_pair","make_shared","make_shared_for_overwrite","make_tuple","make_unique","malloc","memchr","memcmp","memcpy","memset","modf","move","pow","printf","putchar","puts","realloc","scanf","sin","sinh","snprintf","sprintf","sqrt","sscanf","std","stderr","stdin","stdout","strcat","strchr","strcmp","strcpy","strcspn","strlen","strncat","strncmp","strncpy","strpbrk","strrchr","strspn","strstr","swap","tan","tanh","terminate","to_underlying","tolower","toupper","vfprintf","visit","vprintf","vsprintf"] -}, -begin:n.concat(/\b/,/(?!decltype)/,/(?!if)/,/(?!for)/,/(?!switch)/,/(?!while)/,e.IDENT_RE,n.lookahead(/(<[^<>]+>|)\s*\(/)) -},m=[b,c,s,t,e.C_BLOCK_COMMENT_MODE,l,o],p={variants:[{begin:/=/,end:/;/},{ -begin:/\(/,end:/\)/},{beginKeywords:"new throw return else",end:/;/}], -keywords:u,contains:m.concat([{begin:/\(/,end:/\)/,keywords:u, -contains:m.concat(["self"]),relevance:0}]),relevance:0},_={className:"function", -begin:"("+r+"[\\*&\\s]+)+"+g,returnBegin:!0,end:/[{;=]/,excludeEnd:!0, -keywords:u,illegal:/[^\w\s\*&:<>.]/,contains:[{begin:a,keywords:u,relevance:0},{ -begin:g,returnBegin:!0,contains:[d],relevance:0},{begin:/::/,relevance:0},{ -begin:/:/,endsWithParent:!0,contains:[o,l]},{relevance:0,match:/,/},{ -className:"params",begin:/\(/,end:/\)/,keywords:u,relevance:0, -contains:[t,e.C_BLOCK_COMMENT_MODE,o,l,s,{begin:/\(/,end:/\)/,keywords:u, -relevance:0,contains:["self",t,e.C_BLOCK_COMMENT_MODE,o,l,s]}] -},s,t,e.C_BLOCK_COMMENT_MODE,c]};return{name:"C++", 
-aliases:["cc","c++","h++","hpp","hh","hxx","cxx"],keywords:u,illegal:"",keywords:u,contains:["self",s]},{begin:e.IDENT_RE+"::",keywords:u},{ -match:[/\b(?:enum(?:\s+(?:class|struct))?|class|struct|union)/,/\s+/,/\w+/], -className:{1:"keyword",3:"title.class"}}])}},grmr_csharp:e=>{const n={ -keyword:["abstract","as","base","break","case","catch","class","const","continue","do","else","event","explicit","extern","finally","fixed","for","foreach","goto","if","implicit","in","interface","internal","is","lock","namespace","new","operator","out","override","params","private","protected","public","readonly","record","ref","return","scoped","sealed","sizeof","stackalloc","static","struct","switch","this","throw","try","typeof","unchecked","unsafe","using","virtual","void","volatile","while"].concat(["add","alias","and","ascending","async","await","by","descending","equals","from","get","global","group","init","into","join","let","nameof","not","notnull","on","or","orderby","partial","remove","select","set","unmanaged","value|0","var","when","where","with","yield"]), -built_in:["bool","byte","char","decimal","delegate","double","dynamic","enum","float","int","long","nint","nuint","object","sbyte","short","string","ulong","uint","ushort"], -literal:["default","false","null","true"]},t=e.inherit(e.TITLE_MODE,{ -begin:"[a-zA-Z](\\.?\\w)*"}),a={className:"number",variants:[{ -begin:"\\b(0b[01']+)"},{ -begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{ -begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)" -}],relevance:0},i={className:"string",begin:'@"',end:'"',contains:[{begin:'""'}] -},r=e.inherit(i,{illegal:/\n/}),s={className:"subst",begin:/\{/,end:/\}/, -keywords:n},o=e.inherit(s,{illegal:/\n/}),l={className:"string",begin:/\$"/, -end:'"',illegal:/\n/,contains:[{begin:/\{\{/},{begin:/\}\}/ -},e.BACKSLASH_ESCAPE,o]},c={className:"string",begin:/\$@"/,end:'"',contains:[{ 
-begin:/\{\{/},{begin:/\}\}/},{begin:'""'},s]},d=e.inherit(c,{illegal:/\n/, -contains:[{begin:/\{\{/},{begin:/\}\}/},{begin:'""'},o]}) -;s.contains=[c,l,i,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.C_BLOCK_COMMENT_MODE], -o.contains=[d,l,r,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.inherit(e.C_BLOCK_COMMENT_MODE,{ -illegal:/\n/})];const g={variants:[c,l,i,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE] -},u={begin:"<",end:">",contains:[{beginKeywords:"in out"},t] -},b=e.IDENT_RE+"(<"+e.IDENT_RE+"(\\s*,\\s*"+e.IDENT_RE+")*>)?(\\[\\])?",m={ -begin:"@"+e.IDENT_RE,relevance:0};return{name:"C#",aliases:["cs","c#"], -keywords:n,illegal:/::/,contains:[e.COMMENT("///","$",{returnBegin:!0, -contains:[{className:"doctag",variants:[{begin:"///",relevance:0},{ -begin:"\x3c!--|--\x3e"},{begin:""}]}] -}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:"meta",begin:"#", -end:"$",keywords:{ -keyword:"if else elif endif define undef warning error line region endregion pragma checksum" -}},g,a,{beginKeywords:"class interface",relevance:0,end:/[{;=]/, -illegal:/[^\s:,]/,contains:[{beginKeywords:"where class" -},t,u,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{beginKeywords:"namespace", -relevance:0,end:/[{;=]/,illegal:/[^\s:]/, -contains:[t,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{ -beginKeywords:"record",relevance:0,end:/[{;=]/,illegal:/[^\s:]/, -contains:[t,u,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"meta", -begin:"^\\s*\\[(?=[\\w])",excludeBegin:!0,end:"\\]",excludeEnd:!0,contains:[{ -className:"string",begin:/"/,end:/"/}]},{ -beginKeywords:"new return throw await else",relevance:0},{className:"function", -begin:"("+b+"\\s+)+"+e.IDENT_RE+"\\s*(<[^=]+>\\s*)?\\(",returnBegin:!0, -end:/\s*[{;=]/,excludeEnd:!0,keywords:n,contains:[{ -beginKeywords:"public private protected static internal protected abstract async extern override unsafe virtual new sealed partial", -relevance:0},{begin:e.IDENT_RE+"\\s*(<[^=]+>\\s*)?\\(",returnBegin:!0, 
-contains:[e.TITLE_MODE,u],relevance:0},{match:/\(\)/},{className:"params", -begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:n,relevance:0, -contains:[g,a,e.C_BLOCK_COMMENT_MODE] -},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},m]}},grmr_css:e=>{ -const n=e.regex,t=ie(e),a=[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE];return{ -name:"CSS",case_insensitive:!0,illegal:/[=|'\$]/,keywords:{ -keyframePosition:"from to"},classNameAliases:{keyframePosition:"selector-tag"}, -contains:[t.BLOCK_COMMENT,{begin:/-(webkit|moz|ms|o)-(?=[a-z])/ -},t.CSS_NUMBER_MODE,{className:"selector-id",begin:/#[A-Za-z0-9_-]+/,relevance:0 -},{className:"selector-class",begin:"\\.[a-zA-Z-][a-zA-Z0-9_-]*",relevance:0 -},t.ATTRIBUTE_SELECTOR_MODE,{className:"selector-pseudo",variants:[{ -begin:":("+oe.join("|")+")"},{begin:":(:)?("+le.join("|")+")"}] -},t.CSS_VARIABLE,{className:"attribute",begin:"\\b("+ce.join("|")+")\\b"},{ -begin:/:/,end:/[;}{]/, -contains:[t.BLOCK_COMMENT,t.HEXCOLOR,t.IMPORTANT,t.CSS_NUMBER_MODE,...a,{ -begin:/(url|data-uri)\(/,end:/\)/,relevance:0,keywords:{built_in:"url data-uri" -},contains:[...a,{className:"string",begin:/[^)]/,endsWithParent:!0, -excludeEnd:!0}]},t.FUNCTION_DISPATCH]},{begin:n.lookahead(/@/),end:"[{;]", -relevance:0,illegal:/:/,contains:[{className:"keyword",begin:/@-?\w[\w]*(-\w+)*/ -},{begin:/\s/,endsWithParent:!0,excludeEnd:!0,relevance:0,keywords:{ -$pattern:/[a-z-]+/,keyword:"and or not only",attribute:se.join(" ")},contains:[{ -begin:/[a-z-]+(?=:)/,className:"attribute"},...a,t.CSS_NUMBER_MODE]}]},{ -className:"selector-tag",begin:"\\b("+re.join("|")+")\\b"}]}},grmr_diff:e=>{ -const n=e.regex;return{name:"Diff",aliases:["patch"],contains:[{ -className:"meta",relevance:10, -match:n.either(/^@@ +-\d+,\d+ +\+\d+,\d+ +@@/,/^\*\*\* +\d+,\d+ +\*\*\*\*$/,/^--- +\d+,\d+ +----$/) -},{className:"comment",variants:[{ -begin:n.either(/Index: /,/^index/,/={3,}/,/^-{3}/,/^\*{3} /,/^\+{3}/,/^diff --git/), 
-end:/$/},{match:/^\*{15}$/}]},{className:"addition",begin:/^\+/,end:/$/},{ -className:"deletion",begin:/^-/,end:/$/},{className:"addition",begin:/^!/, -end:/$/}]}},grmr_go:e=>{const n={ -keyword:["break","case","chan","const","continue","default","defer","else","fallthrough","for","func","go","goto","if","import","interface","map","package","range","return","select","struct","switch","type","var"], -type:["bool","byte","complex64","complex128","error","float32","float64","int8","int16","int32","int64","string","uint8","uint16","uint32","uint64","int","uint","uintptr","rune"], -literal:["true","false","iota","nil"], -built_in:["append","cap","close","complex","copy","imag","len","make","new","panic","print","println","real","recover","delete"] -};return{name:"Go",aliases:["golang"],keywords:n,illegal:"{const n=e.regex;return{name:"GraphQL",aliases:["gql"], -case_insensitive:!0,disableAutodetect:!1,keywords:{ -keyword:["query","mutation","subscription","type","input","schema","directive","interface","union","scalar","fragment","enum","on"], -literal:["true","false","null"]}, -contains:[e.HASH_COMMENT_MODE,e.QUOTE_STRING_MODE,e.NUMBER_MODE,{ -scope:"punctuation",match:/[.]{3}/,relevance:0},{scope:"punctuation", -begin:/[\!\(\)\:\=\[\]\{\|\}]{1}/,relevance:0},{scope:"variable",begin:/\$/, -end:/\W/,excludeEnd:!0,relevance:0},{scope:"meta",match:/@\w+/,excludeEnd:!0},{ -scope:"symbol",begin:n.concat(/[_A-Za-z][_0-9A-Za-z]*/,n.lookahead(/\s*:/)), -relevance:0}],illegal:[/[;<']/,/BEGIN/]}},grmr_ini:e=>{const n=e.regex,t={ -className:"number",relevance:0,variants:[{begin:/([+-]+)?[\d]+_[\d_]+/},{ -begin:e.NUMBER_RE}]},a=e.COMMENT();a.variants=[{begin:/;/,end:/$/},{begin:/#/, -end:/$/}];const i={className:"variable",variants:[{begin:/\$[\w\d"][\w\d_]*/},{ -begin:/\$\{(.*?)\}/}]},r={className:"literal", -begin:/\bon|off|true|false|yes|no\b/},s={className:"string", -contains:[e.BACKSLASH_ESCAPE],variants:[{begin:"'''",end:"'''",relevance:10},{ 
-begin:'"""',end:'"""',relevance:10},{begin:'"',end:'"'},{begin:"'",end:"'"}] -},o={begin:/\[/,end:/\]/,contains:[a,r,i,s,t,"self"],relevance:0 -},l=n.either(/[A-Za-z0-9_-]+/,/"(\\"|[^"])*"/,/'[^']*'/);return{ -name:"TOML, also INI",aliases:["toml"],case_insensitive:!0,illegal:/\S/, -contains:[a,{className:"section",begin:/\[+/,end:/\]+/},{ -begin:n.concat(l,"(\\s*\\.\\s*",l,")*",n.lookahead(/\s*=\s*[^#\s]/)), -className:"attr",starts:{end:/$/,contains:[a,o,r,i,s,t]}}]}},grmr_java:e=>{ -const n=e.regex,t="[\xc0-\u02b8a-zA-Z_$][\xc0-\u02b8a-zA-Z_$0-9]*",a=t+pe("(?:<"+t+"~~~(?:\\s*,\\s*"+t+"~~~)*>)?",/~~~/g,2),i={ -keyword:["synchronized","abstract","private","var","static","if","const ","for","while","strictfp","finally","protected","import","native","final","void","enum","else","break","transient","catch","instanceof","volatile","case","assert","package","default","public","try","switch","continue","throws","protected","public","private","module","requires","exports","do","sealed","yield","permits"], -literal:["false","true","null"], -type:["char","boolean","long","float","int","byte","short","double"], -built_in:["super","this"]},r={className:"meta",begin:"@"+t,contains:[{ -begin:/\(/,end:/\)/,contains:["self"]}]},s={className:"params",begin:/\(/, -end:/\)/,keywords:i,relevance:0,contains:[e.C_BLOCK_COMMENT_MODE],endsParent:!0} -;return{name:"Java",aliases:["jsp"],keywords:i,illegal:/<\/|#/, -contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{begin:/\w+@/, -relevance:0},{className:"doctag",begin:"@[A-Za-z]+"}]}),{ -begin:/import java\.[a-z]+\./,keywords:"import",relevance:2 -},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{begin:/"""/,end:/"""/, -className:"string",contains:[e.BACKSLASH_ESCAPE] -},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{ -match:[/\b(?:class|interface|enum|extends|implements|new)/,/\s+/,t],className:{ -1:"keyword",3:"title.class"}},{match:/non-sealed/,scope:"keyword"},{ 
-begin:[n.concat(/(?!else)/,t),/\s+/,t,/\s+/,/=(?!=)/],className:{1:"type", -3:"variable",5:"operator"}},{begin:[/record/,/\s+/,t],className:{1:"keyword", -3:"title.class"},contains:[s,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{ -beginKeywords:"new throw return else",relevance:0},{ -begin:["(?:"+a+"\\s+)",e.UNDERSCORE_IDENT_RE,/\s*(?=\()/],className:{ -2:"title.function"},keywords:i,contains:[{className:"params",begin:/\(/, -end:/\)/,keywords:i,relevance:0, -contains:[r,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,me,e.C_BLOCK_COMMENT_MODE] -},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},me,r]}},grmr_javascript:Oe, -grmr_json:e=>{const n=["true","false","null"],t={scope:"literal", -beginKeywords:n.join(" ")};return{name:"JSON",keywords:{literal:n},contains:[{ -className:"attr",begin:/"(\\.|[^\\"\r\n])*"(?=\s*:)/,relevance:1.01},{ -match:/[{}[\],:]/,className:"punctuation",relevance:0 -},e.QUOTE_STRING_MODE,t,e.C_NUMBER_MODE,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE], -illegal:"\\S"}},grmr_kotlin:e=>{const n={ -keyword:"abstract as val var vararg get set class object open private protected public noinline crossinline dynamic final enum if else do while for when throw try catch finally import package is in fun override companion reified inline lateinit init interface annotation data sealed internal infix operator out by constructor super tailrec where const inner suspend typealias external expect actual", -built_in:"Byte Short Char Int Long Boolean Float Double Void Unit Nothing", -literal:"true false null"},t={className:"symbol",begin:e.UNDERSCORE_IDENT_RE+"@" -},a={className:"subst",begin:/\$\{/,end:/\}/,contains:[e.C_NUMBER_MODE]},i={ -className:"variable",begin:"\\$"+e.UNDERSCORE_IDENT_RE},r={className:"string", -variants:[{begin:'"""',end:'"""(?=[^"])',contains:[i,a]},{begin:"'",end:"'", -illegal:/\n/,contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"',illegal:/\n/, -contains:[e.BACKSLASH_ESCAPE,i,a]}]};a.contains.push(r);const s={ -className:"meta", 
-begin:"@(?:file|property|field|get|set|receiver|param|setparam|delegate)\\s*:(?:\\s*"+e.UNDERSCORE_IDENT_RE+")?" -},o={className:"meta",begin:"@"+e.UNDERSCORE_IDENT_RE,contains:[{begin:/\(/, -end:/\)/,contains:[e.inherit(r,{className:"string"}),"self"]}] -},l=me,c=e.COMMENT("/\\*","\\*/",{contains:[e.C_BLOCK_COMMENT_MODE]}),d={ -variants:[{className:"type",begin:e.UNDERSCORE_IDENT_RE},{begin:/\(/,end:/\)/, -contains:[]}]},g=d;return g.variants[1].contains=[d],d.variants[1].contains=[g], -{name:"Kotlin",aliases:["kt","kts"],keywords:n, -contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{className:"doctag", -begin:"@[A-Za-z]+"}]}),e.C_LINE_COMMENT_MODE,c,{className:"keyword", -begin:/\b(break|continue|return|this)\b/,starts:{contains:[{className:"symbol", -begin:/@\w+/}]}},t,s,o,{className:"function",beginKeywords:"fun",end:"[(]|$", -returnBegin:!0,excludeEnd:!0,keywords:n,relevance:5,contains:[{ -begin:e.UNDERSCORE_IDENT_RE+"\\s*\\(",returnBegin:!0,relevance:0, -contains:[e.UNDERSCORE_TITLE_MODE]},{className:"type",begin://, -keywords:"reified",relevance:0},{className:"params",begin:/\(/,end:/\)/, -endsParent:!0,keywords:n,relevance:0,contains:[{begin:/:/,end:/[=,\/]/, -endsWithParent:!0,contains:[d,e.C_LINE_COMMENT_MODE,c],relevance:0 -},e.C_LINE_COMMENT_MODE,c,s,o,r,e.C_NUMBER_MODE]},c]},{ -begin:[/class|interface|trait/,/\s+/,e.UNDERSCORE_IDENT_RE],beginScope:{ -3:"title.class"},keywords:"class interface trait",end:/[:\{(]|$/,excludeEnd:!0, -illegal:"extends implements",contains:[{ -beginKeywords:"public protected internal private constructor" -},e.UNDERSCORE_TITLE_MODE,{className:"type",begin://,excludeBegin:!0, -excludeEnd:!0,relevance:0},{className:"type",begin:/[,:]\s*/,end:/[<\(,){\s]|$/, -excludeBegin:!0,returnEnd:!0},s,o]},r,{className:"meta",begin:"^#!/usr/bin/env", -end:"$",illegal:"\n"},l]}},grmr_less:e=>{ -const n=ie(e),t=de,a="[\\w-]+",i="("+a+"|@\\{"+a+"\\})",r=[],s=[],o=e=>({ 
-className:"string",begin:"~?"+e+".*?"+e}),l=(e,n,t)=>({className:e,begin:n, -relevance:t}),c={$pattern:/[a-z-]+/,keyword:"and or not only", -attribute:se.join(" ")},d={begin:"\\(",end:"\\)",contains:s,keywords:c, -relevance:0} -;s.push(e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,o("'"),o('"'),n.CSS_NUMBER_MODE,{ -begin:"(url|data-uri)\\(",starts:{className:"string",end:"[\\)\\n]", -excludeEnd:!0} -},n.HEXCOLOR,d,l("variable","@@?"+a,10),l("variable","@\\{"+a+"\\}"),l("built_in","~?`[^`]*?`"),{ -className:"attribute",begin:a+"\\s*:",end:":",returnBegin:!0,excludeEnd:!0 -},n.IMPORTANT,{beginKeywords:"and not"},n.FUNCTION_DISPATCH);const g=s.concat({ -begin:/\{/,end:/\}/,contains:r}),u={beginKeywords:"when",endsWithParent:!0, -contains:[{beginKeywords:"and not"}].concat(s)},b={begin:i+"\\s*:", -returnBegin:!0,end:/[;}]/,relevance:0,contains:[{begin:/-(webkit|moz|ms|o)-/ -},n.CSS_VARIABLE,{className:"attribute",begin:"\\b("+ce.join("|")+")\\b", -end:/(?=:)/,starts:{endsWithParent:!0,illegal:"[<=$]",relevance:0,contains:s}}] -},m={className:"keyword", -begin:"@(import|media|charset|font-face|(-[a-z]+-)?keyframes|supports|document|namespace|page|viewport|host)\\b", -starts:{end:"[;{}]",keywords:c,returnEnd:!0,contains:s,relevance:0}},p={ -className:"variable",variants:[{begin:"@"+a+"\\s*:",relevance:15},{begin:"@"+a -}],starts:{end:"[;}]",returnEnd:!0,contains:g}},_={variants:[{ -begin:"[\\.#:&\\[>]",end:"[;{}]"},{begin:i,end:/\{/}],returnBegin:!0, -returnEnd:!0,illegal:"[<='$\"]",relevance:0, -contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,u,l("keyword","all\\b"),l("variable","@\\{"+a+"\\}"),{ -begin:"\\b("+re.join("|")+")\\b",className:"selector-tag" -},n.CSS_NUMBER_MODE,l("selector-tag",i,0),l("selector-id","#"+i),l("selector-class","\\."+i,0),l("selector-tag","&",0),n.ATTRIBUTE_SELECTOR_MODE,{ -className:"selector-pseudo",begin:":("+oe.join("|")+")"},{ -className:"selector-pseudo",begin:":(:)?("+le.join("|")+")"},{begin:/\(/, 
-end:/\)/,relevance:0,contains:g},{begin:"!important"},n.FUNCTION_DISPATCH]},h={ -begin:a+":(:)?"+`(${t.join("|")})`,returnBegin:!0,contains:[_]} -;return r.push(e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,m,p,h,b,_,u,n.FUNCTION_DISPATCH), -{name:"Less",case_insensitive:!0,illegal:"[=>'/<($\"]",contains:r}}, -grmr_lua:e=>{const n="\\[=*\\[",t="\\]=*\\]",a={begin:n,end:t,contains:["self"] -},i=[e.COMMENT("--(?!"+n+")","$"),e.COMMENT("--"+n,t,{contains:[a],relevance:10 -})];return{name:"Lua",keywords:{$pattern:e.UNDERSCORE_IDENT_RE, -literal:"true false nil", -keyword:"and break do else elseif end for goto if in local not or repeat return then until while", -built_in:"_G _ENV _VERSION __index __newindex __mode __call __metatable __tostring __len __gc __add __sub __mul __div __mod __pow __concat __unm __eq __lt __le assert collectgarbage dofile error getfenv getmetatable ipairs load loadfile loadstring module next pairs pcall print rawequal rawget rawset require select setfenv setmetatable tonumber tostring type unpack xpcall arg self coroutine resume yield status wrap create running debug getupvalue debug sethook getmetatable gethook setmetatable setlocal traceback setfenv getinfo setupvalue getlocal getregistry getfenv io lines write close flush open output type read stderr stdin input stdout popen tmpfile math log max acos huge ldexp pi cos tanh pow deg tan cosh sinh random randomseed frexp ceil floor rad abs sqrt modf asin min mod fmod log10 atan2 exp sin atan os exit setlocale date getenv difftime remove time clock tmpname rename execute package preload loadlib loaded loaders cpath config path seeall string sub upper len gfind rep find match char dump gmatch reverse byte format gsub lower table setn insert getn foreachi maxn foreach concat sort remove" -},contains:i.concat([{className:"function",beginKeywords:"function",end:"\\)", -contains:[e.inherit(e.TITLE_MODE,{ -begin:"([_a-zA-Z]\\w*\\.)*([_a-zA-Z]\\w*:)?[_a-zA-Z]\\w*"}),{className:"params", 
-begin:"\\(",endsWithParent:!0,contains:i}].concat(i) -},e.C_NUMBER_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:"string", -begin:n,end:t,contains:[a],relevance:5}])}},grmr_makefile:e=>{const n={ -className:"variable",variants:[{begin:"\\$\\("+e.UNDERSCORE_IDENT_RE+"\\)", -contains:[e.BACKSLASH_ESCAPE]},{begin:/\$[@%{ -const n={begin:/<\/?[A-Za-z_]/,end:">",subLanguage:"xml",relevance:0},t={ -variants:[{begin:/\[.+?\]\[.*?\]/,relevance:0},{ -begin:/\[.+?\]\(((data|javascript|mailto):|(?:http|ftp)s?:\/\/).*?\)/, -relevance:2},{ -begin:e.regex.concat(/\[.+?\]\(/,/[A-Za-z][A-Za-z0-9+.-]*/,/:\/\/.*?\)/), -relevance:2},{begin:/\[.+?\]\([./?&#].*?\)/,relevance:1},{ -begin:/\[.*?\]\(.*?\)/,relevance:0}],returnBegin:!0,contains:[{match:/\[(?=\])/ -},{className:"string",relevance:0,begin:"\\[",end:"\\]",excludeBegin:!0, -returnEnd:!0},{className:"link",relevance:0,begin:"\\]\\(",end:"\\)", -excludeBegin:!0,excludeEnd:!0},{className:"symbol",relevance:0,begin:"\\]\\[", -end:"\\]",excludeBegin:!0,excludeEnd:!0}]},a={className:"strong",contains:[], -variants:[{begin:/_{2}(?!\s)/,end:/_{2}/},{begin:/\*{2}(?!\s)/,end:/\*{2}/}] -},i={className:"emphasis",contains:[],variants:[{begin:/\*(?![*\s])/,end:/\*/},{ -begin:/_(?![_\s])/,end:/_/,relevance:0}]},r=e.inherit(a,{contains:[] -}),s=e.inherit(i,{contains:[]});a.contains.push(s),i.contains.push(r) -;let o=[n,t];return[a,i,r,s].forEach((e=>{e.contains=e.contains.concat(o) -})),o=o.concat(a,i),{name:"Markdown",aliases:["md","mkdown","mkd"],contains:[{ -className:"section",variants:[{begin:"^#{1,6}",end:"$",contains:o},{ -begin:"(?=^.+?\\n[=-]{2,}$)",contains:[{begin:"^[=-]*$"},{begin:"^",end:"\\n", -contains:o}]}]},n,{className:"bullet",begin:"^[ \t]*([*+-]|(\\d+\\.))(?=\\s+)", -end:"\\s+",excludeEnd:!0},a,i,{className:"quote",begin:"^>\\s+",contains:o, -end:"$"},{className:"code",variants:[{begin:"(`{3,})[^`](.|\\n)*?\\1`*[ ]*"},{ -begin:"(~{3,})[^~](.|\\n)*?\\1~*[ ]*"},{begin:"```",end:"```+[ ]*$"},{ 
-begin:"~~~",end:"~~~+[ ]*$"},{begin:"`.+?`"},{begin:"(?=^( {4}|\\t))", -contains:[{begin:"^( {4}|\\t)",end:"(\\n)$"}],relevance:0}]},{ -begin:"^[-\\*]{3,}",end:"$"},t,{begin:/^\[[^\n]+\]:/,returnBegin:!0,contains:[{ -className:"symbol",begin:/\[/,end:/\]/,excludeBegin:!0,excludeEnd:!0},{ -className:"link",begin:/:\s*/,end:/$/,excludeBegin:!0}]}]}},grmr_objectivec:e=>{ -const n=/[a-zA-Z@][a-zA-Z0-9_]*/,t={$pattern:n, -keyword:["@interface","@class","@protocol","@implementation"]};return{ -name:"Objective-C",aliases:["mm","objc","obj-c","obj-c++","objective-c++"], -keywords:{"variable.language":["this","super"],$pattern:n, -keyword:["while","export","sizeof","typedef","const","struct","for","union","volatile","static","mutable","if","do","return","goto","enum","else","break","extern","asm","case","default","register","explicit","typename","switch","continue","inline","readonly","assign","readwrite","self","@synchronized","id","typeof","nonatomic","IBOutlet","IBAction","strong","weak","copy","in","out","inout","bycopy","byref","oneway","__strong","__weak","__block","__autoreleasing","@private","@protected","@public","@try","@property","@end","@throw","@catch","@finally","@autoreleasepool","@synthesize","@dynamic","@selector","@optional","@required","@encode","@package","@import","@defs","@compatibility_alias","__bridge","__bridge_transfer","__bridge_retained","__bridge_retain","__covariant","__contravariant","__kindof","_Nonnull","_Nullable","_Null_unspecified","__FUNCTION__","__PRETTY_FUNCTION__","__attribute__","getter","setter","retain","unsafe_unretained","nonnull","nullable","null_unspecified","null_resettable","class","instancetype","NS_DESIGNATED_INITIALIZER","NS_UNAVAILABLE","NS_REQUIRES_SUPER","NS_RETURNS_INNER_POINTER","NS_INLINE","NS_AVAILABLE","NS_DEPRECATED","NS_ENUM","NS_OPTIONS","NS_SWIFT_UNAVAILABLE","NS_ASSUME_NONNULL_BEGIN","NS_ASSUME_NONNULL_END","NS_REFINED_FOR_SWIFT","NS_SWIFT_NAME","NS_SWIFT_NOTHROW","NS_DURING","NS_HANDLER","NS_ENDHANDLER","NS_V
ALUERETURN","NS_VOIDRETURN"], -literal:["false","true","FALSE","TRUE","nil","YES","NO","NULL"], -built_in:["dispatch_once_t","dispatch_queue_t","dispatch_sync","dispatch_async","dispatch_once"], -type:["int","float","char","unsigned","signed","short","long","double","wchar_t","unichar","void","bool","BOOL","id|0","_Bool"] -},illegal:"/,end:/$/,illegal:"\\n" -},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:"class", -begin:"("+t.keyword.join("|")+")\\b",end:/(\{|$)/,excludeEnd:!0,keywords:t, -contains:[e.UNDERSCORE_TITLE_MODE]},{begin:"\\."+e.UNDERSCORE_IDENT_RE, -relevance:0}]}},grmr_perl:e=>{const n=e.regex,t=/[dualxmsipngr]{0,12}/,a={ -$pattern:/[\w.]+/, -keyword:"abs accept alarm and atan2 bind binmode bless break caller chdir chmod chomp chop chown chr chroot close closedir connect continue cos crypt dbmclose dbmopen defined delete die do dump each else elsif endgrent endhostent endnetent endprotoent endpwent endservent eof eval exec exists exit exp fcntl fileno flock for foreach fork format formline getc getgrent getgrgid getgrnam gethostbyaddr gethostbyname gethostent getlogin getnetbyaddr getnetbyname getnetent getpeername getpgrp getpriority getprotobyname getprotobynumber getprotoent getpwent getpwnam getpwuid getservbyname getservbyport getservent getsockname getsockopt given glob gmtime goto grep gt hex if index int ioctl join keys kill last lc lcfirst length link listen local localtime log lstat lt ma map mkdir msgctl msgget msgrcv msgsnd my ne next no not oct open opendir or ord our pack package pipe pop pos print printf prototype push q|0 qq quotemeta qw qx rand read readdir readline readlink readpipe recv redo ref rename require reset return reverse rewinddir rindex rmdir say scalar seek seekdir select semctl semget semop send setgrent sethostent setnetent setpgrp setpriority setprotoent setpwent setservent setsockopt shift shmctl shmget shmread shmwrite shutdown sin sleep socket socketpair sort splice split sprintf sqrt srand stat state 
study sub substr symlink syscall sysopen sysread sysseek system syswrite tell telldir tie tied time times tr truncate uc ucfirst umask undef unless unlink unpack unshift untie until use utime values vec wait waitpid wantarray warn when while write x|0 xor y|0" -},i={className:"subst",begin:"[$@]\\{",end:"\\}",keywords:a},r={begin:/->\{/, -end:/\}/},s={variants:[{begin:/\$\d/},{ -begin:n.concat(/[$%@](\^\w\b|#\w+(::\w+)*|\{\w+\}|\w+(::\w*)*)/,"(?![A-Za-z])(?![@$%])") -},{begin:/[$%@][^\s\w{]/,relevance:0}] -},o=[e.BACKSLASH_ESCAPE,i,s],l=[/!/,/\//,/\|/,/\?/,/'/,/"/,/#/],c=(e,a,i="\\1")=>{ -const r="\\1"===i?i:n.concat(i,a) -;return n.concat(n.concat("(?:",e,")"),a,/(?:\\.|[^\\\/])*?/,r,/(?:\\.|[^\\\/])*?/,i,t) -},d=(e,a,i)=>n.concat(n.concat("(?:",e,")"),a,/(?:\\.|[^\\\/])*?/,i,t),g=[s,e.HASH_COMMENT_MODE,e.COMMENT(/^=\w/,/=cut/,{ -endsWithParent:!0}),r,{className:"string",contains:o,variants:[{ -begin:"q[qwxr]?\\s*\\(",end:"\\)",relevance:5},{begin:"q[qwxr]?\\s*\\[", -end:"\\]",relevance:5},{begin:"q[qwxr]?\\s*\\{",end:"\\}",relevance:5},{ -begin:"q[qwxr]?\\s*\\|",end:"\\|",relevance:5},{begin:"q[qwxr]?\\s*<",end:">", -relevance:5},{begin:"qw\\s+q",end:"q",relevance:5},{begin:"'",end:"'", -contains:[e.BACKSLASH_ESCAPE]},{begin:'"',end:'"'},{begin:"`",end:"`", -contains:[e.BACKSLASH_ESCAPE]},{begin:/\{\w+\}/,relevance:0},{ -begin:"-?\\w+\\s*=>",relevance:0}]},{className:"number", -begin:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b", -relevance:0},{ -begin:"(\\/\\/|"+e.RE_STARTERS_RE+"|\\b(split|return|print|reverse|grep)\\b)\\s*", -keywords:"split return print reverse grep",relevance:0, -contains:[e.HASH_COMMENT_MODE,{className:"regexp",variants:[{ -begin:c("s|tr|y",n.either(...l,{capture:!0}))},{begin:c("s|tr|y","\\(","\\)")},{ -begin:c("s|tr|y","\\[","\\]")},{begin:c("s|tr|y","\\{","\\}")}],relevance:2},{ -className:"regexp",variants:[{begin:/(m|qr)\/\//,relevance:0},{ 
-begin:d("(?:m|qr)?",/\//,/\//)},{begin:d("m|qr",n.either(...l,{capture:!0 -}),/\1/)},{begin:d("m|qr",/\(/,/\)/)},{begin:d("m|qr",/\[/,/\]/)},{ -begin:d("m|qr",/\{/,/\}/)}]}]},{className:"function",beginKeywords:"sub", -end:"(\\s*\\(.*?\\))?[;{]",excludeEnd:!0,relevance:5,contains:[e.TITLE_MODE]},{ -begin:"-\\w\\b",relevance:0},{begin:"^__DATA__$",end:"^__END__$", -subLanguage:"mojolicious",contains:[{begin:"^@@.*",end:"$",className:"comment"}] -}];return i.contains=g,r.contains=g,{name:"Perl",aliases:["pl","pm"],keywords:a, -contains:g}},grmr_php:e=>{ -const n=e.regex,t=/(?![A-Za-z0-9])(?![$])/,a=n.concat(/[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*/,t),i=n.concat(/(\\?[A-Z][a-z0-9_\x7f-\xff]+|\\?[A-Z]+(?=[A-Z][a-z0-9_\x7f-\xff])){1,}/,t),r={ -scope:"variable",match:"\\$+"+a},s={scope:"subst",variants:[{begin:/\$\w+/},{ -begin:/\{\$/,end:/\}/}]},o=e.inherit(e.APOS_STRING_MODE,{illegal:null -}),l="[ \t\n]",c={scope:"string",variants:[e.inherit(e.QUOTE_STRING_MODE,{ -illegal:null,contains:e.QUOTE_STRING_MODE.contains.concat(s)}),o,{ -begin:/<<<[ \t]*(?:(\w+)|"(\w+)")\n/,end:/[ \t]*(\w+)\b/, -contains:e.QUOTE_STRING_MODE.contains.concat(s),"on:begin":(e,n)=>{ -n.data._beginMatch=e[1]||e[2]},"on:end":(e,n)=>{ -n.data._beginMatch!==e[1]&&n.ignoreMatch()}},e.END_SAME_AS_BEGIN({ -begin:/<<<[ \t]*'(\w+)'\n/,end:/[ \t]*(\w+)\b/})]},d={scope:"number",variants:[{ -begin:"\\b0[bB][01]+(?:_[01]+)*\\b"},{begin:"\\b0[oO][0-7]+(?:_[0-7]+)*\\b"},{ -begin:"\\b0[xX][\\da-fA-F]+(?:_[\\da-fA-F]+)*\\b"},{ -begin:"(?:\\b\\d+(?:_\\d+)*(\\.(?:\\d+(?:_\\d+)*))?|\\B\\.\\d+)(?:[eE][+-]?\\d+)?" 
-}],relevance:0 -},g=["false","null","true"],u=["__CLASS__","__DIR__","__FILE__","__FUNCTION__","__COMPILER_HALT_OFFSET__","__LINE__","__METHOD__","__NAMESPACE__","__TRAIT__","die","echo","exit","include","include_once","print","require","require_once","array","abstract","and","as","binary","bool","boolean","break","callable","case","catch","class","clone","const","continue","declare","default","do","double","else","elseif","empty","enddeclare","endfor","endforeach","endif","endswitch","endwhile","enum","eval","extends","final","finally","float","for","foreach","from","global","goto","if","implements","instanceof","insteadof","int","integer","interface","isset","iterable","list","match|0","mixed","new","never","object","or","private","protected","public","readonly","real","return","string","switch","throw","trait","try","unset","use","var","void","while","xor","yield"],b=["Error|0","AppendIterator","ArgumentCountError","ArithmeticError","ArrayIterator","ArrayObject","AssertionError","BadFunctionCallException","BadMethodCallException","CachingIterator","CallbackFilterIterator","CompileError","Countable","DirectoryIterator","DivisionByZeroError","DomainException","EmptyIterator","ErrorException","Exception","FilesystemIterator","FilterIterator","GlobIterator","InfiniteIterator","InvalidArgumentException","IteratorIterator","LengthException","LimitIterator","LogicException","MultipleIterator","NoRewindIterator","OutOfBoundsException","OutOfRangeException","OuterIterator","OverflowException","ParentIterator","ParseError","RangeException","RecursiveArrayIterator","RecursiveCachingIterator","RecursiveCallbackFilterIterator","RecursiveDirectoryIterator","RecursiveFilterIterator","RecursiveIterator","RecursiveIteratorIterator","RecursiveRegexIterator","RecursiveTreeIterator","RegexIterator","RuntimeException","SeekableIterator","SplDoublyLinkedList","SplFileInfo","SplFileObject","SplFixedArray","SplHeap","SplMaxHeap","SplMinHeap","SplObjectStorage","SplObserver","SplPriorit
yQueue","SplQueue","SplStack","SplSubject","SplTempFileObject","TypeError","UnderflowException","UnexpectedValueException","UnhandledMatchError","ArrayAccess","BackedEnum","Closure","Fiber","Generator","Iterator","IteratorAggregate","Serializable","Stringable","Throwable","Traversable","UnitEnum","WeakReference","WeakMap","Directory","__PHP_Incomplete_Class","parent","php_user_filter","self","static","stdClass"],m={ -keyword:u,literal:(e=>{const n=[];return e.forEach((e=>{ -n.push(e),e.toLowerCase()===e?n.push(e.toUpperCase()):n.push(e.toLowerCase()) -})),n})(g),built_in:b},p=e=>e.map((e=>e.replace(/\|\d+$/,""))),_={variants:[{ -match:[/new/,n.concat(l,"+"),n.concat("(?!",p(b).join("\\b|"),"\\b)"),i],scope:{ -1:"keyword",4:"title.class"}}]},h=n.concat(a,"\\b(?!\\()"),f={variants:[{ -match:[n.concat(/::/,n.lookahead(/(?!class\b)/)),h],scope:{2:"variable.constant" -}},{match:[/::/,/class/],scope:{2:"variable.language"}},{ -match:[i,n.concat(/::/,n.lookahead(/(?!class\b)/)),h],scope:{1:"title.class", -3:"variable.constant"}},{match:[i,n.concat("::",n.lookahead(/(?!class\b)/))], -scope:{1:"title.class"}},{match:[i,/::/,/class/],scope:{1:"title.class", -3:"variable.language"}}]},E={scope:"attr", -match:n.concat(a,n.lookahead(":"),n.lookahead(/(?!::)/))},y={relevance:0, -begin:/\(/,end:/\)/,keywords:m,contains:[E,r,f,e.C_BLOCK_COMMENT_MODE,c,d,_] -},N={relevance:0, -match:[/\b/,n.concat("(?!fn\\b|function\\b|",p(u).join("\\b|"),"|",p(b).join("\\b|"),"\\b)"),a,n.concat(l,"*"),n.lookahead(/(?=\()/)], -scope:{3:"title.function.invoke"},contains:[y]};y.contains.push(N) -;const w=[E,f,e.C_BLOCK_COMMENT_MODE,c,d,_];return{case_insensitive:!1, -keywords:m,contains:[{begin:n.concat(/#\[\s*/,i),beginScope:"meta",end:/]/, -endScope:"meta",keywords:{literal:g,keyword:["new","array"]},contains:[{ -begin:/\[/,end:/]/,keywords:{literal:g,keyword:["new","array"]}, -contains:["self",...w]},...w,{scope:"meta",match:i}] 
-},e.HASH_COMMENT_MODE,e.COMMENT("//","$"),e.COMMENT("/\\*","\\*/",{contains:[{ -scope:"doctag",match:"@[A-Za-z]+"}]}),{match:/__halt_compiler\(\);/, -keywords:"__halt_compiler",starts:{scope:"comment",end:e.MATCH_NOTHING_RE, -contains:[{match:/\?>/,scope:"meta",endsParent:!0}]}},{scope:"meta",variants:[{ -begin:/<\?php/,relevance:10},{begin:/<\?=/},{begin:/<\?/,relevance:.1},{ -begin:/\?>/}]},{scope:"variable.language",match:/\$this\b/},r,N,f,{ -match:[/const/,/\s/,a],scope:{1:"keyword",3:"variable.constant"}},_,{ -scope:"function",relevance:0,beginKeywords:"fn function",end:/[;{]/, -excludeEnd:!0,illegal:"[$%\\[]",contains:[{beginKeywords:"use" -},e.UNDERSCORE_TITLE_MODE,{begin:"=>",endsParent:!0},{scope:"params", -begin:"\\(",end:"\\)",excludeBegin:!0,excludeEnd:!0,keywords:m, -contains:["self",r,f,e.C_BLOCK_COMMENT_MODE,c,d]}]},{scope:"class",variants:[{ -beginKeywords:"enum",illegal:/[($"]/},{beginKeywords:"class interface trait", -illegal:/[:($"]/}],relevance:0,end:/\{/,excludeEnd:!0,contains:[{ -beginKeywords:"extends implements"},e.UNDERSCORE_TITLE_MODE]},{ -beginKeywords:"namespace",relevance:0,end:";",illegal:/[.']/, -contains:[e.inherit(e.UNDERSCORE_TITLE_MODE,{scope:"title.class"})]},{ -beginKeywords:"use",relevance:0,end:";",contains:[{ -match:/\b(as|const|function)\b/,scope:"keyword"},e.UNDERSCORE_TITLE_MODE]},c,d]} -},grmr_php_template:e=>({name:"PHP template",subLanguage:"xml",contains:[{ -begin:/<\?(php|=)?/,end:/\?>/,subLanguage:"php",contains:[{begin:"/\\*", -end:"\\*/",skip:!0},{begin:'b"',end:'"',skip:!0},{begin:"b'",end:"'",skip:!0 -},e.inherit(e.APOS_STRING_MODE,{illegal:null,className:null,contains:null, -skip:!0}),e.inherit(e.QUOTE_STRING_MODE,{illegal:null,className:null, -contains:null,skip:!0})]}]}),grmr_plaintext:e=>({name:"Plain text", -aliases:["text","txt"],disableAutodetect:!0}),grmr_python:e=>{ -const 
n=e.regex,t=/[\p{XID_Start}_]\p{XID_Continue}*/u,a=["and","as","assert","async","await","break","case","class","continue","def","del","elif","else","except","finally","for","from","global","if","import","in","is","lambda","match","nonlocal|10","not","or","pass","raise","return","try","while","with","yield"],i={ -$pattern:/[A-Za-z]\w+|__\w+__/,keyword:a, -built_in:["__import__","abs","all","any","ascii","bin","bool","breakpoint","bytearray","bytes","callable","chr","classmethod","compile","complex","delattr","dict","dir","divmod","enumerate","eval","exec","filter","float","format","frozenset","getattr","globals","hasattr","hash","help","hex","id","input","int","isinstance","issubclass","iter","len","list","locals","map","max","memoryview","min","next","object","oct","open","ord","pow","print","property","range","repr","reversed","round","set","setattr","slice","sorted","staticmethod","str","sum","super","tuple","type","vars","zip"], -literal:["__debug__","Ellipsis","False","None","NotImplemented","True"], -type:["Any","Callable","Coroutine","Dict","List","Literal","Generic","Optional","Sequence","Set","Tuple","Type","Union"] -},r={className:"meta",begin:/^(>>>|\.\.\.) 
/},s={className:"subst",begin:/\{/, -end:/\}/,keywords:i,illegal:/#/},o={begin:/\{\{/,relevance:0},l={ -className:"string",contains:[e.BACKSLASH_ESCAPE],variants:[{ -begin:/([uU]|[bB]|[rR]|[bB][rR]|[rR][bB])?'''/,end:/'''/, -contains:[e.BACKSLASH_ESCAPE,r],relevance:10},{ -begin:/([uU]|[bB]|[rR]|[bB][rR]|[rR][bB])?"""/,end:/"""/, -contains:[e.BACKSLASH_ESCAPE,r],relevance:10},{ -begin:/([fF][rR]|[rR][fF]|[fF])'''/,end:/'''/, -contains:[e.BACKSLASH_ESCAPE,r,o,s]},{begin:/([fF][rR]|[rR][fF]|[fF])"""/, -end:/"""/,contains:[e.BACKSLASH_ESCAPE,r,o,s]},{begin:/([uU]|[rR])'/,end:/'/, -relevance:10},{begin:/([uU]|[rR])"/,end:/"/,relevance:10},{ -begin:/([bB]|[bB][rR]|[rR][bB])'/,end:/'/},{begin:/([bB]|[bB][rR]|[rR][bB])"/, -end:/"/},{begin:/([fF][rR]|[rR][fF]|[fF])'/,end:/'/, -contains:[e.BACKSLASH_ESCAPE,o,s]},{begin:/([fF][rR]|[rR][fF]|[fF])"/,end:/"/, -contains:[e.BACKSLASH_ESCAPE,o,s]},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE] -},c="[0-9](_?[0-9])*",d=`(\\b(${c}))?\\.(${c})|\\b(${c})\\.`,g="\\b|"+a.join("|"),u={ -className:"number",relevance:0,variants:[{ -begin:`(\\b(${c})|(${d}))[eE][+-]?(${c})[jJ]?(?=${g})`},{begin:`(${d})[jJ]?`},{ -begin:`\\b([1-9](_?[0-9])*|0+(_?0)*)[lLjJ]?(?=${g})`},{ -begin:`\\b0[bB](_?[01])+[lL]?(?=${g})`},{begin:`\\b0[oO](_?[0-7])+[lL]?(?=${g})` -},{begin:`\\b0[xX](_?[0-9a-fA-F])+[lL]?(?=${g})`},{begin:`\\b(${c})[jJ](?=${g})` -}]},b={className:"comment",begin:n.lookahead(/# type:/),end:/$/,keywords:i, -contains:[{begin:/# type:/},{begin:/#/,end:/\b\B/,endsWithParent:!0}]},m={ -className:"params",variants:[{className:"",begin:/\(\s*\)/,skip:!0},{begin:/\(/, -end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:i, -contains:["self",r,u,l,e.HASH_COMMENT_MODE]}]};return s.contains=[l,u,r],{ -name:"Python",aliases:["py","gyp","ipython"],unicodeRegex:!0,keywords:i, -illegal:/(<\/|\?)|=>/,contains:[r,u,{begin:/\bself\b/},{beginKeywords:"if", -relevance:0},l,b,e.HASH_COMMENT_MODE,{match:[/\bdef/,/\s+/,t],scope:{ 
-1:"keyword",3:"title.function"},contains:[m]},{variants:[{ -match:[/\bclass/,/\s+/,t,/\s*/,/\(\s*/,t,/\s*\)/]},{match:[/\bclass/,/\s+/,t]}], -scope:{1:"keyword",3:"title.class",6:"title.class.inherited"}},{ -className:"meta",begin:/^[\t ]*@/,end:/(?=#)|$/,contains:[u,m,l]}]}}, -grmr_python_repl:e=>({aliases:["pycon"],contains:[{className:"meta.prompt", -starts:{end:/ |$/,starts:{end:"$",subLanguage:"python"}},variants:[{ -begin:/^>>>(?=[ ]|$)/},{begin:/^\.\.\.(?=[ ]|$)/}]}]}),grmr_r:e=>{ -const n=e.regex,t=/(?:(?:[a-zA-Z]|\.[._a-zA-Z])[._a-zA-Z0-9]*)|\.(?!\d)/,a=n.either(/0[xX][0-9a-fA-F]+\.[0-9a-fA-F]*[pP][+-]?\d+i?/,/0[xX][0-9a-fA-F]+(?:[pP][+-]?\d+)?[Li]?/,/(?:\d+(?:\.\d*)?|\.\d+)(?:[eE][+-]?\d+)?[Li]?/),i=/[=!<>:]=|\|\||&&|:::?|<-|<<-|->>|->|\|>|[-+*\/?!$&|:<=>@^~]|\*\*/,r=n.either(/[()]/,/[{}]/,/\[\[/,/[[\]]/,/\\/,/,/) -;return{name:"R",keywords:{$pattern:t, -keyword:"function if in break next repeat else for while", -literal:"NULL NA TRUE FALSE Inf NaN NA_integer_|10 NA_real_|10 NA_character_|10 NA_complex_|10", -built_in:"LETTERS letters month.abb month.name pi T F abs acos acosh all any anyNA Arg as.call as.character as.complex as.double as.environment as.integer as.logical as.null.default as.numeric as.raw asin asinh atan atanh attr attributes baseenv browser c call ceiling class Conj cos cosh cospi cummax cummin cumprod cumsum digamma dim dimnames emptyenv exp expression floor forceAndCall gamma gc.time globalenv Im interactive invisible is.array is.atomic is.call is.character is.complex is.double is.environment is.expression is.finite is.function is.infinite is.integer is.language is.list is.logical is.matrix is.na is.name is.nan is.null is.numeric is.object is.pairlist is.raw is.recursive is.single is.symbol lazyLoadDBfetch length lgamma list log max min missing Mod names nargs nzchar oldClass on.exit pos.to.env proc.time prod quote range Re rep retracemem return round seq_along seq_len seq.int sign signif sin sinh sinpi sqrt standardGeneric substitute 
sum switch tan tanh tanpi tracemem trigamma trunc unclass untracemem UseMethod xtfrm" -},contains:[e.COMMENT(/#'/,/$/,{contains:[{scope:"doctag",match:/@examples/, -starts:{end:n.lookahead(n.either(/\n^#'\s*(?=@[a-zA-Z]+)/,/\n^(?!#')/)), -endsParent:!0}},{scope:"doctag",begin:"@param",end:/$/,contains:[{ -scope:"variable",variants:[{match:t},{match:/`(?:\\.|[^`\\])+`/}],endsParent:!0 -}]},{scope:"doctag",match:/@[a-zA-Z]+/},{scope:"keyword",match:/\\[a-zA-Z]+/}] -}),e.HASH_COMMENT_MODE,{scope:"string",contains:[e.BACKSLASH_ESCAPE], -variants:[e.END_SAME_AS_BEGIN({begin:/[rR]"(-*)\(/,end:/\)(-*)"/ -}),e.END_SAME_AS_BEGIN({begin:/[rR]"(-*)\{/,end:/\}(-*)"/ -}),e.END_SAME_AS_BEGIN({begin:/[rR]"(-*)\[/,end:/\](-*)"/ -}),e.END_SAME_AS_BEGIN({begin:/[rR]'(-*)\(/,end:/\)(-*)'/ -}),e.END_SAME_AS_BEGIN({begin:/[rR]'(-*)\{/,end:/\}(-*)'/ -}),e.END_SAME_AS_BEGIN({begin:/[rR]'(-*)\[/,end:/\](-*)'/}),{begin:'"',end:'"', -relevance:0},{begin:"'",end:"'",relevance:0}]},{relevance:0,variants:[{scope:{ -1:"operator",2:"number"},match:[i,a]},{scope:{1:"operator",2:"number"}, -match:[/%[^%]*%/,a]},{scope:{1:"punctuation",2:"number"},match:[r,a]},{scope:{ -2:"number"},match:[/[^a-zA-Z0-9._]|^/,a]}]},{scope:{3:"operator"}, -match:[t,/\s+/,/<-/,/\s+/]},{scope:"operator",relevance:0,variants:[{match:i},{ -match:/%[^%]*%/}]},{scope:"punctuation",relevance:0,match:r},{begin:"`",end:"`", -contains:[{begin:/\\./}]}]}},grmr_ruby:e=>{ -const n=e.regex,t="([a-zA-Z_]\\w*[!?=]?|[-+~]@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?)",a=n.either(/\b([A-Z]+[a-z0-9]+)+/,/\b([A-Z]+[a-z0-9]+)+[A-Z]+/),i=n.concat(a,/(::\w+)*/),r={ -"variable.constant":["__FILE__","__LINE__","__ENCODING__"], -"variable.language":["self","super"], 
-keyword:["alias","and","begin","BEGIN","break","case","class","defined","do","else","elsif","end","END","ensure","for","if","in","module","next","not","or","redo","require","rescue","retry","return","then","undef","unless","until","when","while","yield","include","extend","prepend","public","private","protected","raise","throw"], -built_in:["proc","lambda","attr_accessor","attr_reader","attr_writer","define_method","private_constant","module_function"], -literal:["true","false","nil"]},s={className:"doctag",begin:"@[A-Za-z]+"},o={ -begin:"#<",end:">"},l=[e.COMMENT("#","$",{contains:[s] -}),e.COMMENT("^=begin","^=end",{contains:[s],relevance:10 -}),e.COMMENT("^__END__",e.MATCH_NOTHING_RE)],c={className:"subst",begin:/#\{/, -end:/\}/,keywords:r},d={className:"string",contains:[e.BACKSLASH_ESCAPE,c], -variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/`/,end:/`/},{ -begin:/%[qQwWx]?\(/,end:/\)/},{begin:/%[qQwWx]?\[/,end:/\]/},{ -begin:/%[qQwWx]?\{/,end:/\}/},{begin:/%[qQwWx]?/},{begin:/%[qQwWx]?\//, -end:/\//},{begin:/%[qQwWx]?%/,end:/%/},{begin:/%[qQwWx]?-/,end:/-/},{ -begin:/%[qQwWx]?\|/,end:/\|/},{begin:/\B\?(\\\d{1,3})/},{ -begin:/\B\?(\\x[A-Fa-f0-9]{1,2})/},{begin:/\B\?(\\u\{?[A-Fa-f0-9]{1,6}\}?)/},{ -begin:/\B\?(\\M-\\C-|\\M-\\c|\\c\\M-|\\M-|\\C-\\M-)[\x20-\x7e]/},{ -begin:/\B\?\\(c|C-)[\x20-\x7e]/},{begin:/\B\?\\?\S/},{ -begin:n.concat(/<<[-~]?'?/,n.lookahead(/(\w+)(?=\W)[^\n]*\n(?:[^\n]*\n)*?\s*\1\b/)), -contains:[e.END_SAME_AS_BEGIN({begin:/(\w+)/,end:/(\w+)/, -contains:[e.BACKSLASH_ESCAPE,c]})]}]},g="[0-9](_?[0-9])*",u={className:"number", -relevance:0,variants:[{ -begin:`\\b([1-9](_?[0-9])*|0)(\\.(${g}))?([eE][+-]?(${g})|r)?i?\\b`},{ -begin:"\\b0[dD][0-9](_?[0-9])*r?i?\\b"},{begin:"\\b0[bB][0-1](_?[0-1])*r?i?\\b" -},{begin:"\\b0[oO][0-7](_?[0-7])*r?i?\\b"},{ -begin:"\\b0[xX][0-9a-fA-F](_?[0-9a-fA-F])*r?i?\\b"},{ -begin:"\\b0(_?[0-7])+r?i?\\b"}]},b={variants:[{match:/\(\)/},{ -className:"params",begin:/\(/,end:/(?=\))/,excludeBegin:!0,endsParent:!0, 
-keywords:r}]},m=[d,{variants:[{match:[/class\s+/,i,/\s+<\s+/,i]},{ -match:[/\b(class|module)\s+/,i]}],scope:{2:"title.class", -4:"title.class.inherited"},keywords:r},{match:[/(include|extend)\s+/,i],scope:{ -2:"title.class"},keywords:r},{relevance:0,match:[i,/\.new[. (]/],scope:{ -1:"title.class"}},{relevance:0,match:/\b[A-Z][A-Z_0-9]+\b/, -className:"variable.constant"},{relevance:0,match:a,scope:"title.class"},{ -match:[/def/,/\s+/,t],scope:{1:"keyword",3:"title.function"},contains:[b]},{ -begin:e.IDENT_RE+"::"},{className:"symbol", -begin:e.UNDERSCORE_IDENT_RE+"(!|\\?)?:",relevance:0},{className:"symbol", -begin:":(?!\\s)",contains:[d,{begin:t}],relevance:0},u,{className:"variable", -begin:"(\\$\\W)|((\\$|@@?)(\\w+))(?=[^@$?])(?![A-Za-z])(?![@$?'])"},{ -className:"params",begin:/\|/,end:/\|/,excludeBegin:!0,excludeEnd:!0, -relevance:0,keywords:r},{begin:"("+e.RE_STARTERS_RE+"|unless)\\s*", -keywords:"unless",contains:[{className:"regexp",contains:[e.BACKSLASH_ESCAPE,c], -illegal:/\n/,variants:[{begin:"/",end:"/[a-z]*"},{begin:/%r\{/,end:/\}[a-z]*/},{ -begin:"%r\\(",end:"\\)[a-z]*"},{begin:"%r!",end:"![a-z]*"},{begin:"%r\\[", -end:"\\][a-z]*"}]}].concat(o,l),relevance:0}].concat(o,l) -;c.contains=m,b.contains=m;const p=[{begin:/^\s*=>/,starts:{end:"$",contains:m} -},{className:"meta.prompt", -begin:"^([>?]>|[\\w#]+\\(\\w+\\):\\d+:\\d+[>*]|(\\w+-)?\\d+\\.\\d+\\.\\d+(p\\d+)?[^\\d][^>]+>)(?=[ ])", -starts:{end:"$",keywords:r,contains:m}}];return l.unshift(o),{name:"Ruby", -aliases:["rb","gemspec","podspec","thor","irb"],keywords:r,illegal:/\/\*/, -contains:[e.SHEBANG({binary:"ruby"})].concat(p).concat(l).concat(m)}}, -grmr_rust:e=>{const n=e.regex,t={className:"title.function.invoke",relevance:0, -begin:n.concat(/\b/,/(?!let|for|while|if|else|match\b)/,e.IDENT_RE,n.lookahead(/\s*\(/)) -},a="([ui](8|16|32|64|128|size)|f(32|64))?",i=["drop 
","Copy","Send","Sized","Sync","Drop","Fn","FnMut","FnOnce","ToOwned","Clone","Debug","PartialEq","PartialOrd","Eq","Ord","AsRef","AsMut","Into","From","Default","Iterator","Extend","IntoIterator","DoubleEndedIterator","ExactSizeIterator","SliceConcatExt","ToString","assert!","assert_eq!","bitflags!","bytes!","cfg!","col!","concat!","concat_idents!","debug_assert!","debug_assert_eq!","env!","eprintln!","panic!","file!","format!","format_args!","include_bytes!","include_str!","line!","local_data_key!","module_path!","option_env!","print!","println!","select!","stringify!","try!","unimplemented!","unreachable!","vec!","write!","writeln!","macro_rules!","assert_ne!","debug_assert_ne!"],r=["i8","i16","i32","i64","i128","isize","u8","u16","u32","u64","u128","usize","f32","f64","str","char","bool","Box","Option","Result","String","Vec"] -;return{name:"Rust",aliases:["rs"],keywords:{$pattern:e.IDENT_RE+"!?",type:r, -keyword:["abstract","as","async","await","become","box","break","const","continue","crate","do","dyn","else","enum","extern","false","final","fn","for","if","impl","in","let","loop","macro","match","mod","move","mut","override","priv","pub","ref","return","self","Self","static","struct","super","trait","true","try","type","typeof","unsafe","unsized","use","virtual","where","while","yield"], -literal:["true","false","Some","None","Ok","Err"],built_in:i},illegal:""},t]}}, -grmr_scss:e=>{const n=ie(e),t=le,a=oe,i="@[a-z-]+",r={className:"variable", -begin:"(\\$[a-zA-Z-][a-zA-Z0-9_-]*)\\b",relevance:0};return{name:"SCSS", -case_insensitive:!0,illegal:"[=/|']", -contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,n.CSS_NUMBER_MODE,{ -className:"selector-id",begin:"#[A-Za-z0-9_-]+",relevance:0},{ -className:"selector-class",begin:"\\.[A-Za-z0-9_-]+",relevance:0 -},n.ATTRIBUTE_SELECTOR_MODE,{className:"selector-tag", -begin:"\\b("+re.join("|")+")\\b",relevance:0},{className:"selector-pseudo", -begin:":("+a.join("|")+")"},{className:"selector-pseudo", 
-begin:":(:)?("+t.join("|")+")"},r,{begin:/\(/,end:/\)/, -contains:[n.CSS_NUMBER_MODE]},n.CSS_VARIABLE,{className:"attribute", -begin:"\\b("+ce.join("|")+")\\b"},{ -begin:"\\b(whitespace|wait|w-resize|visible|vertical-text|vertical-ideographic|uppercase|upper-roman|upper-alpha|underline|transparent|top|thin|thick|text|text-top|text-bottom|tb-rl|table-header-group|table-footer-group|sw-resize|super|strict|static|square|solid|small-caps|separate|se-resize|scroll|s-resize|rtl|row-resize|ridge|right|repeat|repeat-y|repeat-x|relative|progress|pointer|overline|outside|outset|oblique|nowrap|not-allowed|normal|none|nw-resize|no-repeat|no-drop|newspaper|ne-resize|n-resize|move|middle|medium|ltr|lr-tb|lowercase|lower-roman|lower-alpha|loose|list-item|line|line-through|line-edge|lighter|left|keep-all|justify|italic|inter-word|inter-ideograph|inside|inset|inline|inline-block|inherit|inactive|ideograph-space|ideograph-parenthesis|ideograph-numeric|ideograph-alpha|horizontal|hidden|help|hand|groove|fixed|ellipsis|e-resize|double|dotted|distribute|distribute-space|distribute-letter|distribute-all-lines|disc|disabled|default|decimal|dashed|crosshair|collapse|col-resize|circle|char|center|capitalize|break-word|break-all|bottom|both|bolder|bold|block|bidi-override|below|baseline|auto|always|all-scroll|absolute|table|table-cell)\\b" -},{begin:/:/,end:/[;}{]/,relevance:0, -contains:[n.BLOCK_COMMENT,r,n.HEXCOLOR,n.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,n.IMPORTANT,n.FUNCTION_DISPATCH] -},{begin:"@(page|font-face)",keywords:{$pattern:i,keyword:"@page @font-face"}},{ -begin:"@",end:"[{;]",returnBegin:!0,keywords:{$pattern:/[a-z-]+/, -keyword:"and or not only",attribute:se.join(" ")},contains:[{begin:i, -className:"keyword"},{begin:/[a-z-]+(?=:)/,className:"attribute" -},r,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,n.HEXCOLOR,n.CSS_NUMBER_MODE] -},n.FUNCTION_DISPATCH]}},grmr_shell:e=>({name:"Shell Session", 
-aliases:["console","shellsession"],contains:[{className:"meta.prompt", -begin:/^\s{0,3}[/~\w\d[\]()@-]*[>%$#][ ]?/,starts:{end:/[^\\](?=\s*$)/, -subLanguage:"bash"}}]}),grmr_sql:e=>{ -const n=e.regex,t=e.COMMENT("--","$"),a=["true","false","unknown"],i=["bigint","binary","blob","boolean","char","character","clob","date","dec","decfloat","decimal","float","int","integer","interval","nchar","nclob","national","numeric","real","row","smallint","time","timestamp","varchar","varying","varbinary"],r=["abs","acos","array_agg","asin","atan","avg","cast","ceil","ceiling","coalesce","corr","cos","cosh","count","covar_pop","covar_samp","cume_dist","dense_rank","deref","element","exp","extract","first_value","floor","json_array","json_arrayagg","json_exists","json_object","json_objectagg","json_query","json_table","json_table_primitive","json_value","lag","last_value","lead","listagg","ln","log","log10","lower","max","min","mod","nth_value","ntile","nullif","percent_rank","percentile_cont","percentile_disc","position","position_regex","power","rank","regr_avgx","regr_avgy","regr_count","regr_intercept","regr_r2","regr_slope","regr_sxx","regr_sxy","regr_syy","row_number","sin","sinh","sqrt","stddev_pop","stddev_samp","substring","substring_regex","sum","tan","tanh","translate","translate_regex","treat","trim","trim_array","unnest","upper","value_of","var_pop","var_samp","width_bucket"],s=["create table","insert into","primary key","foreign key","not null","alter table","add constraint","grouping sets","on overflow","character set","respect nulls","ignore nulls","nulls first","nulls last","depth first","breadth 
first"],o=r,l=["abs","acos","all","allocate","alter","and","any","are","array","array_agg","array_max_cardinality","as","asensitive","asin","asymmetric","at","atan","atomic","authorization","avg","begin","begin_frame","begin_partition","between","bigint","binary","blob","boolean","both","by","call","called","cardinality","cascaded","case","cast","ceil","ceiling","char","char_length","character","character_length","check","classifier","clob","close","coalesce","collate","collect","column","commit","condition","connect","constraint","contains","convert","copy","corr","corresponding","cos","cosh","count","covar_pop","covar_samp","create","cross","cube","cume_dist","current","current_catalog","current_date","current_default_transform_group","current_path","current_role","current_row","current_schema","current_time","current_timestamp","current_path","current_role","current_transform_group_for_type","current_user","cursor","cycle","date","day","deallocate","dec","decimal","decfloat","declare","default","define","delete","dense_rank","deref","describe","deterministic","disconnect","distinct","double","drop","dynamic","each","element","else","empty","end","end_frame","end_partition","end-exec","equals","escape","every","except","exec","execute","exists","exp","external","extract","false","fetch","filter","first_value","float","floor","for","foreign","frame_row","free","from","full","function","fusion","get","global","grant","group","grouping","groups","having","hold","hour","identity","in","indicator","initial","inner","inout","insensitive","insert","int","integer","intersect","intersection","interval","into","is","join","json_array","json_arrayagg","json_exists","json_object","json_objectagg","json_query","json_table","json_table_primitive","json_value","lag","language","large","last_value","lateral","lead","leading","left","like","like_regex","listagg","ln","local","localtime","localtimestamp","log","log10","lower","match","match_number","match_recognize","matches","max"
,"member","merge","method","min","minute","mod","modifies","module","month","multiset","national","natural","nchar","nclob","new","no","none","normalize","not","nth_value","ntile","null","nullif","numeric","octet_length","occurrences_regex","of","offset","old","omit","on","one","only","open","or","order","out","outer","over","overlaps","overlay","parameter","partition","pattern","per","percent","percent_rank","percentile_cont","percentile_disc","period","portion","position","position_regex","power","precedes","precision","prepare","primary","procedure","ptf","range","rank","reads","real","recursive","ref","references","referencing","regr_avgx","regr_avgy","regr_count","regr_intercept","regr_r2","regr_slope","regr_sxx","regr_sxy","regr_syy","release","result","return","returns","revoke","right","rollback","rollup","row","row_number","rows","running","savepoint","scope","scroll","search","second","seek","select","sensitive","session_user","set","show","similar","sin","sinh","skip","smallint","some","specific","specifictype","sql","sqlexception","sqlstate","sqlwarning","sqrt","start","static","stddev_pop","stddev_samp","submultiset","subset","substring","substring_regex","succeeds","sum","symmetric","system","system_time","system_user","table","tablesample","tan","tanh","then","time","timestamp","timezone_hour","timezone_minute","to","trailing","translate","translate_regex","translation","treat","trigger","trim","trim_array","true","truncate","uescape","union","unique","unknown","unnest","update","upper","user","using","value","values","value_of","var_pop","var_samp","varbinary","varchar","varying","versioning","when","whenever","where","width_bucket","window","with","within","without","year","add","asc","collation","desc","final","first","last","view"].filter((e=>!r.includes(e))),c={ -begin:n.concat(/\b/,n.either(...o),/\s*\(/),relevance:0,keywords:{built_in:o}} -;return{name:"SQL",case_insensitive:!0,illegal:/[{}]|<\//,keywords:{ 
-$pattern:/\b[\w\.]+/,keyword:((e,{exceptions:n,when:t}={})=>{const a=t -;return n=n||[],e.map((e=>e.match(/\|\d+$/)||n.includes(e)?e:a(e)?e+"|0":e)) -})(l,{when:e=>e.length<3}),literal:a,type:i, -built_in:["current_catalog","current_date","current_default_transform_group","current_path","current_role","current_schema","current_transform_group_for_type","current_user","session_user","system_time","system_user","current_time","localtime","current_timestamp","localtimestamp"] -},contains:[{begin:n.either(...s),relevance:0,keywords:{$pattern:/[\w\.]+/, -keyword:l.concat(s),literal:a,type:i}},{className:"type", -begin:n.either("double precision","large object","with timezone","without timezone") -},c,{className:"variable",begin:/@[a-z0-9][a-z0-9_]*/},{className:"string", -variants:[{begin:/'/,end:/'/,contains:[{begin:/''/}]}]},{begin:/"/,end:/"/, -contains:[{begin:/""/}]},e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE,t,{ -className:"operator",begin:/[-+*/=%^~]|&&?|\|\|?|!=?|<(?:=>?|<|>)?|>[>=]?/, -relevance:0}]}},grmr_swift:e=>{const n={match:/\s+/,relevance:0 -},t=e.COMMENT("/\\*","\\*/",{contains:["self"]}),a=[e.C_LINE_COMMENT_MODE,t],i={ -match:[/\./,m(...xe,...Me)],className:{2:"keyword"}},r={match:b(/\./,m(...Ae)), -relevance:0},s=Ae.filter((e=>"string"==typeof e)).concat(["_|0"]),o={variants:[{ -className:"keyword", -match:m(...Ae.filter((e=>"string"!=typeof e)).concat(Se).map(ke),...Me)}]},l={ -$pattern:m(/\b\w+/,/#\w+/),keyword:s.concat(Re),literal:Ce},c=[i,r,o],g=[{ -match:b(/\./,m(...De)),relevance:0},{className:"built_in", -match:b(/\b/,m(...De),/(?=\()/)}],u={match:/->/,relevance:0},p=[u,{ -className:"operator",relevance:0,variants:[{match:Be},{match:`\\.(\\.|${Le})+`}] -}],_="([0-9]_*)+",h="([0-9a-fA-F]_*)+",f={className:"number",relevance:0, -variants:[{match:`\\b(${_})(\\.(${_}))?([eE][+-]?(${_}))?\\b`},{ -match:`\\b0x(${h})(\\.(${h}))?([pP][+-]?(${_}))?\\b`},{match:/\b0o([0-7]_*)+\b/ -},{match:/\b0b([01]_*)+\b/}]},E=(e="")=>({className:"subst",variants:[{ 
-match:b(/\\/,e,/[0\\tnr"']/)},{match:b(/\\/,e,/u\{[0-9a-fA-F]{1,8}\}/)}] -}),y=(e="")=>({className:"subst",match:b(/\\/,e,/[\t ]*(?:[\r\n]|\r\n)/) -}),N=(e="")=>({className:"subst",label:"interpol",begin:b(/\\/,e,/\(/),end:/\)/ -}),w=(e="")=>({begin:b(e,/"""/),end:b(/"""/,e),contains:[E(e),y(e),N(e)] -}),v=(e="")=>({begin:b(e,/"/),end:b(/"/,e),contains:[E(e),N(e)]}),O={ -className:"string", -variants:[w(),w("#"),w("##"),w("###"),v(),v("#"),v("##"),v("###")] -},k=[e.BACKSLASH_ESCAPE,{begin:/\[/,end:/\]/,relevance:0, -contains:[e.BACKSLASH_ESCAPE]}],x={begin:/\/[^\s](?=[^/\n]*\/)/,end:/\//, -contains:k},M=e=>{const n=b(e,/\//),t=b(/\//,e);return{begin:n,end:t, -contains:[...k,{scope:"comment",begin:`#(?!.*${t})`,end:/$/}]}},S={ -scope:"regexp",variants:[M("###"),M("##"),M("#"),x]},A={match:b(/`/,Fe,/`/) -},C=[A,{className:"variable",match:/\$\d+/},{className:"variable", -match:`\\$${ze}+`}],T=[{match:/(@|#(un)?)available/,scope:"keyword",starts:{ -contains:[{begin:/\(/,end:/\)/,keywords:Pe,contains:[...p,f,O]}]}},{ -scope:"keyword",match:b(/@/,m(...je))},{scope:"meta",match:b(/@/,Fe)}],R={ -match:d(/\b[A-Z]/),relevance:0,contains:[{className:"type", -match:b(/(AV|CA|CF|CG|CI|CL|CM|CN|CT|MK|MP|MTK|MTL|NS|SCN|SK|UI|WK|XC)/,ze,"+") -},{className:"type",match:Ue,relevance:0},{match:/[?!]+/,relevance:0},{ -match:/\.\.\./,relevance:0},{match:b(/\s+&\s+/,d(Ue)),relevance:0}]},D={ -begin://,keywords:l,contains:[...a,...c,...T,u,R]};R.contains.push(D) -;const I={begin:/\(/,end:/\)/,relevance:0,keywords:l,contains:["self",{ -match:b(Fe,/\s*:/),keywords:"_|0",relevance:0 -},...a,S,...c,...g,...p,f,O,...C,...T,R]},L={begin://, -keywords:"repeat each",contains:[...a,R]},B={begin:/\(/,end:/\)/,keywords:l, -contains:[{begin:m(d(b(Fe,/\s*:/)),d(b(Fe,/\s+/,Fe,/\s*:/))),end:/:/, -relevance:0,contains:[{className:"keyword",match:/\b_\b/},{className:"params", -match:Fe}]},...a,...c,...p,f,O,...T,R,I],endsParent:!0,illegal:/["']/},$={ 
-match:[/(func|macro)/,/\s+/,m(A.match,Fe,Be)],className:{1:"keyword", -3:"title.function"},contains:[L,B,n],illegal:[/\[/,/%/]},z={ -match:[/\b(?:subscript|init[?!]?)/,/\s*(?=[<(])/],className:{1:"keyword"}, -contains:[L,B,n],illegal:/\[|%/},F={match:[/operator/,/\s+/,Be],className:{ -1:"keyword",3:"title"}},U={begin:[/precedencegroup/,/\s+/,Ue],className:{ -1:"keyword",3:"title"},contains:[R],keywords:[...Te,...Ce],end:/}/} -;for(const e of O.variants){const n=e.contains.find((e=>"interpol"===e.label)) -;n.keywords=l;const t=[...c,...g,...p,f,O,...C];n.contains=[...t,{begin:/\(/, -end:/\)/,contains:["self",...t]}]}return{name:"Swift",keywords:l, -contains:[...a,$,z,{beginKeywords:"struct protocol class extension enum actor", -end:"\\{",excludeEnd:!0,keywords:l,contains:[e.inherit(e.TITLE_MODE,{ -className:"title.class",begin:/[A-Za-z$_][\u00C0-\u02B80-9A-Za-z$_]*/}),...c] -},F,U,{beginKeywords:"import",end:/$/,contains:[...a],relevance:0 -},S,...c,...g,...p,f,O,...C,...T,R,I]}},grmr_typescript:e=>{ -const n=Oe(e),t=_e,a=["any","void","number","boolean","string","object","never","symbol","bigint","unknown"],i={ -beginKeywords:"namespace",end:/\{/,excludeEnd:!0, -contains:[n.exports.CLASS_REFERENCE]},r={beginKeywords:"interface",end:/\{/, -excludeEnd:!0,keywords:{keyword:"interface extends",built_in:a}, -contains:[n.exports.CLASS_REFERENCE]},s={$pattern:_e, -keyword:he.concat(["type","namespace","interface","public","private","protected","implements","declare","abstract","readonly","enum","override"]), -literal:fe,built_in:ve.concat(a),"variable.language":we},o={className:"meta", -begin:"@"+t},l=(e,n,t)=>{const a=e.contains.findIndex((e=>e.label===n)) -;if(-1===a)throw Error("can not find mode to replace");e.contains.splice(a,1,t)} -;return Object.assign(n.keywords,s), -n.exports.PARAMS_CONTAINS.push(o),n.contains=n.contains.concat([o,i,r]), -l(n,"shebang",e.SHEBANG()),l(n,"use_strict",{className:"meta",relevance:10, -begin:/^\s*['"]use strict['"]/ 
-}),n.contains.find((e=>"func.def"===e.label)).relevance=0,Object.assign(n,{ -name:"TypeScript",aliases:["ts","tsx","mts","cts"]}),n},grmr_vbnet:e=>{ -const n=e.regex,t=/\d{1,2}\/\d{1,2}\/\d{4}/,a=/\d{4}-\d{1,2}-\d{1,2}/,i=/(\d|1[012])(:\d+){0,2} *(AM|PM)/,r=/\d{1,2}(:\d{1,2}){1,2}/,s={ -className:"literal",variants:[{begin:n.concat(/# */,n.either(a,t),/ *#/)},{ -begin:n.concat(/# */,r,/ *#/)},{begin:n.concat(/# */,i,/ *#/)},{ -begin:n.concat(/# */,n.either(a,t),/ +/,n.either(i,r),/ *#/)}] -},o=e.COMMENT(/'''/,/$/,{contains:[{className:"doctag",begin:/<\/?/,end:/>/}] -}),l=e.COMMENT(null,/$/,{variants:[{begin:/'/},{begin:/([\t ]|^)REM(?=\s)/}]}) -;return{name:"Visual Basic .NET",aliases:["vb"],case_insensitive:!0, -classNameAliases:{label:"symbol"},keywords:{ -keyword:"addhandler alias aggregate ansi as async assembly auto binary by byref byval call case catch class compare const continue custom declare default delegate dim distinct do each equals else elseif end enum erase error event exit explicit finally for friend from function get global goto group handles if implements imports in inherits interface into iterator join key let lib loop me mid module mustinherit mustoverride mybase myclass namespace narrowing new next notinheritable notoverridable of off on operator option optional order overloads overridable overrides paramarray partial preserve private property protected public raiseevent readonly redim removehandler resume return select set shadows shared skip static step stop structure strict sub synclock take text then throw to try unicode until using when where while widening with withevents writeonly yield", -built_in:"addressof and andalso await directcast gettype getxmlnamespace is isfalse isnot istrue like mod nameof new not or orelse trycast typeof xor cbool cbyte cchar cdate cdbl cdec cint clng cobj csbyte cshort csng cstr cuint culng cushort", -type:"boolean byte char date decimal double integer long object sbyte short single string uinteger ulong 
ushort", -literal:"true false nothing"}, -illegal:"//|\\{|\\}|endif|gosub|variant|wend|^\\$ ",contains:[{ -className:"string",begin:/"(""|[^/n])"C\b/},{className:"string",begin:/"/, -end:/"/,illegal:/\n/,contains:[{begin:/""/}]},s,{className:"number",relevance:0, -variants:[{begin:/\b\d[\d_]*((\.[\d_]+(E[+-]?[\d_]+)?)|(E[+-]?[\d_]+))[RFD@!#]?/ -},{begin:/\b\d[\d_]*((U?[SIL])|[%&])?/},{begin:/&H[\dA-F_]+((U?[SIL])|[%&])?/},{ -begin:/&O[0-7_]+((U?[SIL])|[%&])?/},{begin:/&B[01_]+((U?[SIL])|[%&])?/}]},{ -className:"label",begin:/^\w+:/},o,l,{className:"meta", -begin:/[\t ]*#(const|disable|else|elseif|enable|end|externalsource|if|region)\b/, -end:/$/,keywords:{ -keyword:"const disable else elseif enable end externalsource if region then"}, -contains:[l]}]}},grmr_wasm:e=>{e.regex;const n=e.COMMENT(/\(;/,/;\)/) -;return n.contains.push("self"),{name:"WebAssembly",keywords:{$pattern:/[\w.]+/, -keyword:["anyfunc","block","br","br_if","br_table","call","call_indirect","data","drop","elem","else","end","export","func","global.get","global.set","local.get","local.set","local.tee","get_global","get_local","global","if","import","local","loop","memory","memory.grow","memory.size","module","mut","nop","offset","param","result","return","select","set_global","set_local","start","table","tee_local","then","type","unreachable"] -},contains:[e.COMMENT(/;;/,/$/),n,{match:[/(?:offset|align)/,/\s*/,/=/], -className:{1:"keyword",3:"operator"}},{className:"variable",begin:/\$[\w_]+/},{ -match:/(\((?!;)|\))+/,className:"punctuation",relevance:0},{ -begin:[/(?:func|call|call_indirect)/,/\s+/,/\$[^\s)]+/],className:{1:"keyword", -3:"title.function"}},e.QUOTE_STRING_MODE,{match:/(i32|i64|f32|f64)(?!\.)/, -className:"type"},{className:"keyword", 
-match:/\b(f32|f64|i32|i64)(?:\.(?:abs|add|and|ceil|clz|const|convert_[su]\/i(?:32|64)|copysign|ctz|demote\/f64|div(?:_[su])?|eqz?|extend_[su]\/i32|floor|ge(?:_[su])?|gt(?:_[su])?|le(?:_[su])?|load(?:(?:8|16|32)_[su])?|lt(?:_[su])?|max|min|mul|nearest|neg?|or|popcnt|promote\/f32|reinterpret\/[fi](?:32|64)|rem_[su]|rot[lr]|shl|shr_[su]|store(?:8|16|32)?|sqrt|sub|trunc(?:_[su]\/f(?:32|64))?|wrap\/i64|xor))\b/ -},{className:"number",relevance:0, -match:/[+-]?\b(?:\d(?:_?\d)*(?:\.\d(?:_?\d)*)?(?:[eE][+-]?\d(?:_?\d)*)?|0x[\da-fA-F](?:_?[\da-fA-F])*(?:\.[\da-fA-F](?:_?[\da-fA-D])*)?(?:[pP][+-]?\d(?:_?\d)*)?)\b|\binf\b|\bnan(?::0x[\da-fA-F](?:_?[\da-fA-D])*)?\b/ -}]}},grmr_xml:e=>{ -const n=e.regex,t=n.concat(/[\p{L}_]/u,n.optional(/[\p{L}0-9_.-]*:/u),/[\p{L}0-9_.-]*/u),a={ -className:"symbol",begin:/&[a-z]+;|&#[0-9]+;|&#x[a-f0-9]+;/},i={begin:/\s/, -contains:[{className:"keyword",begin:/#?[a-z_][a-z1-9_-]+/,illegal:/\n/}] -},r=e.inherit(i,{begin:/\(/,end:/\)/}),s=e.inherit(e.APOS_STRING_MODE,{ -className:"string"}),o=e.inherit(e.QUOTE_STRING_MODE,{className:"string"}),l={ -endsWithParent:!0,illegal:/`]+/}]}]}]};return{ -name:"HTML, XML", -aliases:["html","xhtml","rss","atom","xjb","xsd","xsl","plist","wsf","svg"], -case_insensitive:!0,unicodeRegex:!0,contains:[{className:"meta",begin://,relevance:10,contains:[i,o,s,r,{begin:/\[/,end:/\]/,contains:[{ -className:"meta",begin://,contains:[i,r,o,s]}]}] -},e.COMMENT(//,{relevance:10}),{begin://, -relevance:10},a,{className:"meta",end:/\?>/,variants:[{begin:/<\?xml/, -relevance:10,contains:[o]},{begin:/<\?[a-z][a-z0-9]+/}]},{className:"tag", -begin:/)/,end:/>/,keywords:{name:"style"},contains:[l],starts:{ -end:/<\/style>/,returnEnd:!0,subLanguage:["css","xml"]}},{className:"tag", -begin:/)/,end:/>/,keywords:{name:"script"},contains:[l],starts:{ -end:/<\/script>/,returnEnd:!0,subLanguage:["javascript","handlebars","xml"]}},{ -className:"tag",begin:/<>|<\/>/},{className:"tag", -begin:n.concat(//,/>/,/\s/)))), 
-end:/\/?>/,contains:[{className:"name",begin:t,relevance:0,starts:l}]},{ -className:"tag",begin:n.concat(/<\//,n.lookahead(n.concat(t,/>/))),contains:[{ -className:"name",begin:t,relevance:0},{begin:/>/,relevance:0,endsParent:!0}]}]} -},grmr_yaml:e=>{ -const n="true false yes no null",t="[\\w#;/?:@&=+$,.~*'()[\\]]+",a={ -className:"string",relevance:0,variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/ -},{begin:/\S+/}],contains:[e.BACKSLASH_ESCAPE,{className:"template-variable", -variants:[{begin:/\{\{/,end:/\}\}/},{begin:/%\{/,end:/\}/}]}]},i=e.inherit(a,{ -variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/[^\s,{}[\]]+/}]}),r={ -end:",",endsWithParent:!0,excludeEnd:!0,keywords:n,relevance:0},s={begin:/\{/, -end:/\}/,contains:[r],illegal:"\\n",relevance:0},o={begin:"\\[",end:"\\]", -contains:[r],illegal:"\\n",relevance:0},l=[{className:"attr",variants:[{ -begin:"\\w[\\w :\\/.-]*:(?=[ \t]|$)"},{begin:'"\\w[\\w :\\/.-]*":(?=[ \t]|$)'},{ -begin:"'\\w[\\w :\\/.-]*':(?=[ \t]|$)"}]},{className:"meta",begin:"^---\\s*$", -relevance:10},{className:"string", -begin:"[\\|>]([1-9]?[+-])?[ ]*\\n( +)[^ ][^\\n]*\\n(\\2[^\\n]+\\n?)*"},{ -begin:"<%[%=-]?",end:"[%-]?%>",subLanguage:"ruby",excludeBegin:!0,excludeEnd:!0, -relevance:0},{className:"type",begin:"!\\w+!"+t},{className:"type", -begin:"!<"+t+">"},{className:"type",begin:"!"+t},{className:"type",begin:"!!"+t -},{className:"meta",begin:"&"+e.UNDERSCORE_IDENT_RE+"$"},{className:"meta", -begin:"\\*"+e.UNDERSCORE_IDENT_RE+"$"},{className:"bullet",begin:"-(?=[ ]|$)", -relevance:0},e.HASH_COMMENT_MODE,{beginKeywords:n,keywords:{literal:n}},{ -className:"number", -begin:"\\b[0-9]{4}(-[0-9][0-9]){0,2}([Tt \\t][0-9][0-9]?(:[0-9][0-9]){2})?(\\.[0-9]*)?([ \\t])*(Z|[-+][0-9][0-9]?(:[0-9][0-9])?)?\\b" -},{className:"number",begin:e.C_NUMBER_RE+"\\b",relevance:0},s,o,a],c=[...l] -;return c.pop(),c.push(i),r.contains=c,{name:"YAML",case_insensitive:!0, -aliases:["yml"],contains:l}}});const He=ae;for(const e of Object.keys(Ke)){ 
-const n=e.replace("grmr_","").replace("_","-");He.registerLanguage(n,Ke[e])} -return He}() -;"object"==typeof exports&&"undefined"!=typeof module&&(module.exports=hljs);/*! `erlang` grammar compiled for Highlight.js 11.9.0 */ -(()=>{var e=(()=>{"use strict";return e=>{ -const n="[a-z'][a-zA-Z0-9_']*",r="("+n+":"+n+"|"+n+")",a={ -keyword:"after and andalso|10 band begin bnot bor bsl bzr bxor case catch cond div end fun if let not of orelse|10 query receive rem try when xor", -literal:"false true"},i=e.COMMENT("%","$"),s={className:"number", -begin:"\\b(\\d+(_\\d+)*#[a-fA-F0-9]+(_[a-fA-F0-9]+)*|\\d+(_\\d+)*(\\.\\d+(_\\d+)*)?([eE][-+]?\\d+)?)", -relevance:0},c={begin:"fun\\s+"+n+"/\\d+"},t={begin:r+"\\(",end:"\\)", -returnBegin:!0,relevance:0,contains:[{begin:r,relevance:0},{begin:"\\(", -end:"\\)",endsWithParent:!0,returnEnd:!0,relevance:0}]},d={begin:/\{/,end:/\}/, -relevance:0},o={begin:"\\b_([A-Z][A-Za-z0-9_]*)?",relevance:0},l={ -begin:"[A-Z][a-zA-Z0-9_]*",relevance:0},b={begin:"#"+e.UNDERSCORE_IDENT_RE, -relevance:0,returnBegin:!0,contains:[{begin:"#"+e.UNDERSCORE_IDENT_RE, -relevance:0},{begin:/\{/,end:/\}/,relevance:0}]},g={ -beginKeywords:"fun receive if try case",end:"end",keywords:a} -;g.contains=[i,c,e.inherit(e.APOS_STRING_MODE,{className:"" -}),g,t,e.QUOTE_STRING_MODE,s,d,o,l,b] -;const E=[i,c,g,t,e.QUOTE_STRING_MODE,s,d,o,l,b] -;t.contains[1].contains=E,d.contains=E,b.contains[1].contains=E;const u={ -className:"params",begin:"\\(",end:"\\)",contains:E};return{name:"Erlang", -aliases:["erl"],keywords:a,illegal:"(",returnBegin:!0, -illegal:"\\(|#|//|/\\*|\\\\|:|;",contains:[u,e.inherit(e.TITLE_MODE,{begin:n})], -starts:{end:";|\\.",keywords:a,contains:E}},i,{begin:"^-",end:"\\.",relevance:0, -excludeEnd:!0,returnBegin:!0,keywords:{$pattern:"-"+e.IDENT_RE, 
-keyword:["-module","-record","-undef","-export","-ifdef","-ifndef","-author","-copyright","-doc","-vsn","-import","-include","-include_lib","-compile","-define","-else","-endif","-file","-behaviour","-behavior","-spec"].map((e=>e+"|1.5")).join(" ") -},contains:[u]},s,e.QUOTE_STRING_MODE,b,o,l,d,{begin:/\.$/}]}}})() -;hljs.registerLanguage("erlang",e)})(); \ No newline at end of file diff --git a/docs/erlang-literate-parser.js b/docs/erlang-literate-parser.js new file mode 100755 index 000000000..32506209b --- /dev/null +++ b/docs/erlang-literate-parser.js @@ -0,0 +1,580 @@ +#!/usr/bin/env node + +/** + * Comprehensive Erlang Literate Documentation Generator + * + * Handles all comment types: + * - %%% Module documentation + * - %% Function/section documentation + * - % Inline comments (converted to prose) + * - @doc, @param, @returns annotations + */ + +import fs from 'fs'; +import path from 'path'; +import { execSync } from 'child_process'; + +class ErlangLiterateParser { + constructor(options = {}) { + this.options = { + githubBase: 'https://github.com/permaweb/HyperBEAM/blob/edge/src', + verbose: false, + ...options + }; + this.reset(); + } + + reset() { + this.lines = []; + this.moduleInfo = {}; + this.functions = []; + this.currentState = { + inFunction: false, + functionName: '', + functionSpec: '', + functionDoc: '', + functionLines: [], + pendingDoc: '', + specFunctionName: '', + braceDepth: 0, + parenDepth: 0 + }; + } + + parseFile(filePath) { + const content = fs.readFileSync(filePath, 'utf8'); + this.reset(); + this.lines = content.split('\n'); + + // Extract module-level information + this.extractModuleInfo(); + + // Process all lines for functions + this.processFunctions(); + + // Generate the markdown + return this.generateMarkdown(path.basename(filePath)); + } + + extractModuleInfo() { + let moduleDoc = []; + let inModuleDoc = false; + + for (let i = 0; i < this.lines.length; i++) { + const line = this.lines[i]; + const trimmed = line.trim(); + + // 
Module name + if (trimmed.match(/^-module\(([^)]+)\)/)) { + this.moduleInfo.name = trimmed.match(/^-module\(([^)]+)\)/)[1]; + } + + // Exports + if (trimmed.match(/^-export\(\[/)) { + const exportMatch = trimmed.match(/^-export\(\[([^\]]+)\]\)/); + if (exportMatch) { + this.moduleInfo.exports = exportMatch[1] + .split(',') + .map(e => e.trim()) + .filter(e => e); + } + } + + // Module documentation (%%% comments at the top) + if (trimmed.startsWith('%%%')) { + inModuleDoc = true; + let docLine = trimmed.substring(3).trim(); + // Remove @doc if present + docLine = docLine.replace(/^@doc\s*/, ''); + if (docLine) moduleDoc.push(docLine); + } else if (inModuleDoc && trimmed === '') { + // Empty line in module doc, continue + continue; + } else if (inModuleDoc && trimmed.startsWith('%')) { + // Continue with other comment types but end module doc + inModuleDoc = false; + break; + } else if (inModuleDoc && trimmed.startsWith('-')) { + // Hit a module directive, end of module doc + break; + } + } + + this.moduleInfo.doc = this.cleanDocumentation(moduleDoc.join('\n')); + } + + processFunctions() { + for (let i = 0; i < this.lines.length; i++) { + const line = this.lines[i]; + const trimmed = line.trim(); + + // Check for start of function documentation block + if (trimmed.startsWith('%% @doc')) { + this.collectFunctionDoc(i); + continue; + } + + // Check for -spec + if (trimmed.startsWith('-spec ')) { + this.collectSpec(i); + // Extract function name from spec line + const specMatch = trimmed.match(/-spec\s+([a-z][a-z0-9_]*)\s*\(/); + if (specMatch) { + this.currentState.specFunctionName = specMatch[1]; + } + continue; + } + + // Check for function start (handles both start-of-line and indented functions) + const funcMatch = trimmed.match(/^([a-z][a-z0-9_]*)\s*\(/); + if (funcMatch && !this.currentState.inFunction) { + // Save any pending doc + if (this.currentState.pendingDoc) { + this.currentState.functionDoc = this.currentState.pendingDoc; + this.currentState.pendingDoc 
= ''; + } + + // Use function name from spec if available, otherwise use detected name + const functionName = this.currentState.specFunctionName || funcMatch[1]; + this.startFunction(functionName, i); + + // Clear the spec function name after use + this.currentState.specFunctionName = ''; + } + + // If in function, collect lines + if (this.currentState.inFunction) { + this.collectFunctionLine(line, i); + + // Check for function end + if (this.isFunctionEnd(line)) { + this.endFunction(); + } + } + } + + // Handle any remaining function + if (this.currentState.inFunction) { + this.endFunction(); + } + } + + isFunctionDoc(line) { + return line.startsWith('%% @doc') || + (line.startsWith('%%') && !line.startsWith('%%%')); + } + + collectFunctionDoc(startIdx) { + const docLines = []; + + for (let i = startIdx; i < this.lines.length; i++) { + const line = this.lines[i]; + const trimmed = line.trim(); + + if (trimmed.startsWith('%%')) { + let docLine = trimmed.substring(2).trim(); + // Remove @doc prefix if on first line + if (i === startIdx) { + docLine = docLine.replace(/^@doc\s*/, ''); + } + docLines.push(docLine); + } else if (trimmed === '') { + // Empty line - might be part of doc block or end + // Look ahead to see if more %% comments follow + let j = i + 1; + let foundMoreDoc = false; + while (j < this.lines.length && this.lines[j].trim() === '') { + j++; + } + if (j < this.lines.length && this.lines[j].trim().startsWith('%%')) { + // More doc coming, include empty line + docLines.push(''); + foundMoreDoc = true; + } + if (!foundMoreDoc) { + // End of doc block + break; + } + } else if (trimmed.startsWith('%')) { + // Single % comment, continue but don't include + continue; + } else { + // End of doc block + break; + } + } + + this.currentState.pendingDoc = docLines.join('\n'); + } + + collectSpec(startIdx) { + const specLines = []; + let depth = 0; + + for (let i = startIdx; i < this.lines.length; i++) { + const line = this.lines[i]; + specLines.push(line); + + 
// Track parentheses to handle multi-line specs + for (const char of line) { + if (char === '(') depth++; + if (char === ')') depth--; + } + + // Check if spec is complete + if (line.trim().endsWith('.') && depth === 0) { + break; + } + } + + this.currentState.functionSpec = specLines.join('\n'); + + // After collecting spec, look for the actual function definition + // that should follow shortly after + for (let j = startIdx + specLines.length; j < this.lines.length; j++) { + const nextLine = this.lines[j].trim(); + + // Skip empty lines and comments + if (nextLine === '' || nextLine.startsWith('%')) { + continue; + } + + // Look for function definition + const funcMatch = nextLine.match(/^([a-z][a-z0-9_]*)\s*\(/); + if (funcMatch) { + // This is likely the function that corresponds to this spec + // But don't start the function here, let the main loop handle it + break; + } + + // If we hit another -spec or module directive, stop looking + if (nextLine.startsWith('-spec') || nextLine.startsWith('-')) { + break; + } + } + } + + startFunction(name, lineIdx) { + this.currentState.inFunction = true; + this.currentState.functionName = name; + this.currentState.functionLines = []; + this.currentState.braceDepth = 0; + this.currentState.parenDepth = 0; + } + + collectFunctionLine(line, lineIdx) { + this.currentState.functionLines.push(line); + + // Track depth for function end detection + for (const char of line) { + if (char === '{' || char === '[') this.currentState.braceDepth++; + if (char === '}' || char === ']') this.currentState.braceDepth--; + if (char === '(') this.currentState.parenDepth++; + if (char === ')') this.currentState.parenDepth--; + } + } + + isFunctionEnd(line) { + const trimmed = line.trim(); + + // Function ends with . 
at depth 0 + if (this.currentState.braceDepth === 0 && + this.currentState.parenDepth === 0 && + trimmed.endsWith('.') && + !trimmed.startsWith('%')) { + return true; + } + + return false; + } + + endFunction() { + // Process the function body to extract inline comments + const processedBody = this.processFunctionBody(this.currentState.functionLines); + + this.functions.push({ + name: this.currentState.functionName, + spec: this.currentState.functionSpec, + doc: this.currentState.functionDoc, + body: processedBody + }); + + // Reset state + this.currentState.inFunction = false; + this.currentState.functionName = ''; + this.currentState.functionSpec = ''; + this.currentState.functionDoc = ''; + this.currentState.functionLines = []; + this.currentState.specFunctionName = ''; + } + + processFunctionBody(lines) { + const segments = []; + let currentCode = []; + let inCodeBlock = false; + + for (const line of lines) { + const trimmed = line.trim(); + + // Check for inline comment (% or %% but not %%%) + if (trimmed.match(/^\s*%[^%]/) || trimmed.match(/^\s*%%[^%]/)) { + // Save any accumulated code + if (currentCode.length > 0) { + segments.push({ + type: 'code', + content: currentCode.join('\n') + }); + currentCode = []; + } + + // Extract comment text (remove % or %% prefix) + let commentText; + if (trimmed.match(/^\s*%%[^%]/)) { + commentText = line.replace(/^\s*%%\s?/, ''); + } else { + commentText = line.replace(/^\s*%\s?/, ''); + } + segments.push({ + type: 'comment', + content: this.cleanInlineComment(commentText) + }); + } else { + // Regular code line + currentCode.push(line); + } + } + + // Add any remaining code + if (currentCode.length > 0) { + segments.push({ + type: 'code', + content: currentCode.join('\n') + }); + } + + return segments; + } + + cleanInlineComment(text) { + // Convert `thing' to `thing` + return text.replace(/`([^']*?)'/g, '`$1`').trim(); + } + + cleanDocumentation(text) { + if (!text) return ''; + + // Convert Erlang doc syntax to Markdown 
+ return text + .replace(/`([^']*?)'/g, '`$1`') // Convert `code' to `code` + .replace(/<</g, '<<') // Fix HTML entities + .replace(/>>/g, '>>') + .replace(/@doc\s*/g, '') // Remove @doc tags + .trim(); + } + + parseDocumentation(docText) { + const lines = docText.split('\n'); + const result = { + description: [], + params: [], + returns: [] + }; + + let currentSection = 'description'; + let currentParam = null; + + for (const line of lines) { + const trimmed = line.trim(); + + // Check for @param + const paramMatch = trimmed.match(/^@param\s+(\S+)\s*(.*)/); + if (paramMatch) { + if (currentParam) { + result.params.push(currentParam); + } + currentParam = { + name: paramMatch[1], + description: paramMatch[2] || '' + }; + currentSection = 'param'; + continue; + } + + // Check for @returns + if (trimmed.match(/^@returns?\s/)) { + if (currentParam) { + result.params.push(currentParam); + currentParam = null; + } + const returnsText = trimmed.replace(/^@returns?\s*/, ''); + result.returns.push(returnsText); + currentSection = 'returns'; + continue; + } + + // Add to current section + if (currentSection === 'description' && trimmed) { + result.description.push(trimmed); + } else if (currentSection === 'param' && currentParam && trimmed) { + currentParam.description += ' ' + trimmed; + } else if (currentSection === 'returns' && trimmed) { + result.returns.push(trimmed); + } + } + + // Save final param if exists + if (currentParam) { + result.params.push(currentParam); + } + + return result; + } + + generateMarkdown(fileName) { + const githubUrl = `${this.options.githubBase}/${fileName}`; + let md = []; + + // Header + md.push(`# ${this.moduleInfo.name || fileName.replace('.erl', '')}`); + md.push(''); + md.push(`[View source on GitHub](${githubUrl})`); + md.push(''); + + // Module documentation + if (this.moduleInfo.doc) { + md.push(this.moduleInfo.doc); + md.push(''); + md.push('---'); + md.push(''); + } + + // Exports + if (this.moduleInfo.exports && 
this.moduleInfo.exports.length > 0) { + md.push('## Exported Functions'); + md.push(''); + for (const exp of this.moduleInfo.exports) { + md.push(`- \`${exp}\``); + } + md.push(''); + md.push('---'); + md.push(''); + } + + // Functions + for (const func of this.functions) { + md.push(`## ${func.name}`); + md.push(''); + + // Parse and format documentation + if (func.doc) { + const parsed = this.parseDocumentation(func.doc); + + // Description + if (parsed.description.length > 0) { + md.push(this.cleanDocumentation(parsed.description.join(' '))); + md.push(''); + } + + // Parameters + if (parsed.params.length > 0) { + md.push('### Parameters'); + md.push(''); + for (const param of parsed.params) { + const desc = this.cleanDocumentation(param.description); + md.push(`- \`${param.name}\` - ${desc}`); + } + md.push(''); + } + + // Returns + if (parsed.returns.length > 0) { + md.push('### Returns'); + md.push(''); + for (const ret of parsed.returns) { + md.push(`- ${this.cleanDocumentation(ret)}`); + } + md.push(''); + } + } + + // Spec + if (func.spec) { + md.push('```erlang'); + md.push(func.spec.trim()); + md.push('```'); + md.push(''); + } + + // Function body with inline comments + if (func.body && func.body.length > 0) { + // md.push('### Function'); + md.push(''); + + for (const segment of func.body) { + if (segment.type === 'comment') { + md.push(segment.content); + md.push(''); + } else if (segment.type === 'code') { + md.push('```erlang'); + md.push(segment.content.trim()); + md.push('```'); + md.push(''); + } + } + } + + md.push(''); + } + + // Footer + md.push('---'); + md.push(''); + md.push(`*Generated from [${fileName}](${githubUrl})*`); + + return md.join('\n'); + } +} + +// CLI Interface +function main() { + const args = process.argv.slice(2); + const verbose = args.includes('-v') || args.includes('--verbose'); + + // Get source directory + const srcDir = process.env.SRC_DIR || path.join(process.cwd(), 'src'); + const outputDir = process.env.OUTPUT_DIR 
|| path.join(process.cwd(), 'docs/literate-erlang'); + + // Ensure output directory exists + if (!fs.existsSync(outputDir)) { + fs.mkdirSync(outputDir, { recursive: true }); + } + + // Process all .erl files + const files = fs.readdirSync(srcDir).filter(f => f.endsWith('.erl')); + const parser = new ErlangLiterateParser({ verbose }); + + console.log(`Processing ${files.length} Erlang files...`); + + for (const file of files) { + if (verbose) console.log(` Processing ${file}...`); + + try { + const inputPath = path.join(srcDir, file); + const outputPath = path.join(outputDir, file + '.md'); + + const markdown = parser.parseFile(inputPath); + fs.writeFileSync(outputPath, markdown); + + } catch (error) { + console.error(`Error processing ${file}:`, error.message); + } + } + + console.log(`✓ Generated documentation in ${outputDir}`); +} + +if (import.meta.url === `file://${process.argv[1]}`) { + main(); +} + +export default ErlangLiterateParser; \ No newline at end of file From c8ec9669312bb2a87902afa358ad976661a3760e Mon Sep 17 00:00:00 2001 From: Dylan Shade <63427984+dpshade@users.noreply.github.com> Date: Fri, 19 Sep 2025 16:34:26 -0400 Subject: [PATCH 13/17] docs: Enhance Erlang literate parser and update documentation structure - Introduce `collectParamTagsBeforeSpec` method to gather parameter and return tags before specifications, improving documentation accuracy. - Modify `processFunctionBody` to handle inline documentation tags more effectively, ensuring proper formatting in generated output. - Update `cleanDocumentation` and `formatPreContent` methods for better handling of structured content and documentation formatting. - Revise `introduction.md` and `SUMMARY.md` to reflect updated terminology and improve organization of documentation sections. 
--- docs/book/src/SUMMARY.md | 93 ++++----- docs/book/src/introduction.md | 4 +- docs/erlang-literate-parser.js | 369 ++++++++++++++++++++++++++------- 3 files changed, 345 insertions(+), 121 deletions(-) diff --git a/docs/book/src/SUMMARY.md b/docs/book/src/SUMMARY.md index 93adc7dee..b2f5260c3 100644 --- a/docs/book/src/SUMMARY.md +++ b/docs/book/src/SUMMARY.md @@ -11,7 +11,7 @@ - [ar_tx](ar_tx.erl.md) - [ar_wallet](ar_wallet.erl.md) -# Device Framework +# Devices - [dev_apply](dev_apply.erl.md) - [dev_cache](dev_cache.erl.md) @@ -49,52 +49,51 @@ - [dev_codec_json](dev_codec_json.erl.md) - [dev_codec_structured](dev_codec_structured.erl.md) -# Core Services - -- [dev_delegated_compute](dev_delegated_compute.erl.md) -- [dev_genesis_wasm](dev_genesis_wasm.erl.md) -- [dev_green_zone](dev_green_zone.erl.md) -- [dev_hook](dev_hook.erl.md) -- [dev_hyperbuddy](dev_hyperbuddy.erl.md) -- [dev_json_iface](dev_json_iface.erl.md) -- [dev_local_name](dev_local_name.erl.md) -- [dev_lookup](dev_lookup.erl.md) -- [dev_lua](dev_lua.erl.md) -- [dev_lua_lib](dev_lua_lib.erl.md) -- [dev_lua_test](dev_lua_test.erl.md) -- [dev_lua_test_ledgers](dev_lua_test_ledgers.erl.md) -- [dev_manifest](dev_manifest.erl.md) -- [dev_message](dev_message.erl.md) -- [dev_meta](dev_meta.erl.md) -- [dev_name](dev_name.erl.md) -- [dev_node_process](dev_node_process.erl.md) -- [dev_p4](dev_p4.erl.md) -- [dev_patch](dev_patch.erl.md) -- [dev_poda](dev_poda.erl.md) -- [dev_process](dev_process.erl.md) -- [dev_process_cache](dev_process_cache.erl.md) -- [dev_process_worker](dev_process_worker.erl.md) -- [dev_profile](dev_profile.erl.md) -- [dev_push](dev_push.erl.md) -- [dev_query](dev_query.erl.md) -- [dev_query_arweave](dev_query_arweave.erl.md) -- [dev_query_graphql](dev_query_graphql.erl.md) -- [dev_query_test_vectors](dev_query_test_vectors.erl.md) -- [dev_relay](dev_relay.erl.md) -- [dev_router](dev_router.erl.md) -- [dev_scheduler](dev_scheduler.erl.md) -- 
[dev_scheduler_cache](dev_scheduler_cache.erl.md) -- [dev_scheduler_formats](dev_scheduler_formats.erl.md) -- [dev_scheduler_registry](dev_scheduler_registry.erl.md) -- [dev_scheduler_server](dev_scheduler_server.erl.md) -- [dev_simple_pay](dev_simple_pay.erl.md) -- [dev_snp](dev_snp.erl.md) -- [dev_snp_nif](dev_snp_nif.erl.md) -- [dev_stack](dev_stack.erl.md) -- [dev_volume](dev_volume.erl.md) -- [dev_wasi](dev_wasi.erl.md) -- [dev_wasm](dev_wasm.erl.md) -- [dev_whois](dev_whois.erl.md) + + - [dev_delegated_compute](dev_delegated_compute.erl.md) + - [dev_genesis_wasm](dev_genesis_wasm.erl.md) + - [dev_green_zone](dev_green_zone.erl.md) + - [dev_hook](dev_hook.erl.md) + - [dev_hyperbuddy](dev_hyperbuddy.erl.md) + - [dev_json_iface](dev_json_iface.erl.md) + - [dev_local_name](dev_local_name.erl.md) + - [dev_lookup](dev_lookup.erl.md) + - [dev_lua](dev_lua.erl.md) + - [dev_lua_lib](dev_lua_lib.erl.md) + - [dev_lua_test](dev_lua_test.erl.md) + - [dev_lua_test_ledgers](dev_lua_test_ledgers.erl.md) + - [dev_manifest](dev_manifest.erl.md) + - [dev_message](dev_message.erl.md) + - [dev_meta](dev_meta.erl.md) + - [dev_name](dev_name.erl.md) + - [dev_node_process](dev_node_process.erl.md) + - [dev_p4](dev_p4.erl.md) + - [dev_patch](dev_patch.erl.md) + - [dev_poda](dev_poda.erl.md) + - [dev_process](dev_process.erl.md) + - [dev_process_cache](dev_process_cache.erl.md) + - [dev_process_worker](dev_process_worker.erl.md) + - [dev_profile](dev_profile.erl.md) + - [dev_push](dev_push.erl.md) + - [dev_query](dev_query.erl.md) + - [dev_query_arweave](dev_query_arweave.erl.md) + - [dev_query_graphql](dev_query_graphql.erl.md) + - [dev_query_test_vectors](dev_query_test_vectors.erl.md) + - [dev_relay](dev_relay.erl.md) + - [dev_router](dev_router.erl.md) + - [dev_scheduler](dev_scheduler.erl.md) + - [dev_scheduler_cache](dev_scheduler_cache.erl.md) + - [dev_scheduler_formats](dev_scheduler_formats.erl.md) + - [dev_scheduler_registry](dev_scheduler_registry.erl.md) + - 
[dev_scheduler_server](dev_scheduler_server.erl.md) + - [dev_simple_pay](dev_simple_pay.erl.md) + - [dev_snp](dev_snp.erl.md) + - [dev_snp_nif](dev_snp_nif.erl.md) + - [dev_stack](dev_stack.erl.md) + - [dev_volume](dev_volume.erl.md) + - [dev_wasi](dev_wasi.erl.md) + - [dev_wasm](dev_wasm.erl.md) + - [dev_whois](dev_whois.erl.md) # HyperBEAM Core diff --git a/docs/book/src/introduction.md b/docs/book/src/introduction.md index 396cbdc09..7603aba13 100644 --- a/docs/book/src/introduction.md +++ b/docs/book/src/introduction.md @@ -16,9 +16,9 @@ This documentation combines source code with comprehensive explanations using a Use the sidebar to browse modules organized by category: - **Arweave Foundation**: Core Arweave protocol implementations -- **Device Framework**: AO device implementations and utilities +- **Devices**: AO device implementations and utilities - **Codec Modules**: Data encoding/decoding functionality -- **Core Services**: Essential HyperBEAM services and components + (Core Services grouped under Devices) - **HyperBEAM Core**: Foundation modules and utilities ## About HyperBEAM diff --git a/docs/erlang-literate-parser.js b/docs/erlang-literate-parser.js index 32506209b..82cf91ea3 100755 --- a/docs/erlang-literate-parser.js +++ b/docs/erlang-literate-parser.js @@ -37,7 +37,8 @@ class ErlangLiterateParser { pendingDoc: '', specFunctionName: '', braceDepth: 0, - parenDepth: 0 + parenDepth: 0, + inlineDocTags: [] }; } @@ -86,10 +87,11 @@ class ErlangLiterateParser { let docLine = trimmed.substring(3).trim(); // Remove @doc if present docLine = docLine.replace(/^@doc\s*/, ''); - if (docLine) moduleDoc.push(docLine); + // Always push the line, even if empty (for paragraph breaks) + moduleDoc.push(docLine); } else if (inModuleDoc && trimmed === '') { - // Empty line in module doc, continue - continue; + // Empty line in module doc, preserve it for paragraph breaks + moduleDoc.push(''); } else if (inModuleDoc && trimmed.startsWith('%')) { // Continue with 
other comment types but end module doc inModuleDoc = false; @@ -116,6 +118,8 @@ class ErlangLiterateParser { // Check for -spec if (trimmed.startsWith('-spec ')) { + // Before collecting the spec, check if there are @param/@returns tags immediately before + this.collectParamTagsBeforeSpec(i); this.collectSpec(i); // Extract function name from spec line const specMatch = trimmed.match(/-spec\s+([a-z][a-z0-9_]*)\s*\(/); @@ -207,6 +211,46 @@ class ErlangLiterateParser { this.currentState.pendingDoc = docLines.join('\n'); } + collectParamTagsBeforeSpec(specIdx) { + const paramLines = []; + let hitDocBlock = false; + + // Look backwards from the -spec line to find @param and @returns tags + for (let i = specIdx - 1; i >= 0; i--) { + const line = this.lines[i]; + const trimmed = line.trim(); + + // If we hit a @doc line, we already collected this documentation + if (trimmed.startsWith('%% @doc')) { + hitDocBlock = true; + break; + } + + // If we hit an empty line or a non-comment line, stop looking backwards + if (trimmed === '' || (!trimmed.startsWith('%%') && !trimmed.startsWith('%'))) { + break; + } + + // Check if this line contains @param or @returns + if (trimmed.startsWith('%%') && (trimmed.includes('@param') || trimmed.includes('@returns'))) { + let docLine = trimmed.substring(2).trim(); + paramLines.unshift(docLine); // Add to beginning to maintain order + } + } + + // Only add the tags if we didn't find a @doc block (meaning these are standalone tags) + if (paramLines.length > 0 && !hitDocBlock) { + const existingDoc = this.currentState.pendingDoc || ''; + const newDoc = paramLines.join('\n'); + + if (existingDoc) { + this.currentState.pendingDoc = existingDoc + '\n' + newDoc; + } else { + this.currentState.pendingDoc = newDoc; + } + } + } + collectSpec(startIdx) { const specLines = []; let depth = 0; @@ -260,6 +304,7 @@ class ErlangLiterateParser { this.currentState.functionLines = []; this.currentState.braceDepth = 0; this.currentState.parenDepth = 0; + 
this.currentState.inlineDocTags = []; } collectFunctionLine(line, lineIdx) { @@ -306,25 +351,58 @@ class ErlangLiterateParser { this.currentState.functionDoc = ''; this.currentState.functionLines = []; this.currentState.specFunctionName = ''; + this.currentState.inlineDocTags = []; } processFunctionBody(lines) { const segments = []; let currentCode = []; - let inCodeBlock = false; + let pendingTagLines = []; + + const flushCode = () => { + if (currentCode.length > 0) { + segments.push({ type: 'code', content: currentCode.join('\n') }); + currentCode = []; + } + }; + + const flushTags = () => { + if (pendingTagLines.length > 0) { + const tagText = pendingTagLines.join('\n'); + const parsed = this.parseDocumentation(tagText); + let docParts = []; + if (parsed.params.length > 0) { + docParts.push('### Parameters'); + docParts.push(''); + for (const p of parsed.params) { + const desc = this.cleanDocumentation(p.description || ''); + docParts.push(`- \`${p.name}\` - ${desc}`); + } + docParts.push(''); + } + if (parsed.returns.length > 0) { + docParts.push('### Returns'); + docParts.push(''); + for (const r of parsed.returns) { + docParts.push(`- ${this.cleanDocumentation(r)}`); + } + docParts.push(''); + } + if (docParts.length > 0) { + segments.push({ type: 'doc', content: docParts.join('\n') }); + } + pendingTagLines = []; + } + }; for (const line of lines) { const trimmed = line.trim(); - // Check for inline comment (% or %% but not %%%) if (trimmed.match(/^\s*%[^%]/) || trimmed.match(/^\s*%%[^%]/)) { - // Save any accumulated code + // It's a comment line + // Save any accumulated code block first if (currentCode.length > 0) { - segments.push({ - type: 'code', - content: currentCode.join('\n') - }); - currentCode = []; + flushCode(); } // Extract comment text (remove % or %% prefix) @@ -334,23 +412,35 @@ class ErlangLiterateParser { } else { commentText = line.replace(/^\s*%\s?/, ''); } - segments.push({ - type: 'comment', - content: 
this.cleanInlineComment(commentText) - }); + const cleaned = this.cleanInlineComment(commentText); + + // Heuristic: returns-like lines (e.g., `{ok, Binary}` / `{error, Binary}` ...) + const returnsLikeTuple = /^`?\{[^}]+\}`?/.test(cleaned); + const returnsLikeAtom = /^`?(ok|error|not_found|true|false)\b/i.test(cleaned); + const isTagParam = /^\s*@param\b/i.test(cleaned); + const isTagReturns = /^\s*@returns?\b/i.test(cleaned); + + if (isTagParam || isTagReturns || returnsLikeTuple || returnsLikeAtom) { + const lineAsTag = isTagParam || isTagReturns + ? cleaned.trim() + : `@returns ${cleaned.trim()}`; + // Accumulate tag lines + pendingTagLines.push(lineAsTag); + } else { + // Flush any pending tag block before emitting a normal comment + flushTags(); + segments.push({ type: 'comment', content: cleaned }); + } } else { - // Regular code line + // Non-comment code line; flush any pending tags first, then add code + flushTags(); currentCode.push(line); } } - // Add any remaining code - if (currentCode.length > 0) { - segments.push({ - type: 'code', - content: currentCode.join('\n') - }); - } + // Flush any remaining tag or code blocks + flushTags(); + flushCode(); return segments; } @@ -363,13 +453,102 @@ class ErlangLiterateParser { cleanDocumentation(text) { if (!text) return ''; + // Handle
 tags with structured content
+        text = text.replace(/
([\s\S]*?)<\/pre>/g, (match, content) => {
+            return this.formatPreContent(content);
+        });
+
         // Convert Erlang doc syntax to Markdown
         return text
             .replace(/`([^']*?)'/g, '`$1`')  // Convert `code' to `code`
             .replace(/<</g, '<<')       // Fix HTML entities
             .replace(/>>/g, '>>')
             .replace(/@doc\s*/g, '')          // Remove @doc tags
-            .trim();
+            .replace(/\n\s*\n\s*\n/g, '\n\n')   // Normalize multiple empty lines to double newlines
+            .replace(/[ \t]+$/gm, '')               // Trim trailing spaces per line
+            .replace(/^\s+|\s+$/g, '');            // Final trim
+    }
+
+    formatPreContent(content) {
+        // First, let's look at the actual structure of the content more carefully
+        // The issue is that definitions span multiple lines with varying indentation
+
+        const lines = content.trim().split('\n');
+        const formatted = [];
+
+        let i = 0;
+        while (i < lines.length) {
+            const line = lines[i].trim();
+
+            if (!line) {
+                i++;
+                continue;
+            }
+
+            // Look for definition pattern: starts with word(s), colon, then description
+            // Pattern: "DevMod:ExportedFunc : Description" or "info/exports : Description"
+            const defMatch = line.match(/^(\S+(?:\s*:\s*\S+)?)\s*:\s*(.*)$/);
+
+            if (defMatch) {
+                const [, term, initialDesc] = defMatch;
+                let fullDescription = initialDesc.trim();
+
+                // Collect continuation lines for this definition
+                let j = i + 1;
+                while (j < lines.length) {
+                    const nextLine = lines[j];
+
+                    // Empty line - check if there's more content
+                    if (!nextLine.trim()) {
+                        j++;
+                        continue;
+                    }
+
+                    // If it looks like a new definition, stop
+                    if (nextLine.trim().match(/^\S+(?:\s*:\s*\S+)?\s*:\s*/)) {
+                        break;
+                    }
+
+                    // This is a continuation line - add it to the description
+                    if (nextLine.trim()) {
+                        fullDescription += ' ' + nextLine.trim();
+                    }
+                    j++;
+                }
+
+                // Format the definition
+                formatted.push('');
+                formatted.push(`**${term.trim()}**`);
+                formatted.push('');
+                formatted.push(fullDescription);
+
+                i = j; // Move to the next unprocessed line
+            } else {
+                // Not a definition - handle as regular content
+                if (line.toLowerCase().includes('hyperbeam') && line.includes('options')) {
+                    formatted.push('');
+                    formatted.push(`### ${line}`);
+                    formatted.push('');
+                } else if (line.match(/^`[^`]+`\s*:/)) {
+                    // Special case for option definitions like `update_hashpath`:
+                    const optMatch = line.match(/^(`[^`]+`)\s*:\s*(.*)$/);
+                    if (optMatch) {
+                        const [, optName, optDesc] = optMatch;
+                        formatted.push('');
+                        formatted.push(`**${optName}**`);
+                        formatted.push('');
+                        formatted.push(optDesc);
+                    } else {
+                        formatted.push(line);
+                    }
+                } else {
+                    formatted.push(line);
+                }
+                i++;
+            }
+        }
+
+        return formatted.join('\n');
     }
 
     parseDocumentation(docText) {
@@ -460,78 +639,124 @@ class ErlangLiterateParser {
             md.push('');
         }
 
+        // Group functions by name to merge overloaded functions
+        const groupedFunctions = this.groupFunctionsByName(this.functions);
+
         // Functions
-        for (const func of this.functions) {
-            md.push(`## ${func.name}`);
+        for (const group of groupedFunctions) {
+            md.push(`## ${group.name}`);
             md.push('');
 
-            // Parse and format documentation
+            // Combine documentation from all functions in the group
+            const combinedDoc = this.combineFunctionDocs(group.functions);
+            if (combinedDoc) {
+                md.push(combinedDoc);
+                md.push('');
+            }
+
+            // Add all specs and bodies for the function group
+            for (const func of group.functions) {
+                // Spec
+                if (func.spec) {
+                    md.push('```erlang');
+                    md.push(func.spec.trim());
+                    md.push('```');
+                    md.push('');
+                }
+
+                // Function body with inline comments
+                if (func.body && func.body.length > 0) {
+                    md.push('');
+
+                    for (const segment of func.body) {
+                        if (segment.type === 'comment') {
+                            md.push(segment.content);
+                            md.push('');
+                        } else if (segment.type === 'doc') {
+                            // Insert structured params/returns adjacent to the preceding code
+                            md.push(segment.content);
+                            md.push('');
+                        } else if (segment.type === 'code') {
+                            md.push('```erlang');
+                            md.push(segment.content.trim());
+                            md.push('```');
+                            md.push('');
+                        }
+                    }
+                }
+            }
+
+            md.push('');
+        }
+
+        // Footer
+        md.push('---');
+        md.push('');
+        md.push(`*Generated from [${fileName}](${githubUrl})*`);
+
+        return md.join('\n');
+    }
+
+    groupFunctionsByName(functions) {
+        const groups = [];
+        let currentGroup = null;
+
+        for (const func of functions) {
+            if (!currentGroup || currentGroup.name !== func.name) {
+                // Start a new group
+                currentGroup = {
+                    name: func.name,
+                    functions: [func]
+                };
+                groups.push(currentGroup);
+            } else {
+                // Add to current group
+                currentGroup.functions.push(func);
+            }
+        }
+
+        return groups;
+    }
+
+    combineFunctionDocs(functions) {
+        // Use the documentation from the first function that has it
+        // In practice, usually only the first clause of an overloaded function has detailed docs
+        for (const func of functions) {
             if (func.doc) {
                 const parsed = this.parseDocumentation(func.doc);
+                let combinedDoc = [];
 
                 // Description
                 if (parsed.description.length > 0) {
-                    md.push(this.cleanDocumentation(parsed.description.join(' ')));
-                    md.push('');
+                    combinedDoc.push(this.cleanDocumentation(parsed.description.join('\n')));
+                    combinedDoc.push('');
                 }
 
                 // Parameters
                 if (parsed.params.length > 0) {
-                    md.push('### Parameters');
-                    md.push('');
+                    combinedDoc.push('### Parameters');
+                    combinedDoc.push('');
                     for (const param of parsed.params) {
                         const desc = this.cleanDocumentation(param.description);
-                        md.push(`- \`${param.name}\` - ${desc}`);
+                        combinedDoc.push(`- \`${param.name}\` - ${desc}`);
                     }
-                    md.push('');
+                    combinedDoc.push('');
                 }
 
                 // Returns
                 if (parsed.returns.length > 0) {
-                    md.push('### Returns');
-                    md.push('');
+                    combinedDoc.push('### Returns');
+                    combinedDoc.push('');
                     for (const ret of parsed.returns) {
-                        md.push(`- ${this.cleanDocumentation(ret)}`);
+                        combinedDoc.push(`- ${this.cleanDocumentation(ret)}`);
                     }
-                    md.push('');
+                    combinedDoc.push('');
                 }
-            }
-
-            // Spec
-            if (func.spec) {
-                md.push('```erlang');
-                md.push(func.spec.trim());
-                md.push('```');
-                md.push('');
-            }
-
-            // Function body with inline comments
-            if (func.body && func.body.length > 0) {
-                // md.push('### Function');
-                md.push('');
 
-                for (const segment of func.body) {
-                    if (segment.type === 'comment') {
-                        md.push(segment.content);
-                        md.push('');
-                    } else if (segment.type === 'code') {
-                        md.push('```erlang');
-                        md.push(segment.content.trim());
-                        md.push('```');
-                        md.push('');
-                    }
-                }
+                return combinedDoc.join('\n');
             }
-
-            md.push('');
         }
-
-        // Footer
-        md.push('---');
-        md.push('');
-        md.push(`*Generated from [${fileName}](${githubUrl})*`);
-
-        return md.join('\n');
+        return null;
     }
 }
 

From 621269882ebbd0cefa95f3a5f94e4928f9d08055 Mon Sep 17 00:00:00 2001
From: Dylan Shade <63427984+dpshade@users.noreply.github.com>
Date: Fri, 19 Sep 2025 16:57:52 -0400
Subject: [PATCH 14/17] docs: Refactor Erlang literate parser for improved
 return formatting and documentation handling

- Update return processing to use `splitReturnsIntoOutcomes` and `formatReturnsText` for better output formatting.
- Append continuation lines to the preceding `@returns`/`@param` tag block so multi-line tags stay together in the generated documentation.
- Introduce `reflowNumberedLists` method to ensure proper formatting of numbered lists in generated documentation.
---
 docs/erlang-literate-parser.js | 154 +++++++++++++++++++++++++++++++--
 1 file changed, 146 insertions(+), 8 deletions(-)

diff --git a/docs/erlang-literate-parser.js b/docs/erlang-literate-parser.js
index 82cf91ea3..5f276307c 100755
--- a/docs/erlang-literate-parser.js
+++ b/docs/erlang-literate-parser.js
@@ -383,8 +383,9 @@ class ErlangLiterateParser {
                 if (parsed.returns.length > 0) {
                     docParts.push('### Returns');
                     docParts.push('');
-                    for (const r of parsed.returns) {
-                        docParts.push(`- ${this.cleanDocumentation(r)}`);
+                    const expanded = parsed.returns.flatMap(r => this.splitReturnsIntoOutcomes(r));
+                    for (const r of expanded) {
+                        docParts.push(`- ${this.formatReturnsText(r)}`);
                     }
                     docParts.push('');
                 }
@@ -426,6 +427,13 @@ class ErlangLiterateParser {
                         : `@returns ${cleaned.trim()}`;
                     // Accumulate tag lines
                     pendingTagLines.push(lineAsTag);
+                } else if (
+                    pendingTagLines.length > 0 &&
+                    (pendingTagLines[pendingTagLines.length - 1].startsWith('@returns') ||
+                     pendingTagLines[pendingTagLines.length - 1].startsWith('@param'))
+                ) {
+                    // Continuation of the previous @returns/@param block; append line
+                    pendingTagLines.push(cleaned.trim());
                 } else {
                     // Flush any pending tag block before emitting a normal comment
                     flushTags();
@@ -459,7 +467,7 @@ class ErlangLiterateParser {
         });
 
         // Convert Erlang doc syntax to Markdown
-        return text
+        let cleaned = text
             .replace(/`([^']*?)'/g, '`$1`')  // Convert `code' to `code`
             .replace(/<</g, '<<')       // Fix HTML entities
             .replace(/>>/g, '>>')
@@ -467,6 +475,118 @@ class ErlangLiterateParser {
             .replace(/\n\s*\n\s*\n/g, '\n\n')   // Normalize multiple empty lines to double newlines
             .replace(/[ \t]+$/gm, '')               // Trim trailing spaces per line
             .replace(/^\s+|\s+$/g, '');            // Final trim
+
+        // Reflow numbered lists and ensure separation from following headings/labels
+        cleaned = this.reflowNumberedLists(cleaned);
+
+        return cleaned;
+    }
+
+    formatReturnsText(text) {
+        if (!text) return '';
+        // First, clean the documentation text
+        let result = this.cleanDocumentation(text);
+
+        // Wrap leading return token if it's a tuple/list or common atom
+        const leadingMatch = result.match(/^(\s*)(\{[^}]+\}|\[[^\]]+\]|ok|error|not_found|true|false)(\b|\s|$)/i);
+        if (leadingMatch) {
+            const [, leadSpace, token, trail] = leadingMatch;
+            result = leadSpace + '`' + token + '`' + result.slice(leadSpace.length + token.length);
+        }
+
+        // Wrap any standalone tuple occurrences not already inside backticks
+        result = result.replace(/(^|[^`])(\{[^}]+\})([^`]|$)/g, (m, pre, tuple, post) => {
+            return `${pre}\`${tuple}\`${post}`;
+        });
+
+        return result;
+    }
+
+    splitReturnsIntoOutcomes(text) {
+        if (!text) return [];
+        const s = this.cleanDocumentation(text);
+        const tokenRegex = /(\{[^}]+\}|\bok\b|\berror\b|\bnot_found\b|\btrue\b|\bfalse\b)/gi;
+        const parts = [];
+        let match;
+        const matches = [];
+        while ((match = tokenRegex.exec(s)) !== null) {
+            matches.push({ index: match.index, token: match[0] });
+        }
+        // If no tokens or prose exists before the first token, don't split; keep as one descriptive line
+        if (matches.length === 0 || (matches.length > 0 && matches[0].index > 0)) {
+            return [s.trim()];
+        }
+        for (let i = 0; i < matches.length; i++) {
+            const start = matches[i].index;
+            const nextStart = (i + 1 < matches.length) ? matches[i + 1].index : s.length;
+            let segment = s.slice(start, nextStart).trim();
+            // Remove leading commas that were used as separators
+            segment = segment.replace(/^,\s*/, '');
+            // If there's trailing comma before next token, trim it but keep sentence end
+            segment = segment.replace(/,\s*$/, '');
+            if (segment) parts.push(segment.trim());
+        }
+        // If we accidentally merged two outcomes without clear token boundaries, ensure uniqueness
+        return parts.filter(p => p.length > 0);
+    }
+
+    reflowNumberedLists(text) {
+        if (!text) return '';
+        const lines = text.split('\n');
+        const out = [];
+        let inNumbered = false;
+        let lastNumIndex = -1;
+        for (let i = 0; i < lines.length; i++) {
+            const raw = lines[i];
+            const trimmed = raw.trim();
+
+            const isNumbered = /^\d+\.\s/.test(trimmed);
+            const isBullet = /^[-*]\s/.test(trimmed);
+            const isHeading = /^#{1,6}\s/.test(trimmed);
+            const isCodeFence = /^```/.test(trimmed);
+
+            if (isNumbered) {
+                out.push(trimmed);
+                inNumbered = true;
+                lastNumIndex = out.length - 1;
+                continue;
+            }
+
+            if (inNumbered) {
+                if (trimmed === '') {
+                    out.push('');
+                    inNumbered = false;
+                    lastNumIndex = -1;
+                    continue;
+                }
+                if (!isNumbered && !isBullet && !isHeading && !isCodeFence) {
+                    // Continuation of previous numbered item; append
+                    out[lastNumIndex] = out[lastNumIndex] + ' ' + trimmed;
+                    continue;
+                }
+                // Different kind of line; end numbered block and fall through
+                inNumbered = false;
+                lastNumIndex = -1;
+            }
+
+            out.push(raw);
+        }
+
+        // Ensure a blank line between last numbered item and a label/heading line like 'Config options ...:'
+        const separated = [];
+        for (let i = 0; i < out.length; i++) {
+            const cur = out[i];
+            const next = i + 1 < out.length ? out[i + 1] : '';
+            separated.push(cur);
+            if (/^\d+\.\s/.test(cur.trim()) && next && !/^\s*$/.test(next) && /:\s*$/.test(next.trim())) {
+                // Insert a blank line if not already present
+                if (separated[separated.length - 1] !== '') {
+                    separated.push('');
+                }
+            }
+        }
+
+        return separated.join('\n');
     }
 
     formatPreContent(content) {
@@ -561,6 +681,7 @@ class ErlangLiterateParser {
 
         let currentSection = 'description';
         let currentParam = null;
+        let lastReturnIndex = -1;
 
         for (const line of lines) {
             const trimmed = line.trim();
@@ -587,17 +708,33 @@ class ErlangLiterateParser {
                 }
                 const returnsText = trimmed.replace(/^@returns?\s*/, '');
                 result.returns.push(returnsText);
+                lastReturnIndex = result.returns.length - 1;
                 currentSection = 'returns';
                 continue;
             }
 
             // Add to current section
-            if (currentSection === 'description' && trimmed) {
-                result.description.push(trimmed);
+            if (currentSection === 'description') {
+                if (trimmed) {
+                    result.description.push(trimmed);
+                } else {
+                    // Preserve a single blank line to break paragraphs/lists
+                    const last = result.description[result.description.length - 1];
+                    if (last !== '') {
+                        result.description.push('');
+                    }
+                }
             } else if (currentSection === 'param' && currentParam && trimmed) {
                 currentParam.description += ' ' + trimmed;
             } else if (currentSection === 'returns' && trimmed) {
-                result.returns.push(trimmed);
+                if (lastReturnIndex >= 0) {
+                    // Append continuation text to the last returns entry
+                    result.returns[lastReturnIndex] =
+                        (result.returns[lastReturnIndex] + ' ' + trimmed).replace(/\s+/g, ' ').trim();
+                } else {
+                    result.returns.push(trimmed);
+                    lastReturnIndex = result.returns.length - 1;
+                }
             }
         }
 
@@ -747,8 +884,9 @@ class ErlangLiterateParser {
                 if (parsed.returns.length > 0) {
                     combinedDoc.push('### Returns');
                     combinedDoc.push('');
-                    for (const ret of parsed.returns) {
-                        combinedDoc.push(`- ${this.cleanDocumentation(ret)}`);
+                    const expanded = parsed.returns.flatMap(r => this.splitReturnsIntoOutcomes(r));
+                    for (const ret of expanded) {
+                        combinedDoc.push(`- ${this.formatReturnsText(ret)}`);
                     }
                     combinedDoc.push('');
                 }

From 1dd667e6516b7b9f65ffb612a47c47bf5bb220a6 Mon Sep 17 00:00:00 2001
From: Dylan Shade <63427984+dpshade@users.noreply.github.com>
Date: Fri, 19 Sep 2025 17:27:39 -0400
Subject: [PATCH 15/17] docs: Refactor Erlang literate parser for improved
 performance and documentation handling

- Introduce character codes and string constants for faster comparisons and reduced memory allocations.
- Optimize regex patterns for performance and clarity in parsing Erlang files.
- Enhance the reset method to initialize module information with default values.
- Update function processing to utilize constants for improved readability and maintainability.
- Revise documentation generation methods to ensure consistent formatting and structure in output.
---
 docs/book/src/SUMMARY.md       | 126 +++---
 docs/erlang-literate-parser.js | 773 ++++++++++++++++-----------------
 2 files changed, 429 insertions(+), 470 deletions(-)

diff --git a/docs/book/src/SUMMARY.md b/docs/book/src/SUMMARY.md
index b2f5260c3..64394767e 100644
--- a/docs/book/src/SUMMARY.md
+++ b/docs/book/src/SUMMARY.md
@@ -11,6 +11,24 @@
 - [ar_tx](ar_tx.erl.md)
 - [ar_wallet](ar_wallet.erl.md)
 
+# Codec Modules
+
+- [dev_codec_ans104](dev_codec_ans104.erl.md)
+- [dev_codec_ans104_from](dev_codec_ans104_from.erl.md)
+- [dev_codec_ans104_to](dev_codec_ans104_to.erl.md)
+- [dev_codec_cookie](dev_codec_cookie.erl.md)
+- [dev_codec_cookie_auth](dev_codec_cookie_auth.erl.md)
+- [dev_codec_cookie_test_vectors](dev_codec_cookie_test_vectors.erl.md)
+- [dev_codec_flat](dev_codec_flat.erl.md)
+- [dev_codec_http_auth](dev_codec_http_auth.erl.md)
+- [dev_codec_httpsig](dev_codec_httpsig.erl.md)
+- [dev_codec_httpsig_conv](dev_codec_httpsig_conv.erl.md)
+- [dev_codec_httpsig_keyid](dev_codec_httpsig_keyid.erl.md)
+- [dev_codec_httpsig_proxy](dev_codec_httpsig_proxy.erl.md)
+- [dev_codec_httpsig_siginfo](dev_codec_httpsig_siginfo.erl.md)
+- [dev_codec_json](dev_codec_json.erl.md)
+- [dev_codec_structured](dev_codec_structured.erl.md)
+
 # Devices
 
 - [dev_apply](dev_apply.erl.md)
@@ -30,70 +48,50 @@
 - [dev_arweave_block_cache](dev_arweave_block_cache.erl.md)
 - [dev_auth_hook](dev_auth_hook.erl.md)
 - [dev_secret](dev_secret.erl.md)
-
-# Codec Modules
-
-- [dev_codec_ans104](dev_codec_ans104.erl.md)
-- [dev_codec_ans104_from](dev_codec_ans104_from.erl.md)
-- [dev_codec_ans104_to](dev_codec_ans104_to.erl.md)
-- [dev_codec_cookie](dev_codec_cookie.erl.md)
-- [dev_codec_cookie_auth](dev_codec_cookie_auth.erl.md)
-- [dev_codec_cookie_test_vectors](dev_codec_cookie_test_vectors.erl.md)
-- [dev_codec_flat](dev_codec_flat.erl.md)
-- [dev_codec_http_auth](dev_codec_http_auth.erl.md)
-- [dev_codec_httpsig](dev_codec_httpsig.erl.md)
-- [dev_codec_httpsig_conv](dev_codec_httpsig_conv.erl.md)
-- [dev_codec_httpsig_keyid](dev_codec_httpsig_keyid.erl.md)
-- [dev_codec_httpsig_proxy](dev_codec_httpsig_proxy.erl.md)
-- [dev_codec_httpsig_siginfo](dev_codec_httpsig_siginfo.erl.md)
-- [dev_codec_json](dev_codec_json.erl.md)
-- [dev_codec_structured](dev_codec_structured.erl.md)
-
- 
- - [dev_delegated_compute](dev_delegated_compute.erl.md)
- - [dev_genesis_wasm](dev_genesis_wasm.erl.md)
- - [dev_green_zone](dev_green_zone.erl.md)
- - [dev_hook](dev_hook.erl.md)
- - [dev_hyperbuddy](dev_hyperbuddy.erl.md)
- - [dev_json_iface](dev_json_iface.erl.md)
- - [dev_local_name](dev_local_name.erl.md)
- - [dev_lookup](dev_lookup.erl.md)
- - [dev_lua](dev_lua.erl.md)
- - [dev_lua_lib](dev_lua_lib.erl.md)
- - [dev_lua_test](dev_lua_test.erl.md)
- - [dev_lua_test_ledgers](dev_lua_test_ledgers.erl.md)
- - [dev_manifest](dev_manifest.erl.md)
- - [dev_message](dev_message.erl.md)
- - [dev_meta](dev_meta.erl.md)
- - [dev_name](dev_name.erl.md)
- - [dev_node_process](dev_node_process.erl.md)
- - [dev_p4](dev_p4.erl.md)
- - [dev_patch](dev_patch.erl.md)
- - [dev_poda](dev_poda.erl.md)
- - [dev_process](dev_process.erl.md)
- - [dev_process_cache](dev_process_cache.erl.md)
- - [dev_process_worker](dev_process_worker.erl.md)
- - [dev_profile](dev_profile.erl.md)
- - [dev_push](dev_push.erl.md)
- - [dev_query](dev_query.erl.md)
- - [dev_query_arweave](dev_query_arweave.erl.md)
- - [dev_query_graphql](dev_query_graphql.erl.md)
- - [dev_query_test_vectors](dev_query_test_vectors.erl.md)
- - [dev_relay](dev_relay.erl.md)
- - [dev_router](dev_router.erl.md)
- - [dev_scheduler](dev_scheduler.erl.md)
- - [dev_scheduler_cache](dev_scheduler_cache.erl.md)
- - [dev_scheduler_formats](dev_scheduler_formats.erl.md)
- - [dev_scheduler_registry](dev_scheduler_registry.erl.md)
- - [dev_scheduler_server](dev_scheduler_server.erl.md)
- - [dev_simple_pay](dev_simple_pay.erl.md)
- - [dev_snp](dev_snp.erl.md)
- - [dev_snp_nif](dev_snp_nif.erl.md)
- - [dev_stack](dev_stack.erl.md)
- - [dev_volume](dev_volume.erl.md)
- - [dev_wasi](dev_wasi.erl.md)
- - [dev_wasm](dev_wasm.erl.md)
- - [dev_whois](dev_whois.erl.md)
+- [dev_delegated_compute](dev_delegated_compute.erl.md)
+- [dev_genesis_wasm](dev_genesis_wasm.erl.md)
+- [dev_green_zone](dev_green_zone.erl.md)
+- [dev_hook](dev_hook.erl.md)
+- [dev_hyperbuddy](dev_hyperbuddy.erl.md)
+- [dev_json_iface](dev_json_iface.erl.md)
+- [dev_local_name](dev_local_name.erl.md)
+- [dev_lookup](dev_lookup.erl.md)
+- [dev_lua](dev_lua.erl.md)
+- [dev_lua_lib](dev_lua_lib.erl.md)
+- [dev_lua_test](dev_lua_test.erl.md)
+- [dev_lua_test_ledgers](dev_lua_test_ledgers.erl.md)
+- [dev_manifest](dev_manifest.erl.md)
+- [dev_message](dev_message.erl.md)
+- [dev_meta](dev_meta.erl.md)
+- [dev_name](dev_name.erl.md)
+- [dev_node_process](dev_node_process.erl.md)
+- [dev_p4](dev_p4.erl.md)
+- [dev_patch](dev_patch.erl.md)
+- [dev_poda](dev_poda.erl.md)
+- [dev_process](dev_process.erl.md)
+- [dev_process_cache](dev_process_cache.erl.md)
+- [dev_process_worker](dev_process_worker.erl.md)
+- [dev_profile](dev_profile.erl.md)
+- [dev_push](dev_push.erl.md)
+- [dev_query](dev_query.erl.md)
+- [dev_query_arweave](dev_query_arweave.erl.md)
+- [dev_query_graphql](dev_query_graphql.erl.md)
+- [dev_query_test_vectors](dev_query_test_vectors.erl.md)
+- [dev_relay](dev_relay.erl.md)
+- [dev_router](dev_router.erl.md)
+- [dev_scheduler](dev_scheduler.erl.md)
+- [dev_scheduler_cache](dev_scheduler_cache.erl.md)
+- [dev_scheduler_formats](dev_scheduler_formats.erl.md)
+- [dev_scheduler_registry](dev_scheduler_registry.erl.md)
+- [dev_scheduler_server](dev_scheduler_server.erl.md)
+- [dev_simple_pay](dev_simple_pay.erl.md)
+- [dev_snp](dev_snp.erl.md)
+- [dev_snp_nif](dev_snp_nif.erl.md)
+- [dev_stack](dev_stack.erl.md)
+- [dev_volume](dev_volume.erl.md)
+- [dev_wasi](dev_wasi.erl.md)
+- [dev_wasm](dev_wasm.erl.md)
+- [dev_whois](dev_whois.erl.md)
 
 # HyperBEAM Core
 
diff --git a/docs/erlang-literate-parser.js b/docs/erlang-literate-parser.js
index 5f276307c..56dfd914c 100755
--- a/docs/erlang-literate-parser.js
+++ b/docs/erlang-literate-parser.js
@@ -12,7 +12,79 @@
 
 import fs from 'fs';
 import path from 'path';
-import { execSync } from 'child_process';
+
+// Character codes for faster comparisons
+const CHAR_CODES = {
+    PERCENT: 37,        // '%'
+    DASH: 45,          // '-'
+    OPEN_PAREN: 40,    // '('
+    CLOSE_PAREN: 41,   // ')'
+    OPEN_BRACE: 123,   // '{'
+    CLOSE_BRACE: 125,  // '}'
+    OPEN_BRACKET: 91,  // '['
+    CLOSE_BRACKET: 93, // ']'
+    DOT: 46            // '.'
+};
+
+// String constants to avoid repeated allocations
+const STRINGS = {
+    EMPTY: '',
+    SPACE: ' ',
+    NEWLINE: '\n',
+    TRIPLE_PERCENT: '%%%',
+    DOUBLE_PERCENT: '%%',
+    SINGLE_PERCENT: '%',
+    SPEC_PREFIX: '-spec ',
+    ERLANG: 'erlang',
+    BACKTICK: '`',
+    PARAM_TAG: '@param',
+    RETURNS_TAG: '@returns',
+    PARAMETERS_HEADER: '### Parameters',
+    RETURNS_HEADER: '### Returns',
+    EXPORTED_FUNCTIONS: '## Exported Functions',
+    SEPARATOR: '---'
+};
+
+// Precompiled regex patterns for performance
+const REGEX = {
+    MODULE: /^-module\(([^)]+)\)/,
+    EXPORT: /^-export\(\[([^\]]+)\]\)/,
+    EXPORT_PREFIX: /^-export\(\[/,
+    SPEC: /^-spec\s+([a-z][a-z0-9_]*)\s*\(/,
+    FUNCTION: /^([a-z][a-z0-9_]*)\s*\(/,
+    DOC_BLOCK_START: /^%% @doc/,
+    COMMENT_SINGLE: /^\s*%[^%]/,
+    COMMENT_DOUBLE: /^\s*%%[^%]/,
+    BACKTICK_QUOTE: /`([^']*?)'/g,
+    HTML_ENTITIES_LT: /<</g,
+    HTML_ENTITIES_GT: />>/g,
+    PRE_TAG: /<pre>([\s\S]*?)<\/pre>/g,
+    // Match numbered list items that start with either "1." or "1:" (allow optional space before the punctuation)
+    NUMBERED_LIST: /^\d+\s*[.:]\s/,
+    BULLET_LIST: /^[-*]\s/,
+    HEADING: /^#{1,6}\s/,
+    CODE_FENCE: /^```/,
+    RETURNS_TOKENS: /(\{[^}]+\}|\bok\b|\berror\b|\bnot_found\b|\btrue\b|\bfalse\b)/gi,
+    LEADING_RETURN_TOKEN: /^(\s*)(\{[^}]+\}|\[[^\]]+\]|ok|error|not_found|true|false)(\b|\s|$)/i,
+    STANDALONE_TUPLE: /(^|[^`])(\{[^}]+\})([^`]|$)/g,
+    PARAM: /^@param\s+(\S+)\s*(.*)/,
+    RETURNS: /^@returns?\s*/,
+    OPTION_DEF: /^(`[^`]+`)\s*:\s*(.*)$/,
+    DEFINITION: /^(\S+(?:\s*:\s*\S+)?)\s*:\s*(.*)$/,
+    MULTIPLE_NEWLINES: /\n\s*\n\s*\n/g,
+    TRAILING_SPACES: /[ \t]+$/gm,
+    TRIM: /^\s+|\s+$/g,
+    RETURNS_LIKE_TUPLE: /^`?\{[^}]+\}`?/,
+    RETURNS_LIKE_ATOM: /^`?(ok|error|not_found|true|false)\b/i,
+    REMOVE_DOC: /@doc\s*/g,
+    WHITESPACE_NORMALIZE: /\s+/g,
+    COMMA_START: /^,\s*/,
+    COMMA_END: /,\s*$/,
+    EMPTY_LINE: /^\s*$/,
+    COLON_END: /:\s*$/,
+    COMMENT_DOUBLE_PREFIX: /^\s*%%\s?/,
+    COMMENT_SINGLE_PREFIX: /^\s*%\s?/
+};
 
 class ErlangLiterateParser {
     constructor(options = {}) {
@@ -26,16 +98,16 @@ class ErlangLiterateParser {
 
     reset() {
         this.lines = [];
-        this.moduleInfo = {};
+        this.moduleInfo = { name: null, doc: null, exports: null };
         this.functions = [];
         this.currentState = {
             inFunction: false,
-            functionName: '',
-            functionSpec: '',
-            functionDoc: '',
+            functionName: STRINGS.EMPTY,
+            functionSpec: STRINGS.EMPTY,
+            functionDoc: STRINGS.EMPTY,
             functionLines: [],
-            pendingDoc: '',
-            specFunctionName: '',
+            pendingDoc: STRINGS.EMPTY,
+            specFunctionName: STRINGS.EMPTY,
             braceDepth: 0,
             parenDepth: 0,
             inlineDocTags: []
@@ -45,296 +117,248 @@ class ErlangLiterateParser {
     parseFile(filePath) {
         const content = fs.readFileSync(filePath, 'utf8');
         this.reset();
-        this.lines = content.split('\n');
+        this.lines = content.split(STRINGS.NEWLINE);
 
-        // Extract module-level information
         this.extractModuleInfo();
-
-        // Process all lines for functions
         this.processFunctions();
 
-        // Generate the markdown
         return this.generateMarkdown(path.basename(filePath));
     }
 
     extractModuleInfo() {
-        let moduleDoc = [];
+        const moduleDoc = [];
         let inModuleDoc = false;
+        const linesLength = this.lines.length;
 
-        for (let i = 0; i < this.lines.length; i++) {
+        for (let i = 0; i < linesLength; i++) {
             const line = this.lines[i];
             const trimmed = line.trim();
 
-            // Module name
-            if (trimmed.match(/^-module\(([^)]+)\)/)) {
-                this.moduleInfo.name = trimmed.match(/^-module\(([^)]+)\)/)[1];
+            if (!trimmed) {
+                if (inModuleDoc) moduleDoc.push(STRINGS.EMPTY);
+                continue;
+            }
+
+            // Fast character check before regex
+            const firstChar = trimmed.charCodeAt(0);
+
+            // Module name - check for dash first
+            if (firstChar === CHAR_CODES.DASH && trimmed.startsWith('-module(')) {
+                const moduleMatch = trimmed.match(REGEX.MODULE);
+                if (moduleMatch) {
+                    this.moduleInfo.name = moduleMatch[1];
+                }
+                continue;
             }
 
-            // Exports
-            if (trimmed.match(/^-export\(\[/)) {
-                const exportMatch = trimmed.match(/^-export\(\[([^\]]+)\]\)/);
+            // Exports - check for dash first
+            if (firstChar === CHAR_CODES.DASH && REGEX.EXPORT_PREFIX.test(trimmed)) {
+                const exportMatch = trimmed.match(REGEX.EXPORT);
                 if (exportMatch) {
                     this.moduleInfo.exports = exportMatch[1]
                         .split(',')
                         .map(e => e.trim())
-                        .filter(e => e);
+                        .filter(Boolean);
                 }
+                continue;
             }
 
-            // Module documentation (%%% comments at the top)
-            if (trimmed.startsWith('%%%')) {
+            // Module documentation - check for percent first
+            if (firstChar === CHAR_CODES.PERCENT && trimmed.startsWith(STRINGS.TRIPLE_PERCENT)) {
                 inModuleDoc = true;
                 let docLine = trimmed.substring(3).trim();
-                // Remove @doc if present
-                docLine = docLine.replace(/^@doc\s*/, '');
-                // Always push the line, even if empty (for paragraph breaks)
+                docLine = docLine.replace(REGEX.REMOVE_DOC, STRINGS.EMPTY);
                 moduleDoc.push(docLine);
-            } else if (inModuleDoc && trimmed === '') {
-                // Empty line in module doc, preserve it for paragraph breaks
-                moduleDoc.push('');
-            } else if (inModuleDoc && trimmed.startsWith('%')) {
-                // Continue with other comment types but end module doc
-                inModuleDoc = false;
-                break;
-            } else if (inModuleDoc && trimmed.startsWith('-')) {
-                // Hit a module directive, end of module doc
+            } else if (inModuleDoc && (firstChar === CHAR_CODES.PERCENT || firstChar === CHAR_CODES.DASH)) {
                 break;
             }
         }
 
-        this.moduleInfo.doc = this.cleanDocumentation(moduleDoc.join('\n'));
+        this.moduleInfo.doc = this.cleanDocumentation(moduleDoc.join(STRINGS.NEWLINE));
     }
 
     processFunctions() {
-        for (let i = 0; i < this.lines.length; i++) {
+        const linesLength = this.lines.length;
+
+        for (let i = 0; i < linesLength; i++) {
             const line = this.lines[i];
             const trimmed = line.trim();
 
+            if (!trimmed) continue;
+
             // Check for start of function documentation block
-            if (trimmed.startsWith('%% @doc')) {
+            if (REGEX.DOC_BLOCK_START.test(trimmed)) {
                 this.collectFunctionDoc(i);
                 continue;
             }
 
             // Check for -spec
-            if (trimmed.startsWith('-spec ')) {
-                // Before collecting the spec, check if there are @param/@returns tags immediately before
+            if (trimmed.startsWith(STRINGS.SPEC_PREFIX)) {
                 this.collectParamTagsBeforeSpec(i);
                 this.collectSpec(i);
-                // Extract function name from spec line
-                const specMatch = trimmed.match(/-spec\s+([a-z][a-z0-9_]*)\s*\(/);
+                const specMatch = trimmed.match(REGEX.SPEC);
                 if (specMatch) {
                     this.currentState.specFunctionName = specMatch[1];
                 }
                 continue;
             }
 
-            // Check for function start (handles both start-of-line and indented functions)
-            const funcMatch = trimmed.match(/^([a-z][a-z0-9_]*)\s*\(/);
+            // Check for function start
+            const funcMatch = trimmed.match(REGEX.FUNCTION);
             if (funcMatch && !this.currentState.inFunction) {
-                // Save any pending doc
                 if (this.currentState.pendingDoc) {
                     this.currentState.functionDoc = this.currentState.pendingDoc;
-                    this.currentState.pendingDoc = '';
+                    this.currentState.pendingDoc = STRINGS.EMPTY;
                 }
 
-                // Use function name from spec if available, otherwise use detected name
                 const functionName = this.currentState.specFunctionName || funcMatch[1];
-                this.startFunction(functionName, i);
-
-                // Clear the spec function name after use
-                this.currentState.specFunctionName = '';
+                this.startFunction(functionName);
+                this.currentState.specFunctionName = STRINGS.EMPTY;
             }
 
             // If in function, collect lines
             if (this.currentState.inFunction) {
-                this.collectFunctionLine(line, i);
-
-                // Check for function end
+                this.collectFunctionLine(line);
                 if (this.isFunctionEnd(line)) {
                     this.endFunction();
                 }
             }
         }
 
-        // Handle any remaining function
         if (this.currentState.inFunction) {
             this.endFunction();
         }
     }
 
-    isFunctionDoc(line) {
-        return line.startsWith('%% @doc') ||
-               (line.startsWith('%%') && !line.startsWith('%%%'));
-    }
-
     collectFunctionDoc(startIdx) {
         const docLines = [];
+        const linesLength = this.lines.length;
 
-        for (let i = startIdx; i < this.lines.length; i++) {
+        for (let i = startIdx; i < linesLength; i++) {
             const line = this.lines[i];
             const trimmed = line.trim();
 
-            if (trimmed.startsWith('%%')) {
+            if (trimmed.startsWith(STRINGS.DOUBLE_PERCENT)) {
                 let docLine = trimmed.substring(2).trim();
-                // Remove @doc prefix if on first line
                 if (i === startIdx) {
-                    docLine = docLine.replace(/^@doc\s*/, '');
+                    docLine = docLine.replace(REGEX.REMOVE_DOC, STRINGS.EMPTY);
                 }
                 docLines.push(docLine);
-            } else if (trimmed === '') {
-                // Empty line - might be part of doc block or end
-                // Look ahead to see if more %% comments follow
+            } else if (!trimmed) {
+                // Look ahead for more %% comments
                 let j = i + 1;
-                let foundMoreDoc = false;
-                while (j < this.lines.length && this.lines[j].trim() === '') {
-                    j++;
-                }
-                if (j < this.lines.length && this.lines[j].trim().startsWith('%%')) {
-                    // More doc coming, include empty line
-                    docLines.push('');
-                    foundMoreDoc = true;
-                }
-                if (!foundMoreDoc) {
-                    // End of doc block
+                while (j < linesLength && !this.lines[j].trim()) j++;
+
+                if (j < linesLength && this.lines[j].trim().startsWith(STRINGS.DOUBLE_PERCENT)) {
+                    docLines.push(STRINGS.EMPTY);
+                } else {
                     break;
                 }
-            } else if (trimmed.startsWith('%')) {
-                // Single % comment, continue but don't include
-                continue;
-            } else {
-                // End of doc block
+            } else if (!trimmed.startsWith(STRINGS.SINGLE_PERCENT)) {
                 break;
             }
         }
 
-        this.currentState.pendingDoc = docLines.join('\n');
+        this.currentState.pendingDoc = docLines.join(STRINGS.NEWLINE);
     }
 
     collectParamTagsBeforeSpec(specIdx) {
         const paramLines = [];
         let hitDocBlock = false;
 
-        // Look backwards from the -spec line to find @param and @returns tags
         for (let i = specIdx - 1; i >= 0; i--) {
-            const line = this.lines[i];
-            const trimmed = line.trim();
+            const trimmed = this.lines[i].trim();
 
-            // If we hit a @doc line, we already collected this documentation
-            if (trimmed.startsWith('%% @doc')) {
+            if (REGEX.DOC_BLOCK_START.test(trimmed)) {
                 hitDocBlock = true;
                 break;
             }
 
-            // If we hit an empty line or a non-comment line, stop looking backwards
-            if (trimmed === '' || (!trimmed.startsWith('%%') && !trimmed.startsWith('%'))) {
+            if (!trimmed || (!trimmed.startsWith(STRINGS.DOUBLE_PERCENT) && !trimmed.startsWith(STRINGS.SINGLE_PERCENT))) {
                 break;
             }
 
-            // Check if this line contains @param or @returns
-            if (trimmed.startsWith('%%') && (trimmed.includes('@param') || trimmed.includes('@returns'))) {
-                let docLine = trimmed.substring(2).trim();
-                paramLines.unshift(docLine); // Add to beginning to maintain order
+            if (trimmed.startsWith(STRINGS.DOUBLE_PERCENT) &&
+                (trimmed.includes(STRINGS.PARAM_TAG) || trimmed.includes(STRINGS.RETURNS_TAG))) {
+                paramLines.unshift(trimmed.substring(2).trim());
             }
         }
 
-        // Only add the tags if we didn't find a @doc block (meaning these are standalone tags)
         if (paramLines.length > 0 && !hitDocBlock) {
-            const existingDoc = this.currentState.pendingDoc || '';
-            const newDoc = paramLines.join('\n');
-
-            if (existingDoc) {
-                this.currentState.pendingDoc = existingDoc + '\n' + newDoc;
-            } else {
-                this.currentState.pendingDoc = newDoc;
-            }
+            const existingDoc = this.currentState.pendingDoc;
+            const newDoc = paramLines.join(STRINGS.NEWLINE);
+            this.currentState.pendingDoc = existingDoc ?
+                existingDoc + STRINGS.NEWLINE + newDoc : newDoc;
         }
     }
 
     collectSpec(startIdx) {
         const specLines = [];
         let depth = 0;
+        const linesLength = this.lines.length;
 
-        for (let i = startIdx; i < this.lines.length; i++) {
+        for (let i = startIdx; i < linesLength; i++) {
             const line = this.lines[i];
             specLines.push(line);
 
-            // Track parentheses to handle multi-line specs
-            for (const char of line) {
-                if (char === '(') depth++;
-                if (char === ')') depth--;
+            // Fast character-based depth tracking
+            for (let j = 0, len = line.length; j < len; j++) {
+                const charCode = line.charCodeAt(j);
+                if (charCode === CHAR_CODES.OPEN_PAREN) depth++;
+                else if (charCode === CHAR_CODES.CLOSE_PAREN) depth--;
             }
 
-            // Check if spec is complete
             if (line.trim().endsWith('.') && depth === 0) {
                 break;
             }
         }
 
-        this.currentState.functionSpec = specLines.join('\n');
-
-        // After collecting spec, look for the actual function definition
-        // that should follow shortly after
-        for (let j = startIdx + specLines.length; j < this.lines.length; j++) {
-            const nextLine = this.lines[j].trim();
-
-            // Skip empty lines and comments
-            if (nextLine === '' || nextLine.startsWith('%')) {
-                continue;
-            }
-
-            // Look for function definition
-            const funcMatch = nextLine.match(/^([a-z][a-z0-9_]*)\s*\(/);
-            if (funcMatch) {
-                // This is likely the function that corresponds to this spec
-                // But don't start the function here, let the main loop handle it
-                break;
-            }
-
-            // If we hit another -spec or module directive, stop looking
-            if (nextLine.startsWith('-spec') || nextLine.startsWith('-')) {
-                break;
-            }
-        }
+        this.currentState.functionSpec = specLines.join(STRINGS.NEWLINE);
     }
 
-    startFunction(name, lineIdx) {
+    startFunction(name) {
         this.currentState.inFunction = true;
         this.currentState.functionName = name;
-        this.currentState.functionLines = [];
+        this.currentState.functionLines.length = 0;
         this.currentState.braceDepth = 0;
         this.currentState.parenDepth = 0;
-        this.currentState.inlineDocTags = [];
+        this.currentState.inlineDocTags.length = 0;
     }
 
-    collectFunctionLine(line, lineIdx) {
+    collectFunctionLine(line) {
         this.currentState.functionLines.push(line);
 
-        // Track depth for function end detection
-        for (const char of line) {
-            if (char === '{' || char === '[') this.currentState.braceDepth++;
-            if (char === '}' || char === ']') this.currentState.braceDepth--;
-            if (char === '(') this.currentState.parenDepth++;
-            if (char === ')') this.currentState.parenDepth--;
+        // Fast character-based depth tracking
+        for (let i = 0, len = line.length; i < len; i++) {
+            const charCode = line.charCodeAt(i);
+            switch (charCode) {
+                case CHAR_CODES.OPEN_BRACE:
+                case CHAR_CODES.OPEN_BRACKET:
+                    this.currentState.braceDepth++;
+                    break;
+                case CHAR_CODES.CLOSE_BRACE:
+                case CHAR_CODES.CLOSE_BRACKET:
+                    this.currentState.braceDepth--;
+                    break;
+                case CHAR_CODES.OPEN_PAREN:
+                    this.currentState.parenDepth++;
+                    break;
+                case CHAR_CODES.CLOSE_PAREN:
+                    this.currentState.parenDepth--;
+                    break;
+            }
         }
     }
 
     isFunctionEnd(line) {
         const trimmed = line.trim();
-
-        // Function ends with . at depth 0
-        if (this.currentState.braceDepth === 0 &&
-            this.currentState.parenDepth === 0 &&
-            trimmed.endsWith('.') &&
-            !trimmed.startsWith('%')) {
-            return true;
-        }
-
-        return false;
+        return this.currentState.braceDepth === 0 &&
+               this.currentState.parenDepth === 0 &&
+               trimmed.charCodeAt(trimmed.length - 1) === CHAR_CODES.DOT &&
+               trimmed.charCodeAt(0) !== CHAR_CODES.PERCENT;
     }
 
     endFunction() {
-        // Process the function body to extract inline comments
         const processedBody = this.processFunctionBody(this.currentState.functionLines);
 
         this.functions.push({
@@ -344,206 +368,191 @@ class ErlangLiterateParser {
             body: processedBody
         });
 
-        // Reset state
+        // Reset state efficiently
         this.currentState.inFunction = false;
-        this.currentState.functionName = '';
-        this.currentState.functionSpec = '';
-        this.currentState.functionDoc = '';
-        this.currentState.functionLines = [];
-        this.currentState.specFunctionName = '';
-        this.currentState.inlineDocTags = [];
+        this.currentState.functionName = STRINGS.EMPTY;
+        this.currentState.functionSpec = STRINGS.EMPTY;
+        this.currentState.functionDoc = STRINGS.EMPTY;
+        this.currentState.functionLines.length = 0;
+        this.currentState.specFunctionName = STRINGS.EMPTY;
+        this.currentState.inlineDocTags.length = 0;
     }
 
     processFunctionBody(lines) {
         const segments = [];
-        let currentCode = [];
-        let pendingTagLines = [];
+        const currentCode = [];
+        const pendingTagLines = [];
 
         const flushCode = () => {
             if (currentCode.length > 0) {
-                segments.push({ type: 'code', content: currentCode.join('\n') });
-                currentCode = [];
+                segments.push({ type: 'code', content: currentCode.join(STRINGS.NEWLINE) });
+                currentCode.length = 0;
             }
         };
 
         const flushTags = () => {
             if (pendingTagLines.length > 0) {
-                const tagText = pendingTagLines.join('\n');
+                const tagText = pendingTagLines.join(STRINGS.NEWLINE);
                 const parsed = this.parseDocumentation(tagText);
-                let docParts = [];
+                const docParts = [];
+
                 if (parsed.params.length > 0) {
-                    docParts.push('### Parameters');
-                    docParts.push('');
-                    for (const p of parsed.params) {
-                        const desc = this.cleanDocumentation(p.description || '');
-                        docParts.push(`- \`${p.name}\` - ${desc}`);
-                    }
-                    docParts.push('');
+                    docParts.push(STRINGS.PARAMETERS_HEADER, STRINGS.EMPTY);
+                    parsed.params.forEach(p => {
+                        const desc = this.cleanDocumentation(p.description || STRINGS.EMPTY);
+                        docParts.push(`- ${STRINGS.BACKTICK}${p.name}${STRINGS.BACKTICK} - ${desc}`);
+                    });
+                    docParts.push(STRINGS.EMPTY);
                 }
+
                 if (parsed.returns.length > 0) {
-                    docParts.push('### Returns');
-                    docParts.push('');
+                    docParts.push(STRINGS.RETURNS_HEADER, STRINGS.EMPTY);
                     const expanded = parsed.returns.flatMap(r => this.splitReturnsIntoOutcomes(r));
-                    for (const r of expanded) {
-                        docParts.push(`- ${this.formatReturnsText(r)}`);
-                    }
-                    docParts.push('');
+                    expanded.forEach(r => docParts.push(`- ${this.formatReturnsText(r)}`));
+                    docParts.push(STRINGS.EMPTY);
                 }
+
                 if (docParts.length > 0) {
-                    segments.push({ type: 'doc', content: docParts.join('\n') });
+                    segments.push({ type: 'doc', content: docParts.join(STRINGS.NEWLINE) });
                 }
-                pendingTagLines = [];
+                pendingTagLines.length = 0;
             }
         };
 
         for (const line of lines) {
             const trimmed = line.trim();
 
-            if (trimmed.match(/^\s*%[^%]/) || trimmed.match(/^\s*%%[^%]/)) {
-                // It's a comment line
-                // Save any accumulated code block first
-                if (currentCode.length > 0) {
-                    flushCode();
-                }
+            if (REGEX.COMMENT_SINGLE.test(trimmed) || REGEX.COMMENT_DOUBLE.test(trimmed)) {
+                flushCode();
 
-                // Extract comment text (remove % or %% prefix)
-                let commentText;
-                if (trimmed.match(/^\s*%%[^%]/)) {
-                    commentText = line.replace(/^\s*%%\s?/, '');
-                } else {
-                    commentText = line.replace(/^\s*%\s?/, '');
-                }
+                const commentText = REGEX.COMMENT_DOUBLE.test(trimmed)
+                    ? line.replace(REGEX.COMMENT_DOUBLE_PREFIX, STRINGS.EMPTY)
+                    : line.replace(REGEX.COMMENT_SINGLE_PREFIX, STRINGS.EMPTY);
                 const cleaned = this.cleanInlineComment(commentText);
 
-                // Heuristic: returns-like lines (e.g., `{ok, Binary}` / `{error, Binary}` ...)
-                const returnsLikeTuple = /^`?\{[^}]+\}`?/.test(cleaned);
-                const returnsLikeAtom = /^`?(ok|error|not_found|true|false)\b/i.test(cleaned);
-                const isTagParam = /^\s*@param\b/i.test(cleaned);
-                const isTagReturns = /^\s*@returns?\b/i.test(cleaned);
+                const returnsLikeTuple = REGEX.RETURNS_LIKE_TUPLE.test(cleaned);
+                const returnsLikeAtom = REGEX.RETURNS_LIKE_ATOM.test(cleaned);
+                const isTagParam = cleaned.startsWith(STRINGS.PARAM_TAG);
+                const isTagReturns = cleaned.startsWith(STRINGS.RETURNS_TAG);
 
                 if (isTagParam || isTagReturns || returnsLikeTuple || returnsLikeAtom) {
-                    const lineAsTag = isTagParam || isTagReturns
+                    const lineAsTag = (isTagParam || isTagReturns)
                         ? cleaned.trim()
-                        : `@returns ${cleaned.trim()}`;
-                    // Accumulate tag lines
+                        : `${STRINGS.RETURNS_TAG} ${cleaned.trim()}`;
                     pendingTagLines.push(lineAsTag);
-                } else if (
-                    pendingTagLines.length > 0 &&
-                    (pendingTagLines[pendingTagLines.length - 1].startsWith('@returns') ||
-                     pendingTagLines[pendingTagLines.length - 1].startsWith('@param'))
-                ) {
-                    // Continuation of the previous @returns/@param block; append line
-                    pendingTagLines.push(cleaned.trim());
+                } else if (pendingTagLines.length > 0) {
+                    const lastTag = pendingTagLines[pendingTagLines.length - 1];
+                    if (lastTag.startsWith(STRINGS.RETURNS_TAG) || lastTag.startsWith(STRINGS.PARAM_TAG)) {
+                        pendingTagLines.push(cleaned.trim());
+                    } else {
+                        flushTags();
+                        segments.push({ type: 'comment', content: cleaned });
+                    }
                 } else {
-                    // Flush any pending tag block before emitting a normal comment
                     flushTags();
                     segments.push({ type: 'comment', content: cleaned });
                 }
             } else {
-                // Non-comment code line; flush any pending tags first, then add code
                 flushTags();
                 currentCode.push(line);
             }
         }
 
-        // Flush any remaining tag or code blocks
         flushTags();
         flushCode();
-
         return segments;
     }
 
     cleanInlineComment(text) {
-        // Convert `thing' to `thing`
-        return text.replace(/`([^']*?)'/g, '`$1`').trim();
+        return text.replace(REGEX.BACKTICK_QUOTE, `${STRINGS.BACKTICK}$1${STRINGS.BACKTICK}`).trim();
     }
 
     cleanDocumentation(text) {
-        if (!text) return '';
+        if (!text) return STRINGS.EMPTY;
 
-        // Handle 
 tags with structured content
-        text = text.replace(/
([\s\S]*?)<\/pre>/g, (match, content) => {
-            return this.formatPreContent(content);
-        });
-
-        // Convert Erlang doc syntax to Markdown
-        let cleaned = text
-            .replace(/`([^']*?)'/g, '`$1`')  // Convert `code' to `code`
-            .replace(/<</g, '<<')       // Fix HTML entities
-            .replace(/>>/g, '>>')
-            .replace(/@doc\s*/g, '')          // Remove @doc tags
-            .replace(/\n\s*\n\s*\n/g, '\n\n')   // Normalize multiple empty lines to double newlines
-            .replace(/[ \t]+$/gm, '')               // Trim trailing spaces per line
-            .replace(/^\s+|\s+$/g, '');            // Final trim
+        text = text.replace(REGEX.PRE_TAG, (match, content) => this.formatPreContent(content));
 
-        // Reflow numbered lists and ensure separation from following headings/labels
-        cleaned = this.reflowNumberedLists(cleaned);
+        const cleaned = text
+            .replace(REGEX.BACKTICK_QUOTE, `${STRINGS.BACKTICK}$1${STRINGS.BACKTICK}`)
+            .replace(REGEX.HTML_ENTITIES_LT, '<<')
+            .replace(REGEX.HTML_ENTITIES_GT, '>>')
+            .replace(REGEX.REMOVE_DOC, STRINGS.EMPTY)
+            .replace(REGEX.MULTIPLE_NEWLINES, '\n\n')
+            .replace(REGEX.TRAILING_SPACES, STRINGS.EMPTY)
+            .replace(REGEX.TRIM, STRINGS.EMPTY);
 
-        return cleaned;
+        return this.reflowNumberedLists(cleaned);
     }
 
     formatReturnsText(text) {
-        if (!text) return '';
-        // First, clean the documentation text
+        if (!text) return STRINGS.EMPTY;
         let result = this.cleanDocumentation(text);
 
-        // Wrap leading return token if it's a tuple/list or common atom
-        const leadingMatch = result.match(/^(\s*)(\{[^}]+\}|\[[^\]]+\]|ok|error|not_found|true|false)(\b|\s|$)/i);
+        const leadingMatch = result.match(REGEX.LEADING_RETURN_TOKEN);
         if (leadingMatch) {
-            const [, leadSpace, token, trail] = leadingMatch;
-            result = leadSpace + '`' + token + '`' + result.slice(leadSpace.length + token.length);
+            const [, leadSpace, token] = leadingMatch;
+            result = leadSpace + STRINGS.BACKTICK + token + STRINGS.BACKTICK +
+                    result.slice(leadSpace.length + token.length);
         }
 
-        // Wrap any standalone tuple occurrences not already inside backticks
-        result = result.replace(/(^|[^`])(\{[^}]+\})([^`]|$)/g, (m, pre, tuple, post) => {
-            return `${pre}\`${tuple}\`${post}`;
-        });
-
-        return result;
+        return result.replace(REGEX.STANDALONE_TUPLE,
+            (m, pre, tuple, post) => `${pre}${STRINGS.BACKTICK}${tuple}${STRINGS.BACKTICK}${post}`);
     }
 
     splitReturnsIntoOutcomes(text) {
         if (!text) return [];
         const s = this.cleanDocumentation(text);
-        const tokenRegex = /(\{[^}]+\}|\bok\b|\berror\b|\bnot_found\b|\btrue\b|\bfalse\b)/gi;
-        const parts = [];
-        let match;
         const matches = [];
-        while ((match = tokenRegex.exec(s)) !== null) {
+        let match;
+
+        // Reset regex lastIndex to avoid issues with global regex
+        REGEX.RETURNS_TOKENS.lastIndex = 0;
+        while ((match = REGEX.RETURNS_TOKENS.exec(s)) !== null) {
             matches.push({ index: match.index, token: match[0] });
         }
-        // If no tokens or prose exists before the first token, don't split; keep as one descriptive line
-        if (matches.length === 0 || (matches.length > 0 && matches[0].index > 0)) {
+
+        if (matches.length === 0 || matches[0].index > 0) {
             return [s.trim()];
         }
-        for (let i = 0; i < matches.length; i++) {
+
+        const parts = [];
+        const matchesLength = matches.length;
+        for (let i = 0; i < matchesLength; i++) {
             const start = matches[i].index;
-            const nextStart = (i + 1 < matches.length) ? matches[i + 1].index : s.length;
-            let segment = s.slice(start, nextStart).trim();
-            // Remove leading commas that were used as separators
-            segment = segment.replace(/^,\s*/, '');
-            // If there's trailing comma before next token, trim it but keep sentence end
-            segment = segment.replace(/,\s*$/, '');
+            const nextStart = (i + 1 < matchesLength) ? matches[i + 1].index : s.length;
+            let segment = s.slice(start, nextStart).trim()
+                .replace(REGEX.COMMA_START, STRINGS.EMPTY)
+                .replace(REGEX.COMMA_END, STRINGS.EMPTY);
             if (segment) parts.push(segment.trim());
         }
-        // If we accidentally merged two outcomes without clear token boundaries, ensure uniqueness
-        return parts.filter(p => p.length > 0);
+
+        return parts.filter(Boolean);
     }
 
     reflowNumberedLists(text) {
-        if (!text) return '';
-        const lines = text.split('\n');
+        if (!text) return STRINGS.EMPTY;
+        const lines = text.split(STRINGS.NEWLINE);
         const out = [];
         let inNumbered = false;
         let lastNumIndex = -1;
-        for (let i = 0; i < lines.length; i++) {
+
+        const linesLength = lines.length;
+        for (let i = 0; i < linesLength; i++) {
             const raw = lines[i];
             const trimmed = raw.trim();
 
-            const isNumbered = /^\d+\.\s/.test(trimmed);
-            const isBullet = /^[-*]\s/.test(trimmed);
-            const isHeading = /^#{1,6}\s/.test(trimmed);
-            const isCodeFence = /^```/.test(trimmed);
+            const isNumbered = REGEX.NUMBERED_LIST.test(trimmed);
+            const isBullet = REGEX.BULLET_LIST.test(trimmed);
+            const isHeading = REGEX.HEADING.test(trimmed);
+            const isCodeFence = REGEX.CODE_FENCE.test(trimmed);
+
+            // Ensure a blank line BEFORE any list (numbered or bullet) begins
+            if ((isNumbered || isBullet) && out.length > 0) {
+                const prev = out[out.length - 1];
+                if (prev.trim() !== STRINGS.EMPTY) {
+                    out.push(STRINGS.EMPTY);
+                }
+            }
 
             if (isNumbered) {
                 out.push(trimmed);
@@ -552,19 +561,23 @@ class ErlangLiterateParser {
                 continue;
             }
 
+            // Pass through bullet list lines unchanged (no reflow of bullets for now)
+            if (isBullet) {
+                out.push(raw);
+                continue;
+            }
+
             if (inNumbered) {
-                if (trimmed === '') {
-                    out.push('');
+                if (!trimmed) {
+                    out.push(STRINGS.EMPTY);
                     inNumbered = false;
                     lastNumIndex = -1;
                     continue;
                 }
                 if (!isNumbered && !isBullet && !isHeading && !isCodeFence) {
-                    // Continuation of previous numbered item; append
-                    out[lastNumIndex] = out[lastNumIndex] + ' ' + trimmed;
+                    out[lastNumIndex] = out[lastNumIndex] + STRINGS.SPACE + trimmed;
                     continue;
                 }
-                // Different kind of line; end numbered block and fall through
                 inNumbered = false;
                 lastNumIndex = -1;
             }
@@ -572,32 +585,32 @@ class ErlangLiterateParser {
             out.push(raw);
         }
 
-        // Ensure a blank line between last numbered item and a label/heading line like 'Config options ...:'
+        // Ensure blank line separation
         const separated = [];
-        for (let i = 0; i < out.length; i++) {
+        const outLength = out.length;
+        for (let i = 0; i < outLength; i++) {
             const cur = out[i];
-            const next = i + 1 < out.length ? out[i + 1] : '';
+            const next = i + 1 < outLength ? out[i + 1] : STRINGS.EMPTY;
             separated.push(cur);
-            if (/^\d+\.\s/.test(cur.trim()) && next && !/^\s*$/.test(next) && /:\s*$/.test(next.trim())) {
-                // Insert a blank line if not already present
-                if (separated[separated.length - 1] !== '') {
-                    separated.push('');
+            // If a paragraph ends with ':' and is immediately followed by a numbered list,
+            // insert a blank line between them to satisfy Markdown list rendering rules.
+            if (REGEX.COLON_END.test(cur.trim()) && next && REGEX.NUMBERED_LIST.test(next.trim())) {
+                if (separated[separated.length - 1] !== STRINGS.EMPTY) {
+                    separated.push(STRINGS.EMPTY);
                 }
             }
         }
 
-        return separated.join('\n');
+        return separated.join(STRINGS.NEWLINE);
     }
 
     formatPreContent(content) {
-        // First, let's look at the actual structure of the content more carefully
-        // The issue is that definitions span multiple lines with varying indentation
-
-        const lines = content.trim().split('\n');
+        const lines = content.trim().split(STRINGS.NEWLINE);
         const formatted = [];
 
         let i = 0;
-        while (i < lines.length) {
+        const linesLength = lines.length;
+        while (i < linesLength) {
             const line = lines[i].trim();
 
             if (!line) {
@@ -605,74 +618,54 @@ class ErlangLiterateParser {
                 continue;
             }
 
-            // Look for definition pattern: starts with word(s), colon, then description
-            // Pattern: "DevMod:ExportedFunc : Description" or "info/exports : Description"
-            const defMatch = line.match(/^(\S+(?:\s*:\s*\S+)?)\s*:\s*(.*)$/);
+            const defMatch = line.match(REGEX.DEFINITION);
 
             if (defMatch) {
                 const [, term, initialDesc] = defMatch;
                 let fullDescription = initialDesc.trim();
 
-                // Collect continuation lines for this definition
                 let j = i + 1;
-                while (j < lines.length) {
+                while (j < linesLength) {
                     const nextLine = lines[j];
 
-                    // Empty line - check if there's more content
                     if (!nextLine.trim()) {
                         j++;
                         continue;
                     }
 
-                    // If it looks like a new definition, stop
-                    if (nextLine.trim().match(/^\S+(?:\s*:\s*\S+)?\s*:\s*/)) {
+                    if (nextLine.trim().match(REGEX.DEFINITION)) {
                         break;
                     }
 
-                    // This is a continuation line - add it to the description
                     if (nextLine.trim()) {
-                        fullDescription += ' ' + nextLine.trim();
+                        fullDescription += STRINGS.SPACE + nextLine.trim();
                     }
                     j++;
                 }
 
-                // Format the definition
-                formatted.push('');
-                formatted.push(`**${term.trim()}**`);
-                formatted.push('');
-                formatted.push(fullDescription);
-
-                i = j; // Move to the next unprocessed line
+                formatted.push(STRINGS.EMPTY, `**${term.trim()}**`, STRINGS.EMPTY, fullDescription);
+                i = j;
             } else {
-                // Not a definition - handle as regular content
                 if (line.toLowerCase().includes('hyperbeam') && line.includes('options')) {
-                    formatted.push('');
-                    formatted.push(`### ${line}`);
-                    formatted.push('');
-                } else if (line.match(/^`[^`]+`\s*:/)) {
-                    // Special case for option definitions like `update_hashpath`:
-                    const optMatch = line.match(/^(`[^`]+`)\s*:\s*(.*)$/);
+                    formatted.push(STRINGS.EMPTY, `### ${line}`, STRINGS.EMPTY);
+                } else {
+                    const optMatch = line.match(REGEX.OPTION_DEF);
                     if (optMatch) {
                         const [, optName, optDesc] = optMatch;
-                        formatted.push('');
-                        formatted.push(`**${optName}**`);
-                        formatted.push('');
-                        formatted.push(optDesc);
+                        formatted.push(STRINGS.EMPTY, `**${optName}**`, STRINGS.EMPTY, optDesc);
                     } else {
                         formatted.push(line);
                     }
-                } else {
-                    formatted.push(line);
                 }
                 i++;
             }
         }
 
-        return formatted.join('\n');
+        return formatted.join(STRINGS.NEWLINE);
     }
 
     parseDocumentation(docText) {
-        const lines = docText.split('\n');
+        const lines = docText.split(STRINGS.NEWLINE);
         const result = {
             description: [],
             params: [],
@@ -686,51 +679,47 @@ class ErlangLiterateParser {
         for (const line of lines) {
             const trimmed = line.trim();
 
-            // Check for @param
-            const paramMatch = trimmed.match(/^@param\s+(\S+)\s*(.*)/);
+            const paramMatch = trimmed.match(REGEX.PARAM);
             if (paramMatch) {
                 if (currentParam) {
                     result.params.push(currentParam);
                 }
                 currentParam = {
                     name: paramMatch[1],
-                    description: paramMatch[2] || ''
+                    description: paramMatch[2] || STRINGS.EMPTY
                 };
                 currentSection = 'param';
                 continue;
             }
 
-            // Check for @returns
-            if (trimmed.match(/^@returns?\s/)) {
+            if (REGEX.RETURNS.test(trimmed)) {
                 if (currentParam) {
                     result.params.push(currentParam);
                     currentParam = null;
                 }
-                const returnsText = trimmed.replace(/^@returns?\s*/, '');
+                const returnsText = trimmed.replace(REGEX.RETURNS, STRINGS.EMPTY);
                 result.returns.push(returnsText);
                 lastReturnIndex = result.returns.length - 1;
                 currentSection = 'returns';
                 continue;
             }
 
-            // Add to current section
             if (currentSection === 'description') {
                 if (trimmed) {
                     result.description.push(trimmed);
                 } else {
-                    // Preserve a single blank line to break paragraphs/lists
                     const last = result.description[result.description.length - 1];
-                    if (last !== '') {
-                        result.description.push('');
+                    if (last !== STRINGS.EMPTY) {
+                        result.description.push(STRINGS.EMPTY);
                     }
                 }
             } else if (currentSection === 'param' && currentParam && trimmed) {
-                currentParam.description += ' ' + trimmed;
+                currentParam.description += STRINGS.SPACE + trimmed;
             } else if (currentSection === 'returns' && trimmed) {
                 if (lastReturnIndex >= 0) {
-                    // Append continuation text to the last returns entry
                     result.returns[lastReturnIndex] =
-                        (result.returns[lastReturnIndex] + ' ' + trimmed).replace(/\s+/g, ' ').trim();
+                        (result.returns[lastReturnIndex] + STRINGS.SPACE + trimmed)
+                        .replace(REGEX.WHITESPACE_NORMALIZE, STRINGS.SPACE).trim();
                 } else {
                     result.returns.push(trimmed);
                     lastReturnIndex = result.returns.length - 1;
@@ -738,7 +727,6 @@ class ErlangLiterateParser {
             }
         }
 
-        // Save final param if exists
         if (currentParam) {
             result.params.push(currentParam);
         }
@@ -748,90 +736,80 @@ class ErlangLiterateParser {
 
     generateMarkdown(fileName) {
         const githubUrl = `${this.options.githubBase}/${fileName}`;
-        let md = [];
+        const md = [];
 
         // Header
-        md.push(`# ${this.moduleInfo.name || fileName.replace('.erl', '')}`);
-        md.push('');
+        md.push(`# ${this.moduleInfo.name || fileName.replace('.erl', STRINGS.EMPTY)}`);
+        md.push(STRINGS.EMPTY);
         md.push(`[View source on GitHub](${githubUrl})`);
-        md.push('');
+        md.push(STRINGS.EMPTY);
 
         // Module documentation
         if (this.moduleInfo.doc) {
             md.push(this.moduleInfo.doc);
-            md.push('');
-            md.push('---');
-            md.push('');
+            md.push(STRINGS.EMPTY);
+            md.push(STRINGS.SEPARATOR);
+            md.push(STRINGS.EMPTY);
         }
 
         // Exports
-        if (this.moduleInfo.exports && this.moduleInfo.exports.length > 0) {
-            md.push('## Exported Functions');
-            md.push('');
-            for (const exp of this.moduleInfo.exports) {
-                md.push(`- \`${exp}\``);
-            }
-            md.push('');
-            md.push('---');
-            md.push('');
+        if (this.moduleInfo.exports?.length > 0) {
+            md.push(STRINGS.EXPORTED_FUNCTIONS);
+            md.push(STRINGS.EMPTY);
+            this.moduleInfo.exports.forEach(exp =>
+                md.push(`- ${STRINGS.BACKTICK}${exp}${STRINGS.BACKTICK}`));
+            md.push(STRINGS.EMPTY);
+            md.push(STRINGS.SEPARATOR);
+            md.push(STRINGS.EMPTY);
         }
 
-        // Group functions by name to merge overloaded functions
         const groupedFunctions = this.groupFunctionsByName(this.functions);
 
-        // Functions
         for (const group of groupedFunctions) {
             md.push(`## ${group.name}`);
-            md.push('');
+            md.push(STRINGS.EMPTY);
 
-            // Combine documentation from all functions in the group
             const combinedDoc = this.combineFunctionDocs(group.functions);
             if (combinedDoc) {
                 md.push(combinedDoc);
-                md.push('');
+                md.push(STRINGS.EMPTY);
             }
 
-            // Add all specs and bodies for the function group
             for (const func of group.functions) {
-                // Spec
                 if (func.spec) {
-                    md.push('```erlang');
+                    md.push(`\`\`\`${STRINGS.ERLANG}`);
                     md.push(func.spec.trim());
                     md.push('```');
-                    md.push('');
+                    md.push(STRINGS.EMPTY);
                 }
 
-                // Function body with inline comments
-                if (func.body && func.body.length > 0) {
-                    md.push('');
-
+                if (func.body?.length > 0) {
+                    md.push(STRINGS.EMPTY);
                     for (const segment of func.body) {
                         if (segment.type === 'comment') {
                             md.push(segment.content);
-                            md.push('');
+                            md.push(STRINGS.EMPTY);
                         } else if (segment.type === 'doc') {
-                            // Insert structured params/returns adjacent to the preceding code
                             md.push(segment.content);
-                            md.push('');
+                            md.push(STRINGS.EMPTY);
                         } else if (segment.type === 'code') {
-                            md.push('```erlang');
+                            md.push(`\`\`\`${STRINGS.ERLANG}`);
                             md.push(segment.content.trim());
                             md.push('```');
-                            md.push('');
+                            md.push(STRINGS.EMPTY);
                         }
                     }
                 }
             }
 
-            md.push('');
+            md.push(STRINGS.EMPTY);
         }
 
-        // Footer
-        md.push('---');
-        md.push('');
+        md.push(STRINGS.SEPARATOR);
+        md.push(STRINGS.EMPTY);
         md.push(`*Generated from [${fileName}](${githubUrl})*`);
 
-        return md.join('\n');
+        return md.join(STRINGS.NEWLINE);
     }
 
     groupFunctionsByName(functions) {
@@ -840,14 +818,9 @@ class ErlangLiterateParser {
 
         for (const func of functions) {
             if (!currentGroup || currentGroup.name !== func.name) {
-                // Start a new group
-                currentGroup = {
-                    name: func.name,
-                    functions: [func]
-                };
+                currentGroup = { name: func.name, functions: [func] };
                 groups.push(currentGroup);
             } else {
-                // Add to current group
                 currentGroup.functions.push(func);
             }
         }
@@ -856,42 +829,35 @@ class ErlangLiterateParser {
     }
 
     combineFunctionDocs(functions) {
-        // Use the documentation from the first function that has it
-        // In practice, usually only the first clause of an overloaded function has detailed docs
         for (const func of functions) {
             if (func.doc) {
                 const parsed = this.parseDocumentation(func.doc);
-                let combinedDoc = [];
+                const combinedDoc = [];
 
-                // Description
                 if (parsed.description.length > 0) {
-                    combinedDoc.push(this.cleanDocumentation(parsed.description.join('\n')));
-                    combinedDoc.push('');
+                    combinedDoc.push(this.cleanDocumentation(parsed.description.join(STRINGS.NEWLINE)));
+                    combinedDoc.push(STRINGS.EMPTY);
                 }
 
-                // Parameters
                 if (parsed.params.length > 0) {
-                    combinedDoc.push('### Parameters');
-                    combinedDoc.push('');
-                    for (const param of parsed.params) {
+                    combinedDoc.push(STRINGS.PARAMETERS_HEADER);
+                    combinedDoc.push(STRINGS.EMPTY);
+                    parsed.params.forEach(param => {
                         const desc = this.cleanDocumentation(param.description);
-                        combinedDoc.push(`- \`${param.name}\` - ${desc}`);
-                    }
-                    combinedDoc.push('');
+                        combinedDoc.push(`- ${STRINGS.BACKTICK}${param.name}${STRINGS.BACKTICK} - ${desc}`);
+                    });
+                    combinedDoc.push(STRINGS.EMPTY);
                 }
 
-                // Returns
                 if (parsed.returns.length > 0) {
-                    combinedDoc.push('### Returns');
-                    combinedDoc.push('');
+                    combinedDoc.push(STRINGS.RETURNS_HEADER);
+                    combinedDoc.push(STRINGS.EMPTY);
                     const expanded = parsed.returns.flatMap(r => this.splitReturnsIntoOutcomes(r));
-                    for (const ret of expanded) {
-                        combinedDoc.push(`- ${this.formatReturnsText(ret)}`);
-                    }
-                    combinedDoc.push('');
+                    expanded.forEach(ret => combinedDoc.push(`- ${this.formatReturnsText(ret)}`));
+                    combinedDoc.push(STRINGS.EMPTY);
                 }
 
-                return combinedDoc.join('\n');
+                return combinedDoc.join(STRINGS.NEWLINE);
             }
         }
         return null;
@@ -903,16 +869,13 @@ function main() {
     const args = process.argv.slice(2);
     const verbose = args.includes('-v') || args.includes('--verbose');
 
-    // Get source directory
     const srcDir = process.env.SRC_DIR || path.join(process.cwd(), 'src');
     const outputDir = process.env.OUTPUT_DIR || path.join(process.cwd(), 'docs/literate-erlang');
 
-    // Ensure output directory exists
     if (!fs.existsSync(outputDir)) {
         fs.mkdirSync(outputDir, { recursive: true });
     }
 
-    // Process all .erl files
     const files = fs.readdirSync(srcDir).filter(f => f.endsWith('.erl'));
     const parser = new ErlangLiterateParser({ verbose });
 
@@ -923,11 +886,9 @@ function main() {
 
         try {
             const inputPath = path.join(srcDir, file);
-            const outputPath = path.join(outputDir, file + '.md');
-
+            const outputPath = path.join(outputDir, `${file}.md`);
             const markdown = parser.parseFile(inputPath);
             fs.writeFileSync(outputPath, markdown);
-
         } catch (error) {
             console.error(`Error processing ${file}:`, error.message);
         }

From bb2dac936cf15cce40ac1b83ab0418dee5da39fd Mon Sep 17 00:00:00 2001
From: Dylan Shade <63427984+dpshade@users.noreply.github.com>
Date: Wed, 24 Sep 2025 17:19:15 -0400
Subject: [PATCH 16/17] docs: Remove deprecated documentation build scripts

- Rename scripts for clearer naming: `build-all.sh` → `build-docs.sh`, `build-and-serve.sh` → `serve-book.sh`, and `build-literate-erlang-js.sh` → `generate-literate-docs.sh`.
- Delete the deprecated bash parser `build-literate-erlang.sh`, which has been superseded by the more efficient JavaScript implementation, streamlining the documentation generation process.
- Ensure the codebase is cleaner and more maintainable by removing legacy files.
---
 docs/book/book.toml                           |   5 +-
 docs/{build-all.sh => build-docs.sh}          |   0
 docs/build-literate-erlang.sh                 | 666 --------------
 docs/deploy-dry-run.sh                        |  94 ++
 docs/erlang-literate-parser.js                | 847 ++++++++++++++++--
 ...erlang-js.sh => generate-literate-docs.sh} |  90 +-
 docs/{build-and-serve.sh => serve-book.sh}    |   2 +-
 7 files changed, 960 insertions(+), 744 deletions(-)
 rename docs/{build-all.sh => build-docs.sh} (100%)
 delete mode 100755 docs/build-literate-erlang.sh
 create mode 100755 docs/deploy-dry-run.sh
 rename docs/{build-literate-erlang-js.sh => generate-literate-docs.sh} (51%)
 rename docs/{build-and-serve.sh => serve-book.sh} (96%)

diff --git a/docs/book/book.toml b/docs/book/book.toml
index ab213ea81..51f3cc0fa 100644
--- a/docs/book/book.toml
+++ b/docs/book/book.toml
@@ -8,9 +8,7 @@ description = "Literate programming documentation for the HyperBEAM decentralize
 [build]
 build-dir = "dist"
 
-[rust]
-edition = "2021"
-
+# Configure for non-Rust documentation - this prevents mdBook from trying to compile code blocks as Rust
 [output.html]
 default-theme = "rust"
 additional-css = ["custom.css", "theme/highlight.css"]
@@ -41,6 +39,7 @@ copy-js = true
 line-numbers = false
 runnable = false
 
+
 [output.html.search]
 enable = true
 limit-results = 30
diff --git a/docs/build-all.sh b/docs/build-docs.sh
similarity index 100%
rename from docs/build-all.sh
rename to docs/build-docs.sh
diff --git a/docs/build-literate-erlang.sh b/docs/build-literate-erlang.sh
deleted file mode 100755
index f1cf0262d..000000000
--- a/docs/build-literate-erlang.sh
+++ /dev/null
@@ -1,666 +0,0 @@
-#!/bin/bash
-
-# Script to generate literate Erlang documentation from HyperBEAM source files
-#
-# ⚠️  DEPRECATED: This bash parser has been superseded by the JavaScript version
-# For superior results with comprehensive comment parsing, use:
-#   ./docs/build-literate-erlang-js.sh
-#
-# This creates .erl.md files that combine source code with documentation
-# in a format optimized for GitHub rendering with cleaner appearance
-#
-# Usage: ./docs/build-literate-erlang.sh [-v | --verbose]
-#   -v, --verbose: Show detailed processing output
-
-# --- Color Definitions ---
-GREEN='\033[0;32m'
-RED='\033[0;31m'
-YELLOW='\033[0;33m'
-BLUE='\033[0;34m'
-BOLD='\033[1m'
-NC='\033[0m' # No Color
-
-# HyperBEAM Logo Colors
-NEON_GREEN='\033[38;5;46m'
-CYAN='\033[38;5;51m'
-BRIGHT_YELLOW='\033[38;5;226m'
-MAGENTA='\033[38;5;201m'
-BRIGHT_RED='\033[38;5;196m'
-BLACK='\033[38;5;0m'
-GRAY='\033[38;5;245m'
-
-# --- Helper Functions ---
-log_success() {
-  echo -e "${GREEN}✓ $1${NC}"
-}
-
-log_info() {
-  echo -e "${BLUE}→ $1${NC}"
-}
-
-log_step() {
-  echo -e "\n${YELLOW}${BOLD}$1${NC}"
-}
-
-log_error() {
-  echo -e "${RED}✗ $1${NC}"
-}
-
-log_verbose() {
-  if [ "$VERBOSE" = true ]; then
-    echo -e "${GRAY}  $1${NC}"
-  fi
-}
-
-# --- Variable Defaults ---
-VERBOSE=false
-
-# --- Parse Command Line Arguments ---
-while [[ $# -gt 0 ]]; do
-  key="$1"
-  case $key in
-    -v|--verbose)
-      VERBOSE=true
-      log_info "Verbose mode enabled"
-      shift
-      ;;
-    *)
-      log_error "Unknown option: $1"
-      echo "Usage: $0 [-v | --verbose]"
-      exit 1
-      ;;
-  esac
-done
-
-# --- Display HyperBEAM ASCII Logo ---
-display_logo() {
-  echo -e "
-${NEON_GREEN}                ++         ${BLACK}${BOLD}                                 ${NC}
-${NEON_GREEN}               +++        ${BLACK}${BOLD} _                              ${NC}
-${NEON_GREEN}             ++++*        ${BLACK}${BOLD}| |__  _   _ _ __   ___ _ __  ${NC}
-${NEON_GREEN}           :+++*${BRIGHT_YELLOW}##       ${BLACK}${BOLD} | '_ \\| | | | '_ \\ / _ \\ '__| ${NC}
-${NEON_GREEN}          ++**${BRIGHT_YELLOW}####       ${BLACK}${BOLD} | | | | |_| | |_) |  __/ |    ${NC}
-${NEON_GREEN}        +++${BRIGHT_YELLOW}####${NEON_GREEN}***       ${BLACK}${BOLD} |_| |_|\\__, | .__/ \\___|_|    ${NC}
-${NEON_GREEN}        +*${BRIGHT_YELLOW}##${NEON_GREEN}****${MAGENTA}+--      ${BLACK}${BOLD}        |___/|_|              ${NC}
-${MAGENTA}    -**${BRIGHT_YELLOW}##${NEON_GREEN}**${MAGENTA}+------       ${BLACK}${BOLD}                	BEAM.${NC}
-${MAGENTA}   -##${NEON_GREEN}*+${BRIGHT_RED}---:::::::
-${GRAY}  =${GRAY}%%${NEON_GREEN}*+${BRIGHT_RED}=-:::::::::${GRAY}        LITERATE ERLANG DOCUMENTATION${NC}
-"
-}
-
-# --- Script Start ---
-display_logo
-log_step "LITERATE ERLANG DOCUMENTATION GENERATION"
-
-# Display deprecation notice
-echo -e "${YELLOW}${BOLD}⚠️  DEPRECATION NOTICE${NC}"
-echo -e "${YELLOW}This bash parser has been superseded by a superior JavaScript implementation.${NC}"
-echo -e "${YELLOW}For comprehensive comment parsing and true literate programming, use:${NC}"
-echo -e "${GREEN}${BOLD}  ./docs/build-literate-erlang-js.sh${NC}"
-echo -e "${GRAY}Continuing with legacy bash parser...${NC}"
-echo ""
-
-# Ensure we're in the root directory
-ROOT_DIR="$(dirname "$(realpath "$0")")/.."
-cd "$ROOT_DIR" || { log_error "Failed to change to root directory"; exit 1; }
-
-# GitHub repository base URL
-GITHUB_BASE_URL="https://github.com/permaweb/HyperBEAM/blob/edge/src"
-
-# Output directory for literate Erlang files
-OUTPUT_DIR="$ROOT_DIR/docs/literate-erlang"
-mkdir -p "$OUTPUT_DIR"
-
-# --- Function to extract module documentation ---
-extract_module_doc() {
-  local file="$1"
-  local in_doc=false
-  local doc_content=""
-
-  while IFS= read -r line; do
-    if [[ "$line" =~ ^%%%[[:space:]]?(.*)$ ]]; then
-      in_doc=true
-      doc_content+="${BASH_REMATCH[1]}"$'\n'
-    elif [[ "$line" =~ ^%%[[:space:]]?(@doc[[:space:]])?(.*)$ ]] && [ "$in_doc" = true ]; then
-      # Extract content after @doc if present
-      doc_content+="${BASH_REMATCH[2]}"$'\n'
-    elif [[ ! "$line" =~ ^%% ]] && [ "$in_doc" = true ]; then
-      break
-    fi
-  done < "$file"
-
-  # Clean up @doc prefixes and convert edocs syntax to markdown, preserving paragraph breaks
-  echo "$doc_content" | \
-    sed 's/^@doc$//' | \
-    sed 's/^@doc //' | \
-    sed 's/^@end$//' | \
-    sed 's/^@author /**Author:** /' | \
-    sed 's/^@copyright /**Copyright:** /' | \
-    sed 's/^---*$//' | \
-    sed "s/\`\([^']*\)'/\`\1\`/g" | \
-    awk '
-    BEGIN { prev_empty = 0 }
-    /^[[:space:]]*$/ {
-      if (!prev_empty) {
-        print ""
-        prev_empty = 1
-      }
-      next
-    }
-    {
-      print $0
-      prev_empty = 0
-    }'
-}
-
-# --- Function to extract function documentation ---
-extract_function_doc() {
-  local content="$1"
-
-  # Clean the content and separate into sections
-  local cleaned_content=$(echo "$content" | \
-    sed 's/^%% *//' | \
-    sed 's/^% *//' | \
-    sed 's/^@doc$//' | \
-    sed 's/^@doc //' | \
-    sed 's/^@end$//' | \
-    sed "s/\`\([^']*\)'/\`\1\`/g")
-
-  # Initialize variables for different sections
-  local description=""
-  local params=""
-  local returns=""
-  local in_params=false
-  local in_returns=false
-  local current_param=""
-
-  while IFS= read -r line; do
-    if [[ "$line" =~ ^@param[[:space:]]+([^[:space:]]+)[[:space:]]+(.*)$ ]]; then
-      # Save any current param before starting new one
-      if [ -n "$current_param" ]; then
-        params+="- ${current_param}"$'\n'
-      fi
-      # Start new param with code-formatted name
-      current_param="\`${BASH_REMATCH[1]}\` - ${BASH_REMATCH[2]}"
-      in_params=true
-      in_returns=false
-    elif [[ "$line" =~ ^@returns?[[:space:]]+(.*)$ ]]; then
-      # Save any current param before starting returns
-      if [ -n "$current_param" ]; then
-        params+="- ${current_param}"$'\n'
-        current_param=""
-      fi
-      returns="${BASH_REMATCH[1]}"
-      in_params=false
-      in_returns=true
-    elif [[ "$line" =~ ^@author[[:space:]]+(.*)$ ]]; then
-      # Skip author lines for now
-      continue
-    elif [[ "$line" =~ ^@copyright[[:space:]]+(.*)$ ]]; then
-      # Skip copyright lines for now
-      continue
-    elif [[ "$line" =~ ^[[:space:]]*$ ]]; then
-      # Empty line - add to current section
-      if [ "$in_params" = true ] && [ -n "$current_param" ]; then
-        current_param+=" "
-      elif [ "$in_returns" = true ]; then
-        returns+=" "
-      elif [ "$in_params" = false ] && [ "$in_returns" = false ]; then
-        description+="$line"$'\n'
-      fi
-    else
-      # Regular content line
-      if [ "$in_params" = true ]; then
-        # Continue current param description - clean up whitespace
-        local cleaned_line=$(echo "$line" | sed 's/^[[:space:]]*//' | sed 's/[[:space:]]*$//')
-        current_param+=" $cleaned_line"
-      elif [ "$in_returns" = true ]; then
-        # Continue returns description - clean up whitespace
-        local cleaned_line=$(echo "$line" | sed 's/^[[:space:]]*//' | sed 's/[[:space:]]*$//')
-        returns+=" $cleaned_line"
-      else
-        # Part of main description
-        description+="$line"$'\n'
-      fi
-    fi
-  done <<< "$cleaned_content"
-
-  # Save any remaining param
-  if [ -n "$current_param" ]; then
-    params+="- ${current_param}"$'\n'
-  fi
-
-  # Build formatted output
-  local output=""
-
-  # Add description (clean up extra newlines)
-  if [ -n "$description" ]; then
-    output+=$(echo "$description" | awk '
-      BEGIN { prev_empty = 0 }
-      /^[[:space:]]*$/ {
-        if (!prev_empty) {
-          print ""
-          prev_empty = 1
-        }
-        next
-      }
-      {
-        print $0
-        prev_empty = 0
-      }')
-    output+=$'\n'
-  fi
-
-  # Add parameters section
-  if [ -n "$params" ]; then
-    output+=$'\n'"#### Parameters"$'\n'$'\n'
-    output+="$params"$'\n'
-  fi
-
-  # Add returns section
-  if [ -n "$returns" ]; then
-    output+=$'\n'"#### Returns"$'\n'$'\n'
-    # Clean up returns text and parse return type vs description
-    local cleaned_returns=$(echo "$returns" | sed 's/[[:space:]]\+/ /g' | sed 's/^[[:space:]]*//' | sed 's/[[:space:]]*$//')
-
-    # Try to extract and format return types - always use bullet format
-    # Check for multiple return patterns first (most comprehensive)
-    if [[ "$cleaned_returns" =~ , ]]; then
-      # Try to format multiple return patterns - handle nested braces properly
-      # Use a more robust approach for nested structures
-      local formatted_returns=$(echo "$cleaned_returns" | \
-        perl -pe 's/(\{(?:[^{}]++|(?1))*\})/`$1`/g' 2>/dev/null || \
-        echo "$cleaned_returns" | sed -E 's/(\{[^{}]*(\{[^}]*\}[^{}]*)*\})/`\1`/g')
-      formatted_returns=$(echo "$formatted_returns" | sed -E 's/([[:space:]]|^)(not_found|error|ok|true|false)([[:space:]]|$)/\1`\2`\3/g')
-      output+="- $formatted_returns"$'\n'
-    elif [[ "$cleaned_returns" =~ ^\{ ]]; then
-      # Complex return type - use better pattern to handle nested braces
-      # Extract the complete tuple including nested structures
-      local temp_string="$cleaned_returns"
-      local brace_count=0
-      local char_pos=0
-      local return_type=""
-
-      # Parse character by character to find matching braces
-      while [ $char_pos -lt ${#temp_string} ]; do
-        local char="${temp_string:$char_pos:1}"
-        return_type+="$char"
-
-        if [ "$char" = "{" ]; then
-          ((brace_count++))
-        elif [ "$char" = "}" ]; then
-          ((brace_count--))
-          if [ $brace_count -eq 0 ]; then
-            break
-          fi
-        fi
-        ((char_pos++))
-      done
-
-      # Extract description after the return type
-      local return_desc="${temp_string:$((char_pos + 1))}"
-      return_desc=$(echo "$return_desc" | sed 's/^[[:space:]]*//')
-
-      if [ -n "$return_desc" ]; then
-        output+="- \`$return_type\` $return_desc"$'\n'
-      else
-        output+="- \`$return_type\`"$'\n'
-      fi
-    elif [[ "$cleaned_returns" =~ ^(true|false)[[:space:]]+(.*) ]]; then
-      # Boolean return types
-      local return_type="${BASH_REMATCH[1]}"
-      local return_desc="${BASH_REMATCH[2]}"
-      output+="- \`$return_type\` $return_desc"$'\n'
-    elif [[ "$cleaned_returns" =~ ^(ok|error|not_found)[[:space:]]+(.*) ]]; then
-      # Simple atom return types
-      local return_type="${BASH_REMATCH[1]}"
-      local return_desc="${BASH_REMATCH[2]}"
-      output+="- \`$return_type\` $return_desc"$'\n'
-    else
-      output+="- $cleaned_returns"$'\n'
-    fi
-  fi
-
-  echo "$output"
-}
-
-# --- Function to process a single Erlang file ---
-process_erlang_file() {
-  local src_file="$1"
-  local module_name=$(basename "$src_file" .erl)
-  local output_file="$OUTPUT_DIR/${module_name}.erl.md"
-
-  log_verbose "Processing $module_name"
-
-  # Start the literate Erlang document with cleaner format
-  cat > "$output_file" <> "$output_file"
-    echo "" >> "$output_file"
-    echo "---" >> "$output_file"
-    echo "" >> "$output_file"
-  fi
-
-  # Add module exports in a clean format
-  local exports=$(grep -E "^-export\(" "$src_file" | sed 's/-export(\[//' | sed 's/\]).*//' | tr ',' '\n' | sed 's/^[[:space:]]*//' | sed 's/[[:space:]]*$//' | sort -u)
-
-  if [ -n "$exports" ]; then
-    echo "## Exported Functions" >> "$output_file"
-    echo "" >> "$output_file"
-
-    # Create a proper bulleted list for exports
-    while IFS= read -r export; do
-      if [[ "$export" =~ ^[a-z] ]]; then
-        echo "- \`$export\`" >> "$output_file"
-      fi
-    done <<< "$exports"
-
-    echo "" >> "$output_file"
-    echo "---" >> "$output_file"
-    echo "" >> "$output_file"
-  fi
-
-  # Process functions
-  local in_function=false
-  local in_spec=false
-  local in_doc_comment=false
-  local current_function=""
-  local function_content=""
-  local spec_content=""
-  local doc_content=""
-  local previous_doc_content=""
-  local functions_written=0
-
-  while IFS= read -r line; do
-    # Check for doc comments (before functions)
-    if [[ "$line" =~ ^%%[[:space:]]?@doc[[:space:]](.*)$ ]] ||
-       [[ "$line" =~ ^%%[[:space:]]?@doc$ ]]; then
-      in_doc_comment=true
-      if [[ "$line" =~ @doc[[:space:]](.*)$ ]]; then
-        doc_content+="${BASH_REMATCH[1]}"$'\n'
-      fi
-      continue
-    fi
-
-    # Continue collecting doc comment lines
-    if [ "$in_doc_comment" = true ] && [[ "$line" =~ ^%% ]]; then
-      # Remove %% prefix and collect
-      local cleaned_line=$(echo "$line" | sed 's/^%%[[:space:]]*//')
-      doc_content+="$cleaned_line"$'\n'
-      continue
-    fi
-
-    # Check for -spec
-    if [[ "$line" =~ ^-spec[[:space:]] ]]; then
-      in_spec=true
-      spec_content="$line"$'\n'
-      in_doc_comment=false
-      continue
-    fi
-
-    # Continue collecting spec if in multi-line spec
-    if [ "$in_spec" = true ]; then
-      spec_content+="$line"$'\n'
-      if [[ "$line" =~ \.[[:space:]]*$ ]]; then
-        in_spec=false
-      fi
-      continue
-    fi
-
-    # Check for function definition
-    if [[ "$line" =~ ^([a-z][a-z0-9_]*)[[:space:]]*\( ]]; then
-      # If we were already in a function, write it out
-      if [ -n "$current_function" ] && [ -n "$function_content" ]; then
-        write_clean_function "$output_file" "$current_function" "$spec_content" "$previous_doc_content" "$function_content" "$functions_written"
-        ((functions_written++))
-      fi
-
-      # Start new function - preserve current doc_content for this function
-      current_function="${BASH_REMATCH[1]}"
-      function_content="$line"$'\n'
-      in_function=true
-      in_doc_comment=false
-      previous_doc_content="$doc_content"
-      spec_content=""
-      doc_content=""
-      continue
-    fi
-
-    # Continue collecting function content
-    if [ "$in_function" = true ]; then
-      function_content+="$line"$'\n'
-      # Check for function end (period at end of line, not in string or comment)
-      if [[ "$line" =~ ^[[:space:]]*end\.[[:space:]]*$ ]] ||
-         ([[ "$line" =~ \.[[:space:]]*$ ]] && ! [[ "$line" =~ \" ]] && ! [[ "$line" =~ ^[[:space:]]*% ]]); then
-        in_function=false
-        write_clean_function "$output_file" "$current_function" "$spec_content" "$previous_doc_content" "$function_content" "$functions_written"
-        ((functions_written++))
-        current_function=""
-        function_content=""
-        spec_content=""
-        previous_doc_content=""
-      fi
-    else
-      # Only reset doc content if we hit a non-comment, non-spec, non-function line
-      if ! [[ "$line" =~ ^% ]] && ! [[ "$line" =~ ^-spec ]] && [ "$in_doc_comment" = false ]; then
-        doc_content=""
-      fi
-      in_doc_comment=false
-    fi
-  done < "$src_file"
-
-  # Write any remaining function
-  if [ -n "$current_function" ] && [ -n "$function_content" ]; then
-    write_clean_function "$output_file" "$current_function" "$spec_content" "$previous_doc_content" "$function_content" "$functions_written"
-  fi
-
-  # Add footer
-  echo "" >> "$output_file"
-  echo "---" >> "$output_file"
-  echo "" >> "$output_file"
-  echo "*Generated from [$module_name.erl]($GITHUB_BASE_URL/${module_name}.erl)*" >> "$output_file"
-}
-
-# --- Function to write a function section with cleaner format ---
-write_clean_function() {
-  local output_file="$1"
-  local func_name="$2"
-  local spec="$3"
-  local doc="$4"
-  local code="$5"
-  local func_num="$6"
-
-  # Add section separator for better readability (except for first function)
-  if [ "$func_num" -gt 0 ]; then
-    echo "" >> "$output_file"
-  fi
-
-  echo "### $func_name" >> "$output_file"
-  echo "" >> "$output_file"
-
-  # Add documentation if present
-  if [ -n "$doc" ]; then
-    local cleaned_doc=$(extract_function_doc "$doc")
-    if [ -n "$cleaned_doc" ]; then
-      echo "$cleaned_doc" >> "$output_file"
-      echo "" >> "$output_file"
-    fi
-  fi
-
-  # Add spec if present (in a more compact format)
-  if [ -n "$spec" ] && [ "$spec" != $'\n' ]; then
-    echo '```erlang' >> "$output_file"
-    echo -n "$spec" | sed '/^[[:space:]]*$/d' >> "$output_file"
-    echo '```' >> "$output_file"
-    echo "" >> "$output_file"
-  fi
-
-  # Add Function subheader before the implementation
-  echo "#### Function" >> "$output_file"
-  echo "" >> "$output_file"
-
-  # Add implementation with inline comment processing
-  write_code_with_inline_comments "$output_file" "$code"
-}
-
-# --- Function to write code blocks with inline comments breaking them ---
-write_code_with_inline_comments() {
-  local output_file="$1"
-  local code="$2"
-
-  local in_code_block=false
-  local in_comment_block=false
-  local comment_lines=()
-
-  while IFS= read -r line; do
-    # Check if this is an inline comment
-    # Match lines that have whitespace followed by a single % (not %% or %%%)
-    if [[ "$line" =~ ^[[:space:]]*%([[:space:]]|$) ]] && ! [[ "$line" =~ ^[[:space:]]*%% ]]; then
-      # If we were in a code block, close it
-      if [ "$in_code_block" = true ]; then
-        echo '```' >> "$output_file"
-        echo "" >> "$output_file"
-        in_code_block=false
-      fi
-
-      # Extract comment text - remove leading whitespace and %
-      comment_text=$(echo "$line" | sed 's/^[[:space:]]*%//')
-
-      # Remove leading space after % if present
-      comment_text=$(echo "$comment_text" | sed 's/^[[:space:]]//')
-
-      # Convert backtick-quote pairs to proper markdown backticks
-      comment_text=$(echo "$comment_text" | sed "s/\`\([^']*\)'/\`\1\`/g")
-
-      # Add to comment lines array
-      comment_lines+=("$comment_text")
-      in_comment_block=true
-    else
-      # Regular code line (including %% and %%% doc comments)
-      # If we were collecting comments, write them out first
-      if [ "$in_comment_block" = true ]; then
-        # Write each comment line separately to preserve structure
-        for comment_line in "${comment_lines[@]}"; do
-          echo "$comment_line" >> "$output_file"
-        done
-        echo "" >> "$output_file"
-        echo '```erlang' >> "$output_file"
-        in_comment_block=false
-        comment_lines=()
-        in_code_block=true
-      elif [ "$in_code_block" = false ]; then
-        echo '```erlang' >> "$output_file"
-        in_code_block=true
-      fi
-      echo "$line" >> "$output_file"
-    fi
-  done <<< "$code"
-
-  # Handle any remaining comment lines
-  if [ "$in_comment_block" = true ]; then
-    for comment_line in "${comment_lines[@]}"; do
-      echo "$comment_line" >> "$output_file"
-    done
-    echo "" >> "$output_file"
-  fi
-
-  # Close any remaining code block
-  if [ "$in_code_block" = true ]; then
-    echo '```' >> "$output_file"
-    echo "" >> "$output_file"
-  fi
-}
-
-# --- Main processing loop ---
-log_step "Processing Erlang source files"
-
-# Count total files
-total_files=$(find "$ROOT_DIR/src" -name "*.erl" -type f | wc -l)
-processed=0
-
-# Process each .erl file in src directory
-find "$ROOT_DIR/src" -name "*.erl" -type f | sort | while read -r erl_file; do
-  ((processed++))
-  module_name=$(basename "$erl_file" .erl)
-  log_info "[$processed/$total_files] Processing $module_name.erl"
-  process_erlang_file "$erl_file"
-done
-
-log_success "Processed $total_files Erlang files"
-
-# --- Generate index file ---
-log_step "Generating index file"
-
-cat > "$OUTPUT_DIR/README.md" <> "$OUTPUT_DIR/README.md"
-echo "|--------|-------------|" >> "$OUTPUT_DIR/README.md"
-
-find "$OUTPUT_DIR" -name "*.erl.md" -type f | sort | while read -r md_file; do
-  module_name=$(basename "$md_file" .erl.md)
-  # Try to extract first line of module doc as description
-  first_line=$(grep -m 1 -A 1 "^# $module_name" "$md_file" | tail -1 | head -c 100)
-  if [ "$first_line" = "[View source on GitHub]"* ] || [ -z "$first_line" ]; then
-    first_line="Erlang module"
-  fi
-  echo "| [$module_name](./${module_name}.erl.md) | $first_line... |" >> "$OUTPUT_DIR/README.md"
-done
-
-cat >> "$OUTPUT_DIR/README.md" < e.trim())
-                        .filter(Boolean);
+                // Exports - handle multi-line exports
+                if (REGEX.EXPORT_PREFIX.test(trimmed)) {
+                    const exportLines = this.collectMultiLineConstruct(i, '[', ']');
+                    const fullExport = exportLines.join(' ');
+                    const exportMatch = fullExport.match(REGEX.EXPORT);
+                    if (exportMatch) {
+                        const exports = exportMatch[1]
+                            .split(',')
+                            .map(e => e.trim())
+                            .filter(Boolean);
+                        this.moduleInfo.exports.push(...exports);
+                    }
+                    i += exportLines.length - 1;
+                    continue;
+                }
+
+                // Includes
+                const includeMatch = trimmed.match(REGEX.INCLUDE);
+                if (includeMatch) {
+                    this.moduleInfo.includes.push({
+                        file: includeMatch[1],
+                        line: trimmed
+                    });
+                    continue;
+                }
+
+                // Defines
+                const defineMatch = trimmed.match(REGEX.DEFINE);
+                if (defineMatch) {
+                    this.moduleInfo.defines.push({
+                        name: defineMatch[1],
+                        value: defineMatch[2] || '',
+                        line: trimmed
+                    });
+                    continue;
+                }
+
+                // Behaviours
+                const behaviourMatch = trimmed.match(REGEX.BEHAVIOUR);
+                if (behaviourMatch) {
+                    this.moduleInfo.behaviours.push(behaviourMatch[1]);
+                    continue;
+                }
+
+                // Records
+                if (REGEX.RECORD.test(trimmed)) {
+                    const recordLines = this.collectMultiLineConstruct(i, '{', '}');
+                    const recordMatch = trimmed.match(REGEX.RECORD);
+                    if (recordMatch) {
+                        this.moduleInfo.records.push({
+                            name: recordMatch[1],
+                            definition: recordLines.join('\n')
+                        });
+                    }
+                    i += recordLines.length - 1;
+                    continue;
+                }
+
+                // Types
+                const typeMatch = trimmed.match(REGEX.TYPE);
+                if (typeMatch) {
+                    const typeLines = this.collectMultiLineConstruct(i, '(', ')');
+                    this.moduleInfo.types.push({
+                        name: typeMatch[1],
+                        definition: typeLines.join('\n')
+                    });
+                    i += typeLines.length - 1;
+                    continue;
+                }
+
+                // Specs (collect but don't process here)
+                if (REGEX.SPEC.test(trimmed)) {
+                    const specLines = this.collectMultiLineConstruct(i, '(', ')');
+                    const specMatch = trimmed.match(REGEX.SPEC);
+                    if (specMatch) {
+                        this.moduleInfo.specs.push({
+                            function: specMatch[1],
+                            definition: specLines.join('\n')
+                        });
+                    }
+                    i += specLines.length - 1;
+                    continue;
+                }
+
+                // Other attributes
+                const attrMatch = trimmed.match(REGEX.ATTRIBUTE);
+                if (attrMatch) {
+                    this.moduleInfo.attributes.push({
+                        name: attrMatch[1],
+                        line: trimmed
+                    });
+                    continue;
                 }
-                continue;
             }
 
-            // Module documentation - check for percent first
-            if (firstChar === CHAR_CODES.PERCENT && trimmed.startsWith(STRINGS.TRIPLE_PERCENT)) {
+            // Module documentation - only collect at the beginning of the file
+            if (firstChar === CHAR_CODES.PERCENT && trimmed.startsWith(STRINGS.TRIPLE_PERCENT) && !moduleDocCollected) {
                 inModuleDoc = true;
                 let docLine = trimmed.substring(3).trim();
                 docLine = docLine.replace(REGEX.REMOVE_DOC, STRINGS.EMPTY);
+
+                // Check for termination pattern (%%% ''')
+                if (docLine === "'''") {
+                    inModuleDoc = false; // End module documentation processing
+                    moduleDocCollected = true; // Mark as collected
+                    continue; // Continue processing rest of file
+                }
+
                 moduleDoc.push(docLine);
             } else if (inModuleDoc && (firstChar === CHAR_CODES.PERCENT || firstChar === CHAR_CODES.DASH)) {
-                break;
+                inModuleDoc = false;
+                moduleDocCollected = true; // Mark as collected
+                // Don't break - continue processing this line for module declarations
+                i--; // Re-process this line outside module doc context
             }
         }
 
-        this.moduleInfo.doc = this.cleanDocumentation(moduleDoc.join(STRINGS.NEWLINE));
+        this.moduleInfo.doc = this.cleanDocumentation(this.fixModuleDocCodeBlocks(moduleDoc.join(STRINGS.NEWLINE)));
+    }
+
+    collectMultiLineConstruct(startIdx, openChar, closeChar) {
+        const lines = [];
+        let depth = 0;
+        let found = false;
+
+        for (let i = startIdx; i < this.lines.length; i++) {
+            const line = this.lines[i];
+            lines.push(line);
+
+            for (let j = 0; j < line.length; j++) {
+                const char = line[j];
+                if (char === openChar) {
+                    depth++;
+                    found = true;
+                } else if (char === closeChar && found) {
+                    depth--;
+                    if (depth === 0) {
+                        return lines;
+                    }
+                }
+            }
+
+            // Safety check for runaway constructs
+            if (i - startIdx > 100) break;
+        }
+
+        return lines;
     }
 
     processFunctions() {
         const linesLength = this.lines.length;
+        const processedFunctions = new Set();
+        const commentedCodeBlocks = [];
+        let currentCommentedBlock = [];
+        let inCommentedBlock = false;
 
         for (let i = 0; i < linesLength; i++) {
             const line = this.lines[i];
             const trimmed = line.trim();
 
-            if (!trimmed) continue;
+            if (!trimmed) {
+                if (inCommentedBlock) {
+                    currentCommentedBlock.push(line);
+                }
+                continue;
+            }
+
+            // Check for comment-style section headers
+            if (trimmed.startsWith('%%%') && trimmed.match(/^%%%-{10,}$/)) {
+                // This is a dash line, check if next line is a header and line after that is also dashes
+                if (i + 1 < linesLength && i + 2 < linesLength) {
+                    const nextLine = this.lines[i + 1].trim();
+                    const afterLine = this.lines[i + 2].trim();
+
+                    if (nextLine.startsWith('%%%') && !nextLine.match(/^%%%-{10,}$/) &&
+                        afterLine.match(/^%%%-{10,}$/)) {
+                        // Extract header text
+                        const headerText = nextLine.replace(/^%%%\s*/, '').trim();
+                        if (headerText) {
+                            this.sections.push({
+                                type: 'section_header',
+                                title: headerText,
+                                lineNumber: i + 2 // Store line number for sorting later
+                            });
+                        }
+                        // Skip the next two lines
+                        i += 2;
+                        continue;
+                    }
+                }
+            }
+
+            // Check for commented-out code blocks (lines starting with % but containing code patterns)
+            if (trimmed.startsWith('%') && !trimmed.startsWith('%%')) {
+                const uncommented = trimmed.substring(1).trim();
+                if (this.looksLikeCode(uncommented)) {
+                    if (!inCommentedBlock) {
+                        inCommentedBlock = true;
+                        currentCommentedBlock = [];
+                    }
+                    currentCommentedBlock.push(line);
+                    continue;
+                } else if (inCommentedBlock) {
+                    // End of commented code block
+                    if (currentCommentedBlock.length > 0) {
+                        commentedCodeBlocks.push({
+                            type: 'commented_code',
+                            lines: [...currentCommentedBlock],
+                            startLine: i - currentCommentedBlock.length + 1
+                        });
+                    }
+                    inCommentedBlock = false;
+                    currentCommentedBlock = [];
+                }
+            } else if (inCommentedBlock) {
+                // End of commented code block
+                if (currentCommentedBlock.length > 0) {
+                    commentedCodeBlocks.push({
+                        type: 'commented_code',
+                        lines: [...currentCommentedBlock],
+                        startLine: i - currentCommentedBlock.length + 1
+                    });
+                }
+                inCommentedBlock = false;
+                currentCommentedBlock = [];
+            }
 
             // Check for start of function documentation block
             if (REGEX.DOC_BLOCK_START.test(trimmed)) {
@@ -203,16 +419,31 @@ class ErlangLiterateParser {
                 continue;
             }
 
+            // Check for conditional compilation directives
+            if (trimmed.startsWith('-ifdef(') || trimmed.startsWith('-ifndef(') ||
+                trimmed.startsWith('-else') || trimmed.startsWith('-endif')) {
+                this.addDirectiveToOutput(line, i);
+                continue;
+            }
+
             // Check for function start
             const funcMatch = trimmed.match(REGEX.FUNCTION);
             if (funcMatch && !this.currentState.inFunction) {
+                const functionName = this.currentState.specFunctionName || funcMatch[1];
+                processedFunctions.add(functionName);
+
                 if (this.currentState.pendingDoc) {
                     this.currentState.functionDoc = this.currentState.pendingDoc;
                     this.currentState.pendingDoc = STRINGS.EMPTY;
+                    this.startFunction(functionName, i);
+                } else {
+                    // This is an undocumented function
+                    this.startUndocumentedFunction(functionName, i);
+                    // Skip ahead to avoid reprocessing
+                    while (i < linesLength && !this.isFunctionEnd(this.lines[i])) {
+                        i++;
+                    }
                 }
-
-                const functionName = this.currentState.specFunctionName || funcMatch[1];
-                this.startFunction(functionName);
                 this.currentState.specFunctionName = STRINGS.EMPTY;
             }
 
@@ -225,9 +456,21 @@ class ErlangLiterateParser {
             }
         }
 
+        // Handle remaining commented code block
+        if (inCommentedBlock && currentCommentedBlock.length > 0) {
+            commentedCodeBlocks.push({
+                type: 'commented_code',
+                lines: currentCommentedBlock,
+                startLine: linesLength - currentCommentedBlock.length + 1
+            });
+        }
+
         if (this.currentState.inFunction) {
             this.endFunction();
         }
+
+        // Store commented code blocks for later inclusion
+        this.commentedCodeBlocks = commentedCodeBlocks;
     }
 
     collectFunctionDoc(startIdx) {
@@ -316,9 +559,10 @@ class ErlangLiterateParser {
         this.currentState.functionSpec = specLines.join(STRINGS.NEWLINE);
     }
 
-    startFunction(name) {
+    startFunction(name, lineNumber = 0) {
         this.currentState.inFunction = true;
         this.currentState.functionName = name;
+        this.currentState.functionLineNumber = lineNumber;
         this.currentState.functionLines.length = 0;
         this.currentState.braceDepth = 0;
         this.currentState.parenDepth = 0;
@@ -365,12 +609,15 @@ class ErlangLiterateParser {
             name: this.currentState.functionName,
             spec: this.currentState.functionSpec,
             doc: this.currentState.functionDoc,
-            body: processedBody
+            body: processedBody,
+            hasImplementation: true,
+            lineNumber: this.currentState.functionLineNumber
         });
 
         // Reset state efficiently
         this.currentState.inFunction = false;
         this.currentState.functionName = STRINGS.EMPTY;
+        this.currentState.functionLineNumber = 0;
         this.currentState.functionSpec = STRINGS.EMPTY;
         this.currentState.functionDoc = STRINGS.EMPTY;
         this.currentState.functionLines.length = 0;
@@ -378,6 +625,104 @@ class ErlangLiterateParser {
         this.currentState.inlineDocTags.length = 0;
     }
 
+    startUndocumentedFunction(name, startLine) {
+        const functionLines = [];
+        let braceDepth = 0;
+        let parenDepth = 0;
+        let i = startLine;
+
+        // Collect the entire function
+        while (i < this.lines.length) {
+            const line = this.lines[i];
+            functionLines.push(line);
+
+            // Track depth
+            for (let j = 0; j < line.length; j++) {
+                const charCode = line.charCodeAt(j);
+                switch (charCode) {
+                    case CHAR_CODES.OPEN_BRACE:
+                    case CHAR_CODES.OPEN_BRACKET:
+                        braceDepth++;
+                        break;
+                    case CHAR_CODES.CLOSE_BRACE:
+                    case CHAR_CODES.CLOSE_BRACKET:
+                        braceDepth--;
+                        break;
+                    case CHAR_CODES.OPEN_PAREN:
+                        parenDepth++;
+                        break;
+                    case CHAR_CODES.CLOSE_PAREN:
+                        parenDepth--;
+                        break;
+                }
+            }
+
+            // Check if function ended
+            const trimmed = line.trim();
+            if (braceDepth === 0 && parenDepth === 0 &&
+                trimmed.charCodeAt(trimmed.length - 1) === CHAR_CODES.DOT &&
+                trimmed.charCodeAt(0) !== CHAR_CODES.PERCENT) {
+                break;
+            }
+
+            i++;
+        }
+
+        // Find corresponding spec
+        const spec = this.moduleInfo.specs.find(s => s.function === name);
+
+        this.undocumentedFunctions.push({
+            name,
+            spec: spec ? spec.definition : null,
+            body: this.processFunctionBody(functionLines),
+            lines: functionLines
+        });
+    }
+
+    // Helper method to detect if a line looks like code
+    looksLikeCode(line) {
+        if (!line || line.length === 0) return false;
+
+        // Check for common code patterns
+        const codePatterns = [
+            /^[a-z][a-z0-9_]*\s*\(/,           // function calls: function(
+            /^[A-Z][a-zA-Z0-9_]*\s*=/,         // variable assignments: Var =
+            /^\s*\{/,                          // tuples/records: {
+            /^\s*\[/,                          // lists: [
+            /^\s*case\s+/,                     // case statements
+            /^\s*if\s+/,                       // if statements
+            /^\s*catch\s+/,                    // catch blocks
+            /^\s*after\s+/,                    // after blocks
+            /^\s*end[,.]?\s*$/,                // end keywords
+            /^\s*ok\s*$/,                      // ok atoms
+            /^\s*true\s*$/,                    // boolean atoms
+            /^\s*false\s*$/,                   // boolean atoms
+            /->\s*$/,                          // arrow operators
+            /^\s*\?[A-Z]/,                     // macro usage
+            /^\s*[a-z_][a-z0-9_]*\s*\(/,      // function definitions
+            /^\s*\d+\s*$/,                     // numbers
+            /^\s*".*"\s*$/,                   // strings
+            /^\s*<<.*>>\s*$/,                  // binaries
+            /^\s*#\w+/,                        // record syntax
+            /^\s*receive\s+/,                  // receive blocks
+            /^\s*spawn/,                       // spawn calls
+            /^\s*gen_server:/,                 // gen_server calls
+            /^\s*supervisor:/,                 // supervisor calls
+            /\bmatch\b|\bguard\b|\btry\b|\bfun\b/, // erlang keywords
+        ];
+
+        return codePatterns.some(pattern => pattern.test(line));
+    }
+
+    // Helper method to add conditional compilation directives
+    addDirectiveToOutput(line, lineNumber) {
+        this.conditionalDirectives.push({
+            line: line.trim(),
+            lineNumber: lineNumber + 1,
+            type: 'conditional_compilation'
+        });
+    }
+
     processFunctionBody(lines) {
         const segments = [];
         const currentCode = [];
@@ -399,7 +744,7 @@ class ErlangLiterateParser {
                 if (parsed.params.length > 0) {
                     docParts.push(STRINGS.PARAMETERS_HEADER, STRINGS.EMPTY);
                     parsed.params.forEach(p => {
-                        const desc = this.cleanDocumentation(p.description || STRINGS.EMPTY);
+                        const desc = this.cleanDocumentation(p.description || STRINGS.EMPTY, true);
                         docParts.push(`- ${STRINGS.BACKTICK}${p.name}${STRINGS.BACKTICK} - ${desc}`);
                     });
                     docParts.push(STRINGS.EMPTY);
@@ -467,12 +812,139 @@ class ErlangLiterateParser {
         return text.replace(REGEX.BACKTICK_QUOTE, `${STRINGS.BACKTICK}$1${STRINGS.BACKTICK}`).trim();
     }
 
-    cleanDocumentation(text) {
+    fixCodeBlocks(text) {
+        if (!text) return text;
+
+        const lines = text.split(STRINGS.NEWLINE);
+        const result = [];
+        let inCodeBlock = false;
+
+        for (let i = 0; i < lines.length; i++) {
+            const line = lines[i];
+            const trimmed = line.trim();
+
+            // Check if this is a code block delimiter
+            if (REGEX.CODE_FENCE.test(trimmed)) {
+                if (!inCodeBlock && trimmed === '```') {
+                    // Start of unmarked code block - check if it's empty first
+                    let blockContent = [];
+                    let j = i + 1;
+
+                    // Collect content until closing ```
+                    while (j < lines.length) {
+                        const contentLine = lines[j];
+                        if (contentLine.trim() === '```') {
+                            break;
+                        }
+                        blockContent.push(contentLine);
+                        j++;
+                    }
+
+                    // If block is empty, skip it entirely
+                    const hasContent = blockContent.some(line => line.trim() !== '');
+                    if (!hasContent) {
+                        // Skip the empty code block entirely
+                        i = j; // Skip to after the closing ```
+                        continue;
+                    }
+
+                    // Determine appropriate language based on content
+                    const nextLine = blockContent.length > 0 ? blockContent[0].trim() : '';
+                    let language = 'text'; // default
+
+                    // Heuristics to determine language based on content
+                    if (nextLine.startsWith('/') || nextLine.includes('Parameters:') ||
+                        nextLine.includes('- `') || nextLine.includes('(optional)')) {
+                        language = 'text';
+                    } else if (nextLine.includes('#{') || nextLine.includes('<<') ||
+                              nextLine.includes('->') || nextLine.match(/^[a-z_]+\(/)) {
+                        language = 'erlang';
+                    }
+
+                    // For text blocks, add ignore attribute to prevent mdBook testing
+                    if (language === 'text') {
+                        result.push('```text,ignore');
+                    } else {
+                        result.push('```' + language);
+                    }
+                    inCodeBlock = true;
+                } else if (inCodeBlock && trimmed === '```') {
+                    // End of code block
+                    result.push(line);
+                    inCodeBlock = false;
+                } else {
+                    // Already has language specifier or other case
+                    result.push(line);
+                    if (trimmed.startsWith('```')) {
+                        inCodeBlock = !inCodeBlock;
+                    }
+                }
+            } else {
+                result.push(line);
+            }
+        }
+
+        return result.join(STRINGS.NEWLINE);
+    }
+
+    fixModuleDocCodeBlocks(text) {
+        if (!text) return STRINGS.EMPTY;
+
+        const lines = text.split(STRINGS.NEWLINE);
+        const result = [];
+        let inCodeBlock = false;
+        let codeBlockStart = -1;
+
+        for (let i = 0; i < lines.length; i++) {
+            const line = lines[i];
+            const trimmed = line.trim();
+
+            // Check for code block start
+            if (trimmed === '```' && !inCodeBlock) {
+                inCodeBlock = true;
+                codeBlockStart = i;
+                result.push(line);
+                continue;
+            }
+
+            // Check for code block end
+            if (trimmed === '```' && inCodeBlock) {
+                inCodeBlock = false;
+                result.push(line);
+                continue;
+            }
+
+            // Check for implicit code block end (new section starting with /)
+            if (inCodeBlock && trimmed.startsWith('/')) {
+                // Close the previous code block
+                result.push('```');
+                result.push('');
+                inCodeBlock = false;
+            }
+
+            result.push(line);
+        }
+
+        // Close any unclosed code block at the end
+        if (inCodeBlock) {
+            result.push('```');
+        }
+
+        return result.join(STRINGS.NEWLINE);
+    }
+
+    cleanDocumentation(text, skipCodeBlockFix = false) {
         if (!text) return STRINGS.EMPTY;
 
         text = text.replace(REGEX.PRE_TAG, (match, content) => this.formatPreContent(content));
 
-        const cleaned = text
+        // Only fix unmarked code blocks for module-level documentation
+        // Skip for function documentation to avoid excessive text,ignore blocks
+        if (!skipCodeBlockFix) {
+            text = this.fixCodeBlocks(text);
+        }
+
+        let cleaned = text
             .replace(REGEX.BACKTICK_QUOTE, `${STRINGS.BACKTICK}$1${STRINGS.BACKTICK}`)
             .replace(REGEX.HTML_ENTITIES_LT, '<<')
             .replace(REGEX.HTML_ENTITIES_GT, '>>')
@@ -484,9 +956,127 @@ class ErlangLiterateParser {
         return this.reflowNumberedLists(cleaned);
     }
 
+    convertCommentStyleHeaders(text) {
+        if (!text) return text;
+
+        const lines = text.split(STRINGS.NEWLINE);
+        const result = [];
+
+        for (let i = 0; i < lines.length; i++) {
+            const line = lines[i];
+            const trimmed = line.trim();
+
+            // Look for pattern: dashes followed by text followed by dashes
+            if (trimmed.match(/^-{10,}$/)) {
+                // This is a dash line, check if next line is a header
+                if (i + 1 < lines.length) {
+                    const nextLine = lines[i + 1];
+                    const nextTrimmed = nextLine.trim();
+
+                    // Check if the line after is also dashes (closing the header)
+                    if (i + 2 < lines.length && lines[i + 2].trim().match(/^-{10,}$/)) {
+                        // This is a comment-style header: convert to markdown
+                        if (nextTrimmed) {
+                            result.push(`## ${nextTrimmed}`);
+                            result.push(STRINGS.EMPTY);
+                        }
+                        // Skip the next two lines (header text and closing dashes)
+                        i += 2;
+                        continue;
+                    }
+                }
+            }
+
+            result.push(line);
+        }
+
+        return result.join(STRINGS.NEWLINE);
+    }
+
+    generateInterleavedContent(md) {
+        // Create a combined list of sections and functions, sorted by line number
+        const contentItems = [];
+
+        // Add sections
+        for (const section of this.sections) {
+            if (section.type === 'section_header') {
+                contentItems.push({
+                    type: 'section',
+                    lineNumber: section.lineNumber,
+                    title: section.title
+                });
+            }
+        }
+
+        // Add functions
+        const groupedFunctions = this.groupFunctionsByName(this.functions);
+        for (const group of groupedFunctions) {
+            // Use the line number of the first function in the group
+            const lineNumber = group.functions[0]?.lineNumber || 0;
+            contentItems.push({
+                type: 'function_group',
+                lineNumber: lineNumber,
+                group: group
+            });
+        }
+
+        // Sort by line number
+        contentItems.sort((a, b) => a.lineNumber - b.lineNumber);
+
+        // Generate markdown for each item
+        for (const item of contentItems) {
+            if (item.type === 'section') {
+                md.push(`## ${item.title}`);
+                md.push(STRINGS.EMPTY);
+            } else if (item.type === 'function_group') {
+                this.generateFunctionGroupMarkdown(md, item.group);
+            }
+        }
+    }
+
+    generateFunctionGroupMarkdown(md, group) {
+        md.push(`## ${group.name}`);
+        md.push(STRINGS.EMPTY);
+
+        const combinedDoc = this.combineFunctionDocs(group.functions);
+        if (combinedDoc) {
+            md.push(combinedDoc);
+            md.push(STRINGS.EMPTY);
+        }
+
+        for (const func of group.functions) {
+            if (func.spec) {
+                md.push(`\`\`\`${STRINGS.ERLANG}`);
+                md.push(func.spec.trim());
+                md.push('```');
+                md.push(STRINGS.EMPTY);
+            }
+
+            if (func.body?.length > 0) {
+                md.push(STRINGS.EMPTY);
+                for (const segment of func.body) {
+                    if (segment.type === 'comment') {
+                        md.push(segment.content);
+                        md.push(STRINGS.EMPTY);
+                    } else if (segment.type === 'doc') {
+                        md.push(segment.content);
+                        md.push(STRINGS.EMPTY);
+                    } else if (segment.type === 'code') {
+                        md.push(`\`\`\`${STRINGS.ERLANG}`);
+                        md.push(segment.content.trim());
+                        md.push('```');
+                        md.push(STRINGS.EMPTY);
+                    }
+                }
+            }
+        }
+
+        md.push(STRINGS.EMPTY);
+    }
+
     formatReturnsText(text) {
         if (!text) return STRINGS.EMPTY;
-        let result = this.cleanDocumentation(text);
+        let result = this.cleanDocumentation(text, true);
 
         const leadingMatch = result.match(REGEX.LEADING_RETURN_TOKEN);
         if (leadingMatch) {
@@ -501,7 +1091,7 @@ class ErlangLiterateParser {
 
     splitReturnsIntoOutcomes(text) {
         if (!text) return [];
-        const s = this.cleanDocumentation(text);
+        const s = this.cleanDocumentation(text, true);
         const matches = [];
         let match;
 
@@ -734,6 +1324,31 @@ class ErlangLiterateParser {
         return result;
     }
 
+    addSeparator(md) {
+        // Only add separator if the last entry is not empty and not already a separator
+        if (md.length > 0) {
+            const lastLine = md[md.length - 1];
+            const prevLine = md.length > 1 ? md[md.length - 2] : '';
+
+            // Don't add separator if last line is already empty and previous is separator
+            if (lastLine === STRINGS.EMPTY && prevLine === STRINGS.SEPARATOR) {
+                return;
+            }
+
+            // Don't add separator if last line is already a separator
+            if (lastLine === STRINGS.SEPARATOR) {
+                return;
+            }
+
+            // Add separator with proper spacing
+            if (lastLine !== STRINGS.EMPTY) {
+                md.push(STRINGS.EMPTY);
+            }
+            md.push(STRINGS.SEPARATOR);
+            md.push(STRINGS.EMPTY);
+        }
+    }
+
     generateMarkdown(fileName) {
         const githubUrl = `${this.options.githubBase}/${fileName}`;
         const md = [];
@@ -744,38 +1359,61 @@ class ErlangLiterateParser {
         md.push(`[View source on GitHub](${githubUrl})`);
         md.push(STRINGS.EMPTY);
 
+        // Metadata section
+        this.generateMetadataSection(md);
+
         // Module documentation
         if (this.moduleInfo.doc) {
             md.push(this.moduleInfo.doc);
+            this.addSeparator(md);
+        }
+
+        // Generate interleaved content (sections and functions) sorted by line number
+        this.generateInterleavedContent(md);
+
+        // Commented-out code blocks
+        if (this.commentedCodeBlocks && this.commentedCodeBlocks.length > 0) {
+            md.push('## Commented-Out Code');
             md.push(STRINGS.EMPTY);
-            md.push(STRINGS.SEPARATOR);
+            md.push('*The following code blocks are commented out but may contain useful examples:*');
             md.push(STRINGS.EMPTY);
+
+            for (const block of this.commentedCodeBlocks) {
+                md.push('```erlang');
+                for (const line of block.lines) {
+                    md.push(line);
+                }
+                md.push('```');
+                md.push(STRINGS.EMPTY);
+            }
         }
 
-        // Exports
-        if (this.moduleInfo.exports?.length > 0) {
-            md.push(STRINGS.EXPORTED_FUNCTIONS);
+        // Conditional compilation directives
+        if (this.conditionalDirectives && this.conditionalDirectives.length > 0) {
+            md.push('## Conditional Compilation');
             md.push(STRINGS.EMPTY);
-            this.moduleInfo.exports.forEach(exp =>
-                md.push(`- ${STRINGS.BACKTICK}${exp}${STRINGS.BACKTICK}`));
+            md.push('*The following conditional compilation directives are used in this module:*');
             md.push(STRINGS.EMPTY);
-            md.push(STRINGS.SEPARATOR);
+
+            md.push('```erlang');
+            for (const directive of this.conditionalDirectives) {
+                md.push(directive.line);
+            }
+            md.push('```');
             md.push(STRINGS.EMPTY);
         }
 
-        const groupedFunctions = this.groupFunctionsByName(this.functions);
-
-        for (const group of groupedFunctions) {
-            md.push(`## ${group.name}`);
+        // Undocumented functions section
+        if (this.undocumentedFunctions.length > 0) {
+            md.push('## Undocumented Functions');
+            md.push(STRINGS.EMPTY);
+            md.push('*The following functions lack documentation comments but are included for completeness:*');
             md.push(STRINGS.EMPTY);
 
-            const combinedDoc = this.combineFunctionDocs(group.functions);
-            if (combinedDoc) {
-                md.push(combinedDoc);
+            for (const func of this.undocumentedFunctions) {
+                md.push(`### ${func.name}`);
                 md.push(STRINGS.EMPTY);
-            }
 
-            for (const func of group.functions) {
                 if (func.spec) {
                     md.push(`\`\`\`${STRINGS.ERLANG}`);
                     md.push(func.spec.trim());
@@ -784,14 +1422,10 @@ class ErlangLiterateParser {
                 }
 
                 if (func.body?.length > 0) {
-                    md.push(STRINGS.EMPTY);
                     for (const segment of func.body) {
                         if (segment.type === 'comment') {
                             md.push(segment.content);
                             md.push(STRINGS.EMPTY);
-                        } else if (segment.type === 'doc') {
-                            md.push(segment.content);
-                            md.push(STRINGS.EMPTY);
                         } else if (segment.type === 'code') {
                             md.push(`\`\`\`${STRINGS.ERLANG}`);
                             md.push(segment.content.trim());
@@ -802,14 +1436,109 @@ class ErlangLiterateParser {
                 }
             }
 
-            md.push(STRINGS.EMPTY);
+            this.addSeparator(md);
         }
 
-        md.push(STRINGS.SEPARATOR);
-        md.push(STRINGS.EMPTY);
+        this.addSeparator(md);
         md.push(`*Generated from [${fileName}](${githubUrl})*`);
 
-        return md.join(STRINGS.NEWLINE);
+        const finalMarkdown = md.join(STRINGS.NEWLINE);
+        return finalMarkdown;
+    }
+
+    generateMetadataSection(md) {
+        md.push('## Module Metadata');
+        md.push(STRINGS.EMPTY);
+
+        // Basic module information
+        md.push(`**Module:** \`${this.moduleInfo.name || 'unknown'}\``);
+        md.push(`**Exports:** ${this.moduleInfo.exports.length} functions`);
+
+        if (this.moduleInfo.behaviours.length > 0) {
+            md.push(`**Behaviours:** ${this.moduleInfo.behaviours.map(b => `\`${b}\``).join(', ')}`);
+        }
+
+        if (this.moduleInfo.includes.length > 0) {
+            md.push(`**Includes:** ${this.moduleInfo.includes.length} files`);
+        }
+
+        if (this.moduleInfo.defines.length > 0) {
+            md.push(`**Defines:** ${this.moduleInfo.defines.length} macros`);
+        }
+
+        if (this.moduleInfo.records.length > 0) {
+            md.push(`**Records:** ${this.moduleInfo.records.length} records`);
+        }
+
+        if (this.moduleInfo.types.length > 0) {
+            md.push(`**Types:** ${this.moduleInfo.types.length} type definitions`);
+        }
+
+        md.push(STRINGS.EMPTY);
+
+        // Exports section
+        if (this.moduleInfo.exports.length > 0) {
+            md.push('### Exported Functions');
+            md.push(STRINGS.EMPTY);
+            this.moduleInfo.exports.forEach(exp => {
+                md.push(`- \`${exp}\``);
+            });
+            md.push(STRINGS.EMPTY);
+        }
+
+        // Includes section
+        if (this.moduleInfo.includes.length > 0) {
+            md.push('### Includes');
+            md.push(STRINGS.EMPTY);
+            md.push('```erlang');
+            this.moduleInfo.includes.forEach(inc => {
+                md.push(inc.line);
+            });
+            md.push('```');
+            md.push(STRINGS.EMPTY);
+        }
+
+        // Defines section
+        if (this.moduleInfo.defines.length > 0) {
+            md.push('### Macro Definitions');
+            md.push(STRINGS.EMPTY);
+            md.push('```erlang');
+            this.moduleInfo.defines.forEach(def => {
+                md.push(def.line);
+            });
+            md.push('```');
+            md.push(STRINGS.EMPTY);
+        }
+
+        // Records section
+        if (this.moduleInfo.records.length > 0) {
+            md.push('### Record Definitions');
+            md.push(STRINGS.EMPTY);
+            this.moduleInfo.records.forEach(rec => {
+                md.push(`#### \`${rec.name}\``);
+                md.push(STRINGS.EMPTY);
+                md.push('```erlang');
+                md.push(rec.definition);
+                md.push('```');
+                md.push(STRINGS.EMPTY);
+            });
+        }
+
+        // Types section
+        if (this.moduleInfo.types.length > 0) {
+            md.push('### Type Definitions');
+            md.push(STRINGS.EMPTY);
+            this.moduleInfo.types.forEach(type => {
+                md.push(`#### \`${type.name}\``);
+                md.push(STRINGS.EMPTY);
+                md.push('```erlang');
+                md.push(type.definition);
+                md.push('```');
+                md.push(STRINGS.EMPTY);
+            });
+        }
+
+        this.addSeparator(md);
     }
 
     groupFunctionsByName(functions) {
@@ -835,7 +1564,7 @@ class ErlangLiterateParser {
                 const combinedDoc = [];
 
                 if (parsed.description.length > 0) {
-                    combinedDoc.push(this.cleanDocumentation(parsed.description.join(STRINGS.NEWLINE)));
+                    combinedDoc.push(this.cleanDocumentation(parsed.description.join(STRINGS.NEWLINE), true));
                     combinedDoc.push(STRINGS.EMPTY);
                 }
 
@@ -843,7 +1572,7 @@ class ErlangLiterateParser {
                     combinedDoc.push(STRINGS.PARAMETERS_HEADER);
                     combinedDoc.push(STRINGS.EMPTY);
                     parsed.params.forEach(param => {
-                        const desc = this.cleanDocumentation(param.description);
+                        const desc = this.cleanDocumentation(param.description, true);
                         combinedDoc.push(`- ${STRINGS.BACKTICK}${param.name}${STRINGS.BACKTICK} - ${desc}`);
                     });
                     combinedDoc.push(STRINGS.EMPTY);
@@ -870,7 +1599,7 @@ function main() {
     const verbose = args.includes('-v') || args.includes('--verbose');
 
     const srcDir = process.env.SRC_DIR || path.join(process.cwd(), 'src');
-    const outputDir = process.env.OUTPUT_DIR || path.join(process.cwd(), 'docs/literate-erlang');
+    const outputDir = process.env.OUTPUT_DIR || path.join(process.cwd(), 'docs/book/src');
 
     if (!fs.existsSync(outputDir)) {
         fs.mkdirSync(outputDir, { recursive: true });
diff --git a/docs/build-literate-erlang-js.sh b/docs/generate-literate-docs.sh
similarity index 51%
rename from docs/build-literate-erlang-js.sh
rename to docs/generate-literate-docs.sh
index 91b5545ce..0c1328897 100755
--- a/docs/build-literate-erlang-js.sh
+++ b/docs/generate-literate-docs.sh
@@ -16,16 +16,54 @@ cd "$ROOT_DIR"
 
 # Configuration
 SRC_DIR="${SRC_DIR:-$ROOT_DIR/src}"
-OUTPUT_DIR="${OUTPUT_DIR:-$ROOT_DIR/docs/literate-erlang}"
+OUTPUT_DIR="${OUTPUT_DIR:-$ROOT_DIR/docs/book/src}"
 PARSER_SCRIPT="$SCRIPT_DIR/erlang-literate-parser.js"
 
 # Parse arguments
 VERBOSE=false
+DRY_RUN=false
+SHOW_HELP=false
+
+if [[ "$@" == *"-h"* ]] || [[ "$@" == *"--help"* ]]; then
+    SHOW_HELP=true
+fi
 if [[ "$@" == *"-v"* ]] || [[ "$@" == *"--verbose"* ]]; then
     VERBOSE=true
 fi
+if [[ "$@" == *"--dry-run"* ]] || [[ "$@" == *"--dryrun"* ]]; then
+    DRY_RUN=true
+fi
+
+if [ "$SHOW_HELP" = true ]; then
+    echo -e "${GREEN}HyperBEAM Literate Erlang Documentation Generator (JavaScript)${NC}"
+    echo "========================================================"
+    echo ""
+    echo "Usage: $0 [OPTIONS]"
+    echo ""
+    echo "Options:"
+    echo "  -v, --verbose     Enable verbose output"
+    echo "  --dry-run        Simulate deployment without actually deploying"
+    echo "  -h, --help       Show this help message"
+    echo ""
+    echo "Environment Variables:"
+    echo "  SRC_DIR          Source directory (default: ./src)"
+    echo "  OUTPUT_DIR       Output directory (default: ./docs/book/src)"
+    echo "  DEPLOY_KEY       Arweave wallet key for deployment"
+    echo "  ANT_PROCESS      ANT process ID for ArNS deployment"
+    echo ""
+    echo "Examples:"
+    echo "  $0                    # Generate documentation normally"
+    echo "  $0 -v                 # Generate with verbose output"
+    echo "  $0 --dry-run          # Test deployment without deploying"
+    echo "  $0 --dry-run -v       # Dry run with verbose output"
+    echo ""
+    exit 0
+fi
 
 echo -e "${GREEN}HyperBEAM Literate Erlang Documentation Generator (JavaScript)${NC}"
+if [ "$DRY_RUN" = true ]; then
+    echo -e "${YELLOW}[DRY RUN MODE] - No actual deployment will occur${NC}"
+fi
 echo "========================================================"
 
 # Check for Node.js
@@ -106,31 +144,53 @@ if [ $PARSER_EXIT_CODE -eq 0 ]; then
 
     # Copy to mdBook if it exists
     if [ -d "$ROOT_DIR/docs/book/src" ]; then
-        echo -e "${GREEN}Copying documentation to mdBook...${NC}"
-        cp "$OUTPUT_DIR"/*.md "$ROOT_DIR/docs/book/src/" 2>/dev/null
-        if [ $? -eq 0 ]; then
-            echo -e "${GREEN}✓ Documentation copied to mdBook${NC}"
+        if [ "$DRY_RUN" = true ]; then
+            echo -e "${YELLOW}[DRY RUN] Would copy documentation to mdBook...${NC}"
+            echo -e "${YELLOW}[DRY RUN] ✓ Documentation would be copied to mdBook${NC}"
         else
-            echo -e "${YELLOW}Warning: Could not copy to mdBook (no files generated?)${NC}"
+            echo -e "${GREEN}Copying documentation to mdBook...${NC}"
+            cp "$OUTPUT_DIR"/*.md "$ROOT_DIR/docs/book/src/" 2>/dev/null
+            if [ $? -eq 0 ]; then
+                echo -e "${GREEN}✓ Documentation copied to mdBook${NC}"
+            else
+                echo -e "${YELLOW}Warning: Could not copy to mdBook (no files generated?)${NC}"
+            fi
         fi
     fi
 
     # Build mdBook if available
     if command -v mdbook &> /dev/null && [ -f "$ROOT_DIR/docs/book/book.toml" ]; then
-        echo -e "${GREEN}Building mdBook...${NC}"
-        cd "$ROOT_DIR/docs/book"
-        mdbook build
-        if [ $? -eq 0 ]; then
-            echo -e "${GREEN}✓ mdBook built successfully${NC}"
-            echo "View at: file://$ROOT_DIR/docs/book/book/index.html"
+        if [ "$DRY_RUN" = true ]; then
+            echo -e "${YELLOW}[DRY RUN] Would build mdBook...${NC}"
+            echo -e "${YELLOW}[DRY RUN] ✓ mdBook would be built successfully${NC}"
+            echo -e "${YELLOW}[DRY RUN] Would be viewable at: file://$ROOT_DIR/docs/book/dist/index.html${NC}"
         else
-            echo -e "${YELLOW}Warning: mdBook build failed${NC}"
+            echo -e "${GREEN}Building mdBook...${NC}"
+            cd "$ROOT_DIR/docs/book"
+            mdbook build
+            if [ $? -eq 0 ]; then
+                echo -e "${GREEN}✓ mdBook built successfully${NC}"
+                echo "View at: file://$ROOT_DIR/docs/book/dist/index.html"
+            else
+                echo -e "${YELLOW}Warning: mdBook build failed${NC}"
+            fi
+            cd "$ROOT_DIR"
         fi
-        cd "$ROOT_DIR"
+    fi
+
+    # Run deployment dry run if requested
+    if [ "$DRY_RUN" = true ]; then
+        echo ""
+        echo -e "${YELLOW}Running deployment dry run...${NC}"
+        "$SCRIPT_DIR/deploy-dry-run.sh"
     fi
 
     echo ""
-    echo -e "${GREEN}Documentation generation complete!${NC}"
+    if [ "$DRY_RUN" = true ]; then
+        echo -e "${YELLOW}Documentation generation and deployment dry run complete!${NC}"
+    else
+        echo -e "${GREEN}Documentation generation complete!${NC}"
+    fi
     echo "Output directory: $OUTPUT_DIR"
 
 else
diff --git a/docs/build-and-serve.sh b/docs/serve-book.sh
similarity index 96%
rename from docs/build-and-serve.sh
rename to docs/serve-book.sh
index c590e7c8d..ef7ddaea6 100755
--- a/docs/build-and-serve.sh
+++ b/docs/serve-book.sh
@@ -14,7 +14,7 @@ echo -e "${BLUE}🚀 Building and serving HyperBEAM documentation...${NC}"
 
 # Generate literate documentation
 echo -e "${GREEN}📚 Generating literate documentation...${NC}"
-./build-literate-erlang-js.sh
+./generate-literate-docs.sh
 
 # Build and serve mdBook
 echo -e "${GREEN}📖 Building mdBook...${NC}"

From 81e2a440bd854fafabde60d5789346bfcb5ad860 Mon Sep 17 00:00:00 2001
From: Dylan Shade <63427984+dpshade@users.noreply.github.com>
Date: Wed, 24 Sep 2025 17:24:52 -0400
Subject: [PATCH 17/17] docs: Update documentation build workflow to use new
 script

- Replace deprecated `build-literate-erlang.sh` with `generate-literate-docs.sh` in the GitHub Actions workflow.
- Update both the workflow's path filters and its run steps to reference the renamed script, keeping the CI documentation build consistent with the tooling rename.
---
 .github/workflows/build-deploy-mdbook.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/build-deploy-mdbook.yml b/.github/workflows/build-deploy-mdbook.yml
index 2780d2c22..717c493b9 100644
--- a/.github/workflows/build-deploy-mdbook.yml
+++ b/.github/workflows/build-deploy-mdbook.yml
@@ -10,7 +10,7 @@ on:
       - "docs/book/book.toml"
       - "docs/book/custom.css"
       - "docs/book/custom.js"
-      - "docs/build-literate-erlang.sh"
+      - "docs/generate-literate-docs.sh"
       - ".github/workflows/build-deploy-mdbook.yml"
   push:
     branches:
@@ -21,7 +21,7 @@ on:
       - "docs/book/book.toml"
       - "docs/book/custom.css"
       - "docs/book/custom.js"
-      - "docs/build-literate-erlang.sh"
+      - "docs/generate-literate-docs.sh"
       - ".github/workflows/build-deploy-mdbook.yml"
 
   # Perform a release using a workflow dispatch
@@ -50,7 +50,7 @@ jobs:
 
       - name: 📝 Generate Literate Erlang Documentation
         run: |
-          ./docs/build-literate-erlang.sh -v
+          ./docs/generate-literate-docs.sh -v
 
       - name: 📚 Build mdBook Documentation
         run: |
@@ -94,7 +94,7 @@ jobs:
 
       - name: 📝 Generate Literate Erlang Documentation
         run: |
-          ./docs/build-literate-erlang.sh -v
+          ./docs/generate-literate-docs.sh -v
 
       - name: 📚 Build mdBook Documentation
         id: build_mdbook