feat: initial release

This commit is contained in:
Víctor Martínez 2026-01-19 23:25:18 +01:00 committed by JasterV
commit a533c4aa83
26 changed files with 2680 additions and 0 deletions

11
.github/dependabot.yml vendored Normal file
View file

@ -0,0 +1,11 @@
version: 2
updates:
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "daily"
time: "09:00"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"

53
.github/workflows/cd.yml vendored Normal file
View file

@ -0,0 +1,53 @@
name: Release-plz
on:
  push:
    branches:
      - main
jobs:
  # Release unpublished packages.
  release-plz-release:
    name: Release-plz release
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      # NOTE(review): this workflow relies on YAML anchors/aliases
      # (&checkout / *checkout below). GitHub Actions only parses anchors on
      # sufficiently recent runner infrastructure — confirm they are accepted,
      # otherwise inline the two shared steps in both jobs.
      - &checkout
        name: Checkout repository
        uses: actions/checkout@v6
        with:
          # Full history so release-plz can compute changelogs from past tags.
          fetch-depth: 0
          persist-credentials: false
      - &install-rust
        name: Install Rust toolchain
        uses: dtolnay/rust-toolchain@stable
      - name: Run release-plz
        uses: release-plz/action@v0.5
        with:
          command: release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_AUTH_KEY }}
  # Create a PR with the new versions and changelog, preparing the next release.
  release-plz-pr:
    name: Release-plz PR
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    concurrency:
      # Only one release PR job per ref at a time; queued runs are not cancelled.
      group: release-plz-${{ github.ref }}
      cancel-in-progress: false
    steps:
      - *checkout
      - *install-rust
      - name: Run release-plz
        uses: release-plz/action@v0.5
        with:
          command: release-pr
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_AUTH_KEY }}

36
.github/workflows/ci.yml vendored Normal file
View file

@ -0,0 +1,36 @@
name: CI
on:
  push:
    branches:
      - main
  pull_request:
concurrency:
  group: ci-${{ github.head_ref || github.ref }}
  cancel-in-progress: true
env:
  CARGO_TERM_COLOR: always
jobs:
  ci:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
      - run: rustup default 1.92.0
      - run: rustup component add clippy rustfmt
      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # ratchet:Swatinem/rust-cache@v2
      - uses: taiki-e/install-action@ae532dedd825648efd18d9c49c9a443d0398ca0a # ratchet:taiki-e/install-action@cargo-make
      - name: Install protoc compiler
        run: |
          sudo apt update -y
          sudo apt install -y protobuf-compiler
      - run: cargo make ci
      - run: cargo make test
  # Aggregate gate intended as the single required status check.
  # It must fail when any needed job failed, was cancelled, or was skipped —
  # checking only 'failure' lets a cancelled/skipped `ci` job pass the gate.
  alls-green:
    if: always()
    runs-on: ubuntu-latest
    needs:
      - ci
    steps:
      # Renders to `true`/`false`; the shell exits non-zero on `false`.
      - run: ${{ !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') && !contains(needs.*.result, 'skipped') }}

1
.gitignore vendored Normal file
View file

@ -0,0 +1 @@
/target

1
CHANGELOG.md Symbolic link
View file

@ -0,0 +1 @@
grab/CHANGELOG.md

1271
Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

3
Cargo.toml Normal file
View file

@ -0,0 +1,3 @@
[workspace]
members = ["grab", "echo-service"]
resolver = "2"

201
LICENSE Normal file
View file

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

51
Makefile.toml Normal file
View file

@ -0,0 +1,51 @@
[config]
# Sets the default task to run when executing `cargo make` without arguments
default_to_workspace = false
# Disables cargo-make's predefined core tasks; every task used below must
# therefore be defined explicitly in this file.
skip_core_tasks = true

# --- Main Flow Tasks ---

[tasks.default]
description = "Default task: Formats, Lints, and Builds"
dependencies = ["fmt", "clippy", "build"]

[tasks.ci]
description = "CI task: Runs checks without modifying files"
dependencies = ["fmt-check", "clippy", "test"]

# --- Atomic Tasks ---

[tasks.run]
description = "Runs the CLI tool with dynamic arguments"
command = "cargo"
# Added '-p grab' to explicitly target the CLI binary in the workspace
args = ["run", "-p", "grab", "${@}"]

# With `skip_core_tasks = true` the built-in `build` task does not exist, so
# the `default` flow's "build" dependency must be defined here or `cargo make`
# fails with an unknown-task error.
[tasks.build]
description = "Builds the whole workspace"
command = "cargo"
args = ["build", "--workspace"]

[tasks.fmt]
description = "Formats all source files"
command = "cargo"
args = ["fmt", "--all"]

[tasks.fmt-check]
description = "Checks formatting without modifying files (fails if unformatted)"
command = "cargo"
args = ["fmt", "--all", "--", "--check"]

[tasks.clippy]
description = "Runs Clippy lints on the workspace"
command = "cargo"
# Added '--workspace' to lint both crates
args = [
    "clippy",
    "--workspace",
    "--all-targets",
    "--all-features",
    "--",
    "-D",
    "warnings",
]

[tasks.test]
description = "Runs tests for the grab crate only"
command = "cargo"
# NOTE: requires cargo-nextest on PATH (`cargo install cargo-nextest`) —
# confirm the CI image installs it before relying on this task.
# Added '-p grab' to strictly run integration/unit tests for the CLI
args = ["nextest", "run", "--no-fail-fast", "-p", "grab"]

1
README.md Symbolic link
View file

@ -0,0 +1 @@
grab/README.md

14
echo-service/Cargo.toml Normal file
View file

@ -0,0 +1,14 @@
[package]
name = "echo-service"
edition = "2024"
publish = false
[dependencies]
bytes = "1"
prost = "0.14"
tonic = "0.14"
prost-types = "0.14"
tonic-prost = "0.14.2"
[build-dependencies]
tonic-prost-build = "0.14"

24
echo-service/build.rs Normal file
View file

@ -0,0 +1,24 @@
use std::env::var;
use std::io::Result;
/// Build script: compiles the echo proto definitions into Rust server stubs
/// and writes a `FileDescriptorSet` (`descriptors.bin`) into `OUT_DIR` so the
/// crate can expose it for reflection-based tests.
///
/// # Errors
/// Propagates any I/O / protoc failure from `tonic_prost_build` instead of
/// panicking, so Cargo reports it as a normal build-script error.
fn main() -> Result<()> {
    // List of proto files containing a message definition
    let proto_files = &[
        // Services
        "proto/echo.proto",
    ];
    // Name of the folder containing the proto definitions
    let proto_folder = "proto";
    // OUT_DIR is always set by Cargo for build scripts; missing it is a bug.
    let out_dir = var("OUT_DIR").expect("Missing OUT_DIR environment variable");
    let descriptors_path = format!("{out_dir}/descriptors.bin");
    tonic_prost_build::configure()
        .file_descriptor_set_path(descriptors_path)
        .protoc_arg("--experimental_allow_proto3_optional")
        // Only the server side is needed; the `grab` CLI is fully dynamic.
        .build_client(false)
        // Was `.unwrap()` followed by `Ok(())` — propagate with `?` so the
        // `Result` return type is actually meaningful.
        .compile_protos(proto_files, &[proto_folder])?;
    Ok(())
}

View file

@ -0,0 +1,18 @@
syntax = "proto3";

package echo;

// Test-only echo service covering all four gRPC call shapes, used to
// exercise the `grab` CLI's dynamic dispatch in integration tests.
service EchoService {
  // Single request -> single response.
  rpc UnaryEcho (EchoRequest) returns (EchoResponse);
  // Single request -> stream of responses.
  rpc ServerStreamingEcho (EchoRequest) returns (stream EchoResponse);
  // Stream of requests -> single response.
  rpc ClientStreamingEcho (stream EchoRequest) returns (EchoResponse);
  // Stream of requests -> stream of responses.
  rpc BidirectionalEcho (stream EchoRequest) returns (stream EchoResponse);
}

message EchoRequest {
  string message = 1;
}

message EchoResponse {
  string message = 1;
}

12
echo-service/src/lib.rs Normal file
View file

@ -0,0 +1,12 @@
//! # Echo Service
//!
//! **INTERNAL USE ONLY**: This crate exists solely to provide a gRPC server implementation
//! and descriptor set for integration testing the `grab` CLI tool.
//! It is not intended for production use.
/// Protobuf-generated types for the `echo` package (emitted by `build.rs`).
pub mod pb {
    include!(concat!(env!("OUT_DIR"), "/echo.rs"));
}

// Re-export the server trait and service wrapper so test code can implement
// and mount the service without reaching into `pb`.
pub use pb::echo_service_server::{EchoService, EchoServiceServer};

/// Serialized `FileDescriptorSet` written by `build.rs`; consumers load it to
/// resolve the echo schema dynamically (e.g. reflection-based clients).
pub const FILE_DESCRIPTOR_SET: &[u8] = tonic::include_file_descriptor_set!("descriptors");

22
grab/CHANGELOG.md Normal file
View file

@ -0,0 +1,22 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
## `grab` - [0.1.0](https://github.com/JasterV/grab/releases/tag/grab-v0.1.0) - 2026-01-20
### Added
- **Dynamic gRPC Client**: Implemented a CLI that performs gRPC calls without generating Rust code, bridging JSON payloads to Protobuf binary format at runtime.
- **Schema Loading**: Support for loading Protobuf schemas dynamically from binary `FileDescriptorSet` (`.bin` or `.pb`) files.
- **Full Streaming Support**: Automatic dispatch for all four gRPC access patterns based on the method descriptor:
- Unary (Single Request → Single Response)
- Server Streaming (Single Request → Stream)
- Client Streaming (Stream → Single Response)
- Bidirectional Streaming (Stream → Stream)
- **JSON Transcoding**: Custom `tonic::Codec` implementation (`JsonCodec`) to validate and transcode `serde_json::Value` to/from Protobuf bytes on the fly.
- **Metadata Support**: Ability to attach custom headers/metadata to requests via the `-H` / `--header` flag.
- **Input Validation**: Fast-fail validation that checks if the provided JSON structure is valid before making the network request.

32
grab/Cargo.toml Normal file
View file

@ -0,0 +1,32 @@
[package]
authors = ["Victor Martínez Montané <jaster.victor@gmail.com>"]
categories = ["network-programming", "command-line-utilities"]
description = "A dynamic gRPC CLI tool written in Rust (gRPC + Crab)"
edition = "2024"
homepage = "https://github.com/JasterV/grab"
keywords = ["cli", "command-line", "grpc", "grpcurl", "curl"]
license = "MIT OR Apache-2.0"
name = "grab"
publish = true
readme = "README.md"
repository = "https://github.com/JasterV/grab"
rust-version = "1.89"
version = "0.1.0"
[dependencies]
anyhow = "1.0.100"
clap = { version = "4.5.54", features = ["derive"] }
futures-util = "0.3.31"
http = "1.4.0"
http-body = "1.0.1"
prost = "0.14.3"
prost-reflect = { version = "0.16.3", features = ["serde"] }
serde_json = "1.0.149"
thiserror = "2.0.18"
tokio = { version = "1.49.0", features = ["rt-multi-thread", "macros"] }
tokio-stream = "0.1.18"
tonic = "0.14.2"
[dev-dependencies]
echo-service = { path = "../echo-service" }
tokio = { version = "1", features = ["rt-multi-thread", "macros", "test-util"] }

146
grab/README.md Normal file
View file

@ -0,0 +1,146 @@
# gRab 🦀
> ⚠️ **Status: Experimental**
>
> This project is currently in a **highly experimental phase**. It is a working prototype intended for testing and development purposes. APIs, command-line arguments, and internal logic are subject to breaking changes. Please use with caution.
**gRab** (gRPC + Crab) is a lightweight, dynamic gRPC CLI tool written in Rust.
It allows you to make gRPC calls to any server using simple JSON payloads, without needing to compile the specific Protobuf files into the client. By loading a `FileDescriptorSet` at runtime, gRab acts as a bridge between human-readable JSON and binary Protobuf wire format.
It is heavily inspired by tools like `grpcurl` but built to leverage the safety and performance of the Rust ecosystem (Tonic + Prost).
## 🚀 Features
* **Dynamic Encoding/Decoding**: Transcodes JSON to Protobuf (and vice versa) on the fly using `prost-reflect`.
* **Smart Dispatch**: Automatically detects if a call is Unary, Server Streaming, Client Streaming, or Bidirectional based on the descriptor.
* **Fast Fail Validation**: Validates your JSON *before* hitting the network.
* **Zero Compilation Dependencies**: Does not require generating Rust code for your protos. Just point to a descriptor file.
* **Metadata Support**: Easily attach custom headers (authorization, tracing) to your requests.
* **Tonic 0.14**: Built on the latest stable Rust gRPC stack.
## 📦 Installation
### From Source
Ensure you have Rust and Cargo installed.
```bash
git clone https://github.com/JasterV/grab
cd grab
cargo install --path .
```
## 🛠️ Prerequisites: Generating Descriptors
To use gRab, you currently need a binary **FileDescriptorSet** (`.bin` or `.pb`). This file contains the schema definitions for your services.
You can generate this using the standard `protoc` compiler:
```bash
# Generate descriptor.bin including all imports
protoc \
--include_imports \
--descriptor_set_out=descriptor.bin \
--proto_path=. \
my_service.proto
```
> **Note**: The `--include_imports` flag is crucial. It ensures that types defined in imported files (like `google/protobuf/timestamp.proto`) are available for reflection.
## 📖 Usage
**Syntax:**
```bash
grab [OPTIONS] <URL> <METHOD>
```
### Arguments
| Argument | Description | Required |
| --- | --- | --- |
| `<URL>` | Server address (e.g., `http://[::1]:50051`). | **Yes** |
| `<METHOD>` | Fully qualified method name (e.g., `my.package.Service/Method`). | **Yes** |
### Options
| Flag | Short | Description | Required |
| --- | --- | --- | --- |
| `--proto-set` | | Path to the binary FileDescriptorSet (`.bin`). | **Yes** |
| `--body` | | The request body in JSON format. | **Yes** |
| `--header` | `-H` | Custom header `key:value`. Can be used multiple times. | No |
### JSON Body Format
* **Unary / Server Streaming**: Provide a single JSON object `{ ... }`.
* **Client / Bidirectional Streaming**: Provide a JSON array of objects `[ { ... }, { ... } ]`.
### Examples
**1. Unary Call**
```bash
grab \
--proto-set ./descriptor.bin \
--body '{"name": "Ferris"}' \
http://localhost:50051 \
helloworld.Greeter/SayHello
```
**2. Bidirectional Streaming (Chat)**
```bash
grab \
--proto-set ./descriptor.bin \
--body '[{"text": "Hello"}, {"text": "How are you?"}]' \
-H "authorization: Bearer token123" \
http://localhost:50051 \
chat.ChatService/StreamMessages
```
## 🔮 Roadmap
* **Automatic Server Reflection**: We are working on removing the requirement for the `--proto-set` file. Future versions will support fetching the schema directly from servers that have the [gRPC Server Reflection Protocol](https://github.com/grpc/grpc/blob/master/doc/server-reflection.md) enabled.
* **Interactive Mode**: A REPL for streaming requests interactively.
* **Pretty Printing**: Enhanced colored output for JSON responses.
## ⚠️ Common Errors
**1. `Service 'x' not found`**
* **Cause:** The service name in the command does not match the package defined in your proto file.
* **Fix:** Check your `.proto` file. If it has `package my.app;` and `service API {}`, the full name is `my.app.API`.
**2. `Method 'y' not found in service 'x'`**
* **Cause:** Typo in the method name or the method doesn't exist.
* **Fix:** Ensure case sensitivity matches (e.g., `GetUser` vs `getUser`).
**3. `h2 protocol error`**
* **Cause:** This often occurs when the JSON payload fails to encode *after* the connection has already been established, or the server rejected the stream structure.
* **Fix:** Double-check your JSON payload against the Protobuf schema.
## 🤝 Contributing
Contributions are welcome! Please run the Makefile checks before submitting a PR:
```bash
cargo make # Formats, lints, and builds
```
## 📄 License
Licensed under either of:
* Apache License, Version 2.0 ([LICENSE-APACHE](http://www.apache.org/licenses/LICENSE-2.0))
* MIT license ([LICENSE-MIT](http://opensource.org/licenses/MIT))
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.

193
grab/src/client.rs Normal file
View file

@ -0,0 +1,193 @@
//! # gRPC Client Layer
//!
//! This module handles the low-level networking and `tonic` client construction.
//! It is decoupled from the descriptor logic and strictly focuses on sending
//! requests and receiving responses using the `JsonCodec`.
//!
//! # Error Handling
//!
//! - **`ClientError`**: Represents transport errors (connection refused, DNS resolution),
//! configuration errors (invalid URI), or usage errors (invalid headers).
//! - **`tonic::Status`**: Represents a successful HTTP interaction where the gRPC server
//! returned an error code (e.g., `NOT_FOUND`, `UNAUTHENTICATED`).
//!
//! The methods in `GrpcClient` separate these two types of errors by returning
//! `Result<Result<T, Status>, ClientError>`.
use crate::codec::JsonCodec;
use futures_util::Stream;
use prost_reflect::MethodDescriptor;
use std::str::FromStr;
use thiserror::Error;
use tonic::{
Request,
client::Grpc,
metadata::{
MetadataKey, MetadataValue,
errors::{InvalidMetadataKey, InvalidMetadataValue},
},
transport::Channel,
};
#[cfg(test)]
mod integration_test;
/// Represents failures that occur *before* or during the establishment of the network call,
/// or protocol violations that prevent a response.
#[derive(Error, Debug)]
pub enum ClientError {
    /// The address string could not be parsed as an HTTP URI.
    #[error("Invalid uri '{addr}' provided: '{source}'")]
    InvalidUri {
        addr: String,
        source: http::uri::InvalidUri,
    },
    /// The transport connection to the server could not be established.
    #[error("Failed to connect: '{0}'")]
    ConnectionFailed(tonic::transport::Error),
    /// The channel existed but never became ready to accept a new request.
    #[error("Internal error, the client was not ready: '{0}'")]
    ClientNotReady(tonic::transport::Error),
    /// A supplied header name is not a valid gRPC metadata key.
    #[error("Invalid metadata (header) key '{key}': '{source}'")]
    InvalidMetadataKey {
        key: String,
        source: InvalidMetadataKey,
    },
    /// A supplied header value is not valid for gRPC metadata.
    #[error("Invalid metadata (header) value for key '{key}': '{source}'")]
    InvalidMetadataValue {
        key: String,
        source: InvalidMetadataValue,
    },
}
/// A generic gRPC client that uses dynamic dispatch via `prost-reflect`.
pub struct GrpcClient {
    // Shared transport channel; each call clones it to build a fresh
    // `tonic::client::Grpc` (see the `impl` below).
    channel: Channel,
}
impl GrpcClient {
/// Connects to the specified gRPC server address.
///
/// # Arguments
/// * `addr` - The URI of the server (e.g., `http://localhost:50051`).
pub async fn connect(addr: &str) -> Result<Self, ClientError> {
let uri =
tonic::transport::Uri::from_str(addr).map_err(|source| ClientError::InvalidUri {
addr: addr.to_string(),
source,
})?;
let channel = Channel::builder(uri)
.connect()
.await
.map_err(ClientError::ConnectionFailed)?;
Ok(Self { channel })
}
/// Performs a Unary gRPC call (Single Request -> Single Response).
///
/// # Returns
/// * `Ok(Ok(Value))` - Successful RPC execution.
/// * `Ok(Err(Status))` - RPC executed, but server returned an error.
/// * `Err(ClientError)` - Failed to send request or connect.
pub async fn unary(
&self,
method: MethodDescriptor,
payload: serde_json::Value,
headers: Vec<(String, String)>,
) -> Result<Result<serde_json::Value, tonic::Status>, ClientError> {
let mut client = Grpc::new(self.channel.clone());
client.ready().await.map_err(ClientError::ClientNotReady)?;
let codec = JsonCodec::new(method.input(), method.output());
let path = http_path(&method);
let request = build_request(payload, headers)?;
match client.unary(request, path, codec).await {
Ok(response) => Ok(Ok(response.into_inner())),
Err(status) => Ok(Err(status)),
}
}
/// Performs a Server Streaming gRPC call (Single Request -> Stream of Responses).
pub async fn server_streaming(
&self,
method: MethodDescriptor,
payload: serde_json::Value,
headers: Vec<(String, String)>,
) -> Result<
Result<impl Stream<Item = Result<serde_json::Value, tonic::Status>>, tonic::Status>,
ClientError,
> {
let mut client = Grpc::new(self.channel.clone());
client.ready().await.map_err(ClientError::ClientNotReady)?;
let codec = JsonCodec::new(method.input(), method.output());
let path = http_path(&method);
let request = build_request(payload, headers)?;
match client.server_streaming(request, path, codec).await {
Ok(response) => Ok(Ok(response.into_inner())),
Err(status) => Ok(Err(status)),
}
}
/// Performs a Client Streaming gRPC call (Stream of Requests -> Single Response).
pub async fn client_streaming(
&self,
method: MethodDescriptor,
payload_stream: impl Stream<Item = serde_json::Value> + Send + 'static,
headers: Vec<(String, String)>,
) -> Result<Result<serde_json::Value, tonic::Status>, ClientError> {
let mut client = Grpc::new(self.channel.clone());
client.ready().await.map_err(ClientError::ClientNotReady)?;
let codec = JsonCodec::new(method.input(), method.output());
let path = http_path(&method);
let request = build_request(payload_stream, headers)?;
match client.client_streaming(request, path, codec).await {
Ok(response) => Ok(Ok(response.into_inner())),
Err(status) => Ok(Err(status)),
}
}
/// Performs a Bidirectional Streaming gRPC call (Stream of Requests -> Stream of Responses).
pub async fn bidirectional_streaming(
&self,
method: MethodDescriptor,
payload_stream: impl Stream<Item = serde_json::Value> + Send + 'static,
headers: Vec<(String, String)>,
) -> Result<
Result<impl Stream<Item = Result<serde_json::Value, tonic::Status>>, tonic::Status>,
ClientError,
> {
let mut client = Grpc::new(self.channel.clone());
client.ready().await.map_err(ClientError::ClientNotReady)?;
let codec = JsonCodec::new(method.input(), method.output());
let path = http_path(&method);
let request = build_request(payload_stream, headers)?;
match client.streaming(request, path, codec).await {
Ok(response) => Ok(Ok(response.into_inner())),
Err(status) => Ok(Err(status)),
}
}
}
/// Builds the HTTP/2 request path for a gRPC method: `/package.Service/Method`.
///
/// # Panics
/// Panics only if the formatted path is not a valid URI path, which cannot
/// happen for well-formed protobuf service/method identifiers.
fn http_path(method: &MethodDescriptor) -> http::uri::PathAndQuery {
    let service = method.parent_service().full_name();
    let rendered = format!("/{service}/{}", method.name());
    http::uri::PathAndQuery::from_str(&rendered).expect("valid gRPC path")
}
/// Wraps `payload` in a `tonic::Request`, attaching each `(key, value)` pair
/// from `headers` as request metadata.
///
/// # Errors
/// Returns `ClientError::InvalidMetadataKey` / `InvalidMetadataValue` when a
/// pair cannot be parsed into valid gRPC metadata.
fn build_request<T>(payload: T, headers: Vec<(String, String)>) -> Result<Request<T>, ClientError> {
    let mut request = Request::new(payload);
    for (name, value) in headers {
        let parsed_key =
            MetadataKey::from_str(&name).map_err(|source| ClientError::InvalidMetadataKey {
                key: name.clone(),
                source,
            })?;
        let parsed_value = MetadataValue::from_str(&value)
            .map_err(|source| ClientError::InvalidMetadataValue { key: name, source })?;
        request.metadata_mut().insert(parsed_key, parsed_value);
    }
    Ok(request)
}

View file

@ -0,0 +1,133 @@
use crate::{client::GrpcClient, descriptor::DescriptorRegistry};
use echo_service::EchoServiceServer;
use echo_service::FILE_DESCRIPTOR_SET;
use echo_service_impl::EchoServiceImpl;
use tokio_stream::StreamExt;
use tonic::transport::Server;
mod echo_service_impl;
/// Binds an ephemeral local port, spawns the echo gRPC server on it in a
/// background task, and returns the `http://<addr>` URL to reach it.
async fn spawn_server() -> String {
    // Port 0 lets the OS pick a free port, so parallel tests don't collide.
    let listener = tokio::net::TcpListener::bind("0.0.0.0:0").await.unwrap();
    let addr = listener.local_addr().unwrap();
    tokio::spawn(async move {
        Server::builder()
            .add_service(EchoServiceServer::new(EchoServiceImpl))
            .serve_with_incoming(tokio_stream::wrappers::TcpListenerStream::new(listener))
            .await
            .unwrap();
    });
    format!("http://{}", addr)
}
/// Unary round-trip: the echo server must return the request message unchanged.
#[tokio::test]
async fn test_unary() {
    let url = spawn_server().await;
    let registry = DescriptorRegistry::from_bytes(FILE_DESCRIPTOR_SET).unwrap();
    let method = registry
        .fetch_method_descriptor("echo.EchoService/UnaryEcho")
        .unwrap();
    let client = GrpcClient::connect(&url).await.unwrap();
    let payload = serde_json::json!({ "message": "hello" });
    let res = client
        .unary(method, payload, vec![])
        .await
        .unwrap()
        .unwrap();
    assert_eq!(res["message"], "hello");
}
/// Server streaming: one request yields a stream of responses; the echo
/// implementation is expected to emit three sequence-numbered messages.
#[tokio::test]
async fn test_server_streaming() {
    let url = spawn_server().await;
    let registry = DescriptorRegistry::from_bytes(FILE_DESCRIPTOR_SET).unwrap();
    let method = registry
        .fetch_method_descriptor("echo.EchoService/ServerStreamingEcho")
        .unwrap();
    let client = GrpcClient::connect(&url).await.unwrap();
    let payload = serde_json::json!({ "message": "stream" });
    let stream = client
        .server_streaming(method, payload, vec![])
        .await
        .unwrap()
        .unwrap();
    // Drain the whole stream, failing the test on any per-item Status error.
    let results: Vec<_> = stream.map(|r| r.unwrap()).collect().await;
    assert_eq!(results.len(), 3);
    assert_eq!(results[0]["message"], "stream - seq 0");
    assert_eq!(results[1]["message"], "stream - seq 1");
    assert_eq!(results[2]["message"], "stream - seq 2");
}
/// Client streaming: a stream of requests collapses to a single response; the
/// echo implementation is expected to concatenate the messages ("A"+"B"+"C").
#[tokio::test]
async fn test_client_streaming() {
    let url = spawn_server().await;
    let registry = DescriptorRegistry::from_bytes(FILE_DESCRIPTOR_SET).unwrap();
    let method = registry
        .fetch_method_descriptor("echo.EchoService/ClientStreamingEcho")
        .unwrap();
    let client = GrpcClient::connect(&url).await.unwrap();
    let payload = serde_json::json!([
        { "message": "A" },
        { "message": "B" },
        { "message": "C" }
    ]);
    // Feed the JSON array elements as an in-memory request stream.
    let stream_source = tokio_stream::iter(payload.as_array().unwrap().clone());
    let res = client
        .client_streaming(method, stream_source, vec![])
        .await
        .unwrap()
        .unwrap();
    assert_eq!(res["message"], "ABC");
}
#[tokio::test]
async fn test_bidirectional_streaming() {
    // Each outbound message should come back prefixed with "echo: ".
    let url = spawn_server().await;
    let registry = DescriptorRegistry::from_bytes(FILE_DESCRIPTOR_SET).unwrap();
    let descriptor = registry
        .fetch_method_descriptor("echo.EchoService/BidirectionalEcho")
        .unwrap();
    let client = GrpcClient::connect(&url).await.unwrap();
    let items = vec![
        serde_json::json!({ "message": "Ping" }),
        serde_json::json!({ "message": "Pong" }),
    ];
    let outbound = tokio_stream::iter(items);
    let inbound = client
        .bidirectional_streaming(descriptor, outbound, vec![])
        .await
        .unwrap()
        .unwrap();
    let messages: Vec<_> = inbound.map(|item| item.unwrap()).collect().await;
    assert_eq!(messages.len(), 2);
    assert_eq!(messages[0]["message"], "echo: Ping");
    assert_eq!(messages[1]["message"], "echo: Pong");
}

View file

@ -0,0 +1,88 @@
use echo_service::EchoService;
use echo_service::pb::{EchoRequest, EchoResponse};
use futures_util::Stream;
use std::pin::Pin;
use tokio::sync::mpsc;
use tokio_stream::{StreamExt, wrappers::ReceiverStream};
use tonic::{Request, Response, Status, Streaming};
pub struct EchoServiceImpl;
#[tonic::async_trait]
impl EchoService for EchoServiceImpl {
type BidirectionalEchoStream = Pin<Box<dyn Stream<Item = Result<EchoResponse, Status>> + Send>>;
type ServerStreamingEchoStream = ReceiverStream<Result<EchoResponse, Status>>;
async fn unary_echo(
&self,
request: Request<EchoRequest>,
) -> Result<Response<EchoResponse>, Status> {
Ok(Response::new(EchoResponse {
message: request.into_inner().message,
}))
}
async fn server_streaming_echo(
&self,
request: Request<EchoRequest>,
) -> Result<Response<Self::ServerStreamingEchoStream>, Status> {
let msg = request.into_inner().message;
let (tx, rx) = mpsc::channel(4);
tokio::spawn(async move {
for i in 0..3 {
let response = EchoResponse {
message: format!("{} - seq {}", msg, i),
};
tx.send(Ok(response)).await.ok();
}
});
Ok(Response::new(ReceiverStream::new(rx)))
}
async fn client_streaming_echo(
&self,
request: Request<Streaming<EchoRequest>>,
) -> Result<Response<EchoResponse>, Status> {
let mut stream = request.into_inner();
let mut full_msg = String::new();
while let Some(req) = stream.next().await {
let req = req?;
full_msg.push_str(&req.message);
}
Ok(Response::new(EchoResponse { message: full_msg }))
}
async fn bidirectional_echo(
&self,
request: Request<Streaming<EchoRequest>>,
) -> Result<Response<Self::BidirectionalEchoStream>, Status> {
let mut in_stream = request.into_inner();
let (tx, rx) = mpsc::channel(128);
tokio::spawn(async move {
while let Some(result) = in_stream.next().await {
match result {
Ok(req) => {
let resp = EchoResponse {
message: format!("echo: {}", req.message),
};
if tx.send(Ok(resp)).await.is_err() {
break;
}
}
Err(e) => {
let _ = tx.send(Err(e)).await;
break;
}
}
}
});
Ok(Response::new(Box::pin(ReceiverStream::new(rx))))
}
}

98
grab/src/codec.rs Normal file
View file

@ -0,0 +1,98 @@
//! # JSON <-> Protobuf Codec
//!
//! This module implements a custom `tonic::codec::Codec` that allows `tonic` to work
//! directly with `serde_json::Value`.
//!
//! It acts as a bridge:
//! - **Encoding (Request):** Takes a JSON value -> Validates against Schema -> Serializes to Protobuf bytes.
//! - **Decoding (Response):** Takes Protobuf bytes -> Deserializes using Schema -> Converts to JSON value.
use prost::Message;
use prost_reflect::{DynamicMessage, MessageDescriptor};
use tonic::{
Status,
codec::{Codec, DecodeBuf, Decoder, EncodeBuf, Encoder},
};
/// A custom Codec that bridges `serde_json::Value` and Protobuf binary format.
///
/// It holds the descriptors (schemas) for both the request and the response messages,
/// allowing it to perform dynamic serialization.
pub struct JsonCodec {
    /// Schema for the input message; handed to the encoder to validate and
    /// serialize outgoing JSON requests.
    req_desc: MessageDescriptor,
    /// Schema for the output message; handed to the decoder to deserialize
    /// incoming Protobuf responses.
    res_desc: MessageDescriptor,
}
impl JsonCodec {
/// Creates a new `JsonCodec`.
///
/// # Arguments
/// * `req_desc` - Descriptor for the request message type.
/// * `res_desc` - Descriptor for the response message type.
pub fn new(req_desc: MessageDescriptor, res_desc: MessageDescriptor) -> Self {
Self { req_desc, res_desc }
}
}
impl Codec for JsonCodec {
    type Encode = serde_json::Value;
    type Decode = serde_json::Value;
    type Encoder = JsonEncoder;
    type Decoder = JsonDecoder;
    /// Hands out an encoder bound to the request schema.
    fn encoder(&mut self) -> Self::Encoder {
        let schema = self.req_desc.clone();
        JsonEncoder(schema)
    }
    /// Hands out a decoder bound to the response schema.
    fn decoder(&mut self) -> Self::Decoder {
        let schema = self.res_desc.clone();
        JsonDecoder(schema)
    }
}
/// Responsible for encoding a JSON value into Protobuf bytes.
/// The wrapped descriptor is the request message schema.
pub struct JsonEncoder(MessageDescriptor);
impl Encoder for JsonEncoder {
    type Item = serde_json::Value;
    type Error = Status;
    /// Validates `item` against the request schema and writes its Protobuf
    /// wire representation into `dst`.
    fn encode(&mut self, item: Self::Item, dst: &mut EncodeBuf<'_>) -> Result<(), Self::Error> {
        // `DynamicMessage::deserialize` drives any Serde deserializer, and a
        // `serde_json::Value` acts as one directly.
        let schema = self.0.clone();
        match DynamicMessage::deserialize(schema, item) {
            Ok(message) => {
                message.encode_raw(dst);
                Ok(())
            }
            Err(e) => Err(Status::invalid_argument(format!(
                "JSON structure does not match Protobuf schema: {}",
                e
            ))),
        }
    }
}
/// Responsible for decoding Protobuf bytes into a JSON value.
/// The wrapped descriptor is the response message schema.
pub struct JsonDecoder(MessageDescriptor);
impl Decoder for JsonDecoder {
    type Item = serde_json::Value;
    type Error = Status;
    /// Reads Protobuf bytes from `src` and converts them into a JSON value
    /// using the response schema.
    fn decode(&mut self, src: &mut DecodeBuf<'_>) -> Result<Option<Self::Item>, Self::Error> {
        // Step 1: bytes -> DynamicMessage, guided by the response descriptor.
        let mut message = DynamicMessage::new(self.0.clone());
        if let Err(e) = message.merge(src) {
            return Err(Status::internal(format!(
                "Failed to decode Protobuf bytes: {}",
                e
            )));
        }
        // Step 2: DynamicMessage -> serde_json::Value, so callers keep
        // working with structured data instead of raw bytes.
        match serde_json::to_value(&message) {
            Ok(value) => Ok(Some(value)),
            Err(e) => Err(Status::internal(format!(
                "Failed to map response to JSON: {}",
                e
            ))),
        }
    }
}

68
grab/src/descriptor.rs Normal file
View file

@ -0,0 +1,68 @@
//! # Descriptor Registry
//!
//! This module handles the loading and querying of Protobuf `FileDescriptorSet`s.
//! It acts as a database of schema definitions, allowing the application to
//! resolve service and method names into `MethodDescriptor` objects required
//! for reflection.
use prost_reflect::{DescriptorPool, MethodDescriptor};
use std::path::Path;
use thiserror::Error;
/// Errors raised while loading descriptor sets or resolving method paths.
#[derive(Error, Debug)]
pub enum DescriptorError {
    /// Reading the descriptor file from disk failed.
    #[error("Failed to read descriptor file: {0}")]
    Io(#[from] std::io::Error),
    /// The bytes were not a valid encoded `FileDescriptorSet`.
    #[error("Failed to decode descriptor set: {0}")]
    Decode(#[from] prost_reflect::DescriptorError),
    /// No service with the given fully-qualified name exists in the pool.
    #[error("Service '{0}' not found")]
    ServiceNotFound(String),
    /// The service exists but has no method with the given name.
    #[error("Method '{0}' not found")]
    MethodNotFound(String),
    /// The method path did not contain a '/' separator.
    #[error("Invalid method path. Expected format 'package.Service/Method', got '{0}'")]
    InvalidFormat(String),
}
/// A registry that holds loaded Protobuf definitions and allows looking up
/// services and methods by name.
pub struct DescriptorRegistry {
    // Parsed pool of every message/service definition from the loaded set.
    pool: DescriptorPool,
}
impl DescriptorRegistry {
/// Decodes a FileDescriptorSet directly from a byte slice.
/// Useful for tests or embedded descriptors.
#[cfg(test)]
pub fn from_bytes(bytes: &[u8]) -> Result<Self, DescriptorError> {
let pool = DescriptorPool::decode(bytes)?;
Ok(Self { pool })
}
/// Loads a FileDescriptorSet from a file on disk and builds the registry.
pub fn from_file(path: impl AsRef<Path>) -> Result<Self, DescriptorError> {
let bytes = std::fs::read(path)?;
let pool = DescriptorPool::decode(bytes.as_slice())?;
Ok(Self { pool })
}
/// Resolves a full method path (e.g., "my.package.MyService/MyMethod")
/// into a MethodDescriptor.
pub fn fetch_method_descriptor(
&self,
method_path: &str,
) -> Result<MethodDescriptor, DescriptorError> {
let (service_name, method_name) = method_path
.split_once('/')
.ok_or_else(|| DescriptorError::InvalidFormat(method_path.to_string()))?;
let service = self
.pool
.get_service_by_name(service_name)
.ok_or_else(|| DescriptorError::ServiceNotFound(service_name.to_string()))?;
service
.methods()
.find(|m| m.name() == method_name)
.ok_or_else(|| DescriptorError::MethodNotFound(method_name.to_string()))
}
}

175
grab/src/main.rs Normal file
View file

@ -0,0 +1,175 @@
#![doc = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/README.md"))]
//! # gRab CLI Entry Point
//!
//! The main module orchestrates the CLI workflow:
//! 1. Parses command-line arguments.
//! 2. Loads the Protobuf descriptor registry.
//! 3. Connects to the gRPC server.
//! 4. Dispatches the request to the appropriate method type (Unary, Streaming, etc.).
use clap::Parser;
use client::GrpcClient;
use descriptor::DescriptorRegistry;
use futures_util::{Stream, StreamExt};
use prost_reflect::MethodDescriptor;
use std::path::PathBuf;
use std::process;
mod client;
mod codec;
mod descriptor;
// Command-line arguments for the `grab` dynamic gRPC client.
// NOTE: `//` comments are used deliberately — `///` doc comments on clap
// derive fields would feed clap's long help and change `--help` output.
#[derive(Parser)]
#[command(name = "grab", version, about = "Dynamic gRPC CLI")]
struct Cli {
    // Compiled FileDescriptorSet describing the callable services.
    #[arg(long, help = "Path to the descriptor set (.bin)")]
    proto_set: PathBuf,
    // Request payload: a JSON object for unary/server-streaming calls, a
    // JSON array (one element per message) for client/bidirectional streams.
    #[arg(long, help = "JSON body (Object for Unary, Array for Streaming)")]
    body: String,
    // Extra gRPC metadata entries, each supplied as "key:value".
    #[arg(short = 'H', long = "header", value_parser = parse_header)]
    headers: Vec<(String, String)>,
    #[arg(help = "Server URL (http://host:port)")]
    url: String,
    #[arg(help = "Method (package.Service/Method)")]
    method: String,
}
/// Parses a `-H`/`--header` argument of the form "key:value".
///
/// Both sides are trimmed of surrounding whitespace; the value may itself
/// contain further ':' characters (only the first one splits). Returns an
/// error if the separator is missing or the key is empty — an empty key
/// would only fail later, deep inside metadata construction, with a far
/// less helpful message.
fn parse_header(s: &str) -> Result<(String, String), String> {
    let (key, value) = s
        .split_once(':')
        .ok_or_else(|| "Format must be 'key:value'".to_string())?;
    let key = key.trim();
    if key.is_empty() {
        return Err("Format must be 'key:value' (key must not be empty)".to_string());
    }
    Ok((key.to_string(), value.trim().to_string()))
}
#[tokio::main]
async fn main() {
    // Surface any failure on stderr and exit non-zero so shells can detect it.
    match run().await {
        Ok(()) => {}
        Err(e) => {
            eprintln!("Error: {}", e);
            process::exit(1);
        }
    }
}
/// Parses CLI arguments, resolves the target method, connects, and dispatches
/// the call to the handler matching the method's streaming shape.
async fn run() -> anyhow::Result<()> {
    let args = Cli::parse();
    let registry = DescriptorRegistry::from_file(&args.proto_set)?;
    let method = registry.fetch_method_descriptor(&args.method)?;
    let body_json: serde_json::Value =
        serde_json::from_str(&args.body).map_err(|e| anyhow::anyhow!("Invalid JSON: {}", e))?;
    let client = GrpcClient::connect(&args.url).await?;
    println!("Calling {}...", args.method);
    let client_streams = method.is_client_streaming();
    let server_streams = method.is_server_streaming();
    if client_streams && server_streams {
        handle_bidirectional_stream(client, method, body_json, args.headers).await
    } else if client_streams {
        handle_client_stream(client, method, body_json, args.headers).await
    } else if server_streams {
        handle_server_stream(client, method, body_json, args.headers).await
    } else {
        handle_unary(client, method, body_json, args.headers).await
    }
}
// --- Handlers ---
/// Performs a unary call and prints either the JSON reply or the gRPC status.
async fn handle_unary(
    client: GrpcClient,
    method: MethodDescriptor,
    body: serde_json::Value,
    headers: Vec<(String, String)>,
) -> anyhow::Result<()> {
    let outcome = client.unary(method, body, headers).await?;
    match outcome {
        Ok(val) => print_json(&val),
        Err(status) => print_status(status),
    }
    Ok(())
}
/// Performs a server-streaming call and prints every streamed message.
async fn handle_server_stream(
    client: GrpcClient,
    method: MethodDescriptor,
    body: serde_json::Value,
    headers: Vec<(String, String)>,
) -> anyhow::Result<()> {
    let outcome = client.server_streaming(method, body, headers).await?;
    match outcome {
        Ok(stream) => print_stream(stream).await,
        Err(status) => print_status(status),
    }
    Ok(())
}
/// Performs a client-streaming call: the JSON array body becomes the request
/// stream and the single reply (or status) is printed.
async fn handle_client_stream(
    client: GrpcClient,
    method: MethodDescriptor,
    body: serde_json::Value,
    headers: Vec<(String, String)>,
) -> anyhow::Result<()> {
    let requests = json_array_to_stream(body)?;
    let outcome = client.client_streaming(method, requests, headers).await?;
    match outcome {
        Ok(val) => print_json(&val),
        Err(status) => print_status(status),
    }
    Ok(())
}
/// Performs a bidirectional call: the JSON array body becomes the request
/// stream and every streamed reply is printed as it arrives.
async fn handle_bidirectional_stream(
    client: GrpcClient,
    method: MethodDescriptor,
    body: serde_json::Value,
    headers: Vec<(String, String)>,
) -> anyhow::Result<()> {
    let requests = json_array_to_stream(body)?;
    let outcome = client.bidirectional_streaming(method, requests, headers).await?;
    match outcome {
        Ok(stream) => print_stream(stream).await,
        Err(status) => print_status(status),
    }
    Ok(())
}
// --- Helpers ---
/// Turns a JSON array body into a stream of its elements; any other JSON
/// shape is rejected since streaming calls need one message per element.
fn json_array_to_stream(
    json: serde_json::Value,
) -> anyhow::Result<impl Stream<Item = serde_json::Value> + Send + 'static> {
    let serde_json::Value::Array(items) = json else {
        return Err(anyhow::anyhow!(
            "Client streaming requires a JSON Array body"
        ));
    };
    Ok(tokio_stream::iter(items))
}
/// Pretty-prints a JSON value, falling back to compact output if
/// pretty-printing fails.
fn print_json(val: &serde_json::Value) {
    let rendered = serde_json::to_string_pretty(val).unwrap_or_else(|_| val.to_string());
    println!("{}", rendered);
}
/// Reports a failed gRPC call on stderr with its status code and message.
fn print_status(status: tonic::Status) {
    let code = status.code();
    let message = status.message();
    eprintln!("gRPC Failed: code={:?} message={:?}", code, message);
}
/// Drains a response stream to completion, printing each item either as JSON
/// or as a gRPC status.
async fn print_stream(
    mut stream: impl Stream<Item = Result<serde_json::Value, tonic::Status>> + Unpin,
) {
    while let Some(item) = stream.next().await {
        match item {
            Ok(val) => print_json(&val),
            Err(status) => print_status(status),
        }
    }
}

22
release-plz.toml Normal file
View file

@ -0,0 +1,22 @@
[workspace]
# point every crate's changelog at the single CHANGELOG.md in the repository root
changelog_path = "./CHANGELOG.md"
pr_draft = true
pr_labels = ["release"]
pr_branch_prefix = "release-"
[changelog]
body = """
## `{{ package }}` - [{{ version }}]{%- if release_link -%}({{ release_link }}){% endif %} - {{ timestamp | date(format="%Y-%m-%d") }}
{% for group, commits in commits | group_by(attribute="group") %}
### {{ group | upper_first }}
{% for commit in commits %}
{%- if commit.scope -%}
- *({{commit.scope}})* {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message }}{%- if commit.links %} ({% for link in commit.links %}[{{link.text}}]({{link.href}}) {% endfor -%}){% endif %}
{% else -%}
- {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message }}
{% endif -%}
{% endfor -%}
{% endfor -%}
"""

3
rust-toolchain.toml Normal file
View file

@ -0,0 +1,3 @@
[toolchain]
channel = "1.92"
components = ["rustfmt", "clippy", "rust-src", "rust-analyzer"]

3
yamlfmt.yml Normal file
View file

@ -0,0 +1,3 @@
formatter:
type: basic
retain_line_breaks_single: true