chore: update to http 1.x crates - rebased to main #92

Merged
merged 5 commits on Sep 9, 2024
6 changes: 4 additions & 2 deletions .github/workflows/CI.yaml
@@ -1,6 +1,6 @@
name: bigtable_rs CI

on:
on:
push:
branches:
- main
@@ -20,7 +20,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macOS-latest] # todo: support windows-latest after knowing how to solve openssl issues
os: [ ubuntu-latest, macOS-latest ] # todo: support windows-latest after knowing how to solve openssl issues

steps:
# cache cargo build
@@ -60,6 +60,8 @@ jobs:
run: cargo fmt -- --check
- name: Cargo build
run: cargo build
env:
BUILD_BIGTABLE_RS_GOOGLE_PROTO: 'true'
- name: Check git status clean
uses: CatChen/check-git-status-action@v1
with:
19 changes: 10 additions & 9 deletions bigtable_rs/Cargo.toml
@@ -13,14 +13,15 @@ readme = "../README.md"

[dependencies]
# tonic, prost, and prost-types are needed to build the generated rs files
http = "0.2.11"
http = "1.1.0"
hyper-util = { version = "0.1.7", features = ["tokio"] }
tokio = { version = "1.25.0", features = ["rt-multi-thread"] }
tonic = { version = "0.11.0", features = ["tls", "transport"] }
tonic = { version = "0.12.2", features = ["tls", "transport"] }
tower = { version = "0.4" }
prost = "0.12.2"
prost-types = "0.12.2"
prost-wkt = "0.5.0"
prost-wkt-types = "0.5.0"
prost = "0.13.1"
prost-types = "0.13.1"
prost-wkt = { version = "0.6.0" }
prost-wkt-types = { version = "0.6.0" }
serde = { version = "1.0.192", features = ["derive"] }
serde_with = { version = "3.4.0", features = ["base64"] }
# end of above part
@@ -34,6 +35,6 @@ serde_json = "1.0.85"
serde_path_to_error = "0.1.8"

[build-dependencies]
tonic-build = { version = "0.11.0", features = ["cleanup-markdown"] }
prost-build = "0.12.2"
prost-wkt-build = "0.5.0"
tonic-build = { version = "0.12.2", features = ["cleanup-markdown"] }
prost-build = "0.13.1"
prost-wkt-build = { version = "0.6.0" }
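
The prost, prost-wkt, and tonic crates all consume the same generated protobuf structs, so they have to be bumped in lockstep (prost 0.13 pairs with tonic 0.12 and prost-wkt 0.6). A rough illustration of what prost-wkt-types contributes, not taken from the PR and assuming the 0.6 API follows the crate's usual proto3 JSON mapping:

```rust
use prost_wkt_types::Timestamp;

fn main() -> Result<(), serde_json::Error> {
    let ts = Timestamp { seconds: 1_700_000_000, nanos: 0 };
    // prost-wkt-types ships serde impls for the well-known types, following the
    // proto3 JSON mapping (a Timestamp is rendered as an RFC 3339 string,
    // not a {seconds, nanos} object).
    let json = serde_json::to_string(&ts)?;
    println!("{json}");
    let back: Timestamp = serde_json::from_str(&json)?;
    assert_eq!(back.seconds, ts.seconds);
    Ok(())
}
```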
184 changes: 102 additions & 82 deletions bigtable_rs/build.rs
@@ -3,89 +3,109 @@
/// https://github.com/hyperium/tonic/tree/master/tonic-build
/// https://github.com/tokio-rs/prost/issues/672
fn main() -> Result<(), Box<dyn std::error::Error>> {
// Uncomment the code below and cargo build again, when updating google protos
// Conditionally run this build.rs so we do not need protoc dependency
// for normal build process.
// Set up env parameter BUILD_BIGTABLE_RS_GOOGLE_PROTO=true
// whenever we need to update the generated google proto files
match std::env::var("BUILD_BIGTABLE_RS_GOOGLE_PROTO") {
Ok(var) => {
if var != "true" {
return Ok(());
}
}
Err(_) => return Ok(()),
}

// use prost_wkt_build::{FileDescriptorSet, Message};
// use std::{env, path::PathBuf};
// let out = PathBuf::from(env::var("OUT_DIR").unwrap());
// let descriptor_file = out.join("descriptors.bin");
//
// tonic_build::configure()
// .build_server(false)
// .out_dir("src/google")
// .compile_well_known_types(true)
// .type_attribute(".", "#[serde_with::serde_as]")
// .type_attribute(".", "#[derive(serde::Serialize,serde::Deserialize)]")
// .type_attribute(".", "#[serde(rename_all = \"camelCase\")]")
// .type_attribute(
// ".google.cloud.conformance.bigtable.v2.ReadRowsTest",
// "#[serde(default)]",
// )
// .field_attribute(
// ".google.bigtable.v2.ReadRowsResponse.CellChunk.row_key",
// "#[serde_as(as = \"serde_with::base64::Base64\")]",
// )
// .field_attribute(
// ".google.bigtable.v2.ReadRowsResponse.CellChunk.row_key",
// "#[serde(default)]",
// )
// .field_attribute(
// ".google.bigtable.v2.ReadRowsResponse.CellChunk.qualifier",
// "#[serde_as(as = \"Option<serde_with::base64::Base64>\")]",
// )
// .field_attribute(
// ".google.bigtable.v2.ReadRowsResponse.CellChunk.qualifier",
// "#[serde(default)]",
// )
// .field_attribute(
// ".google.bigtable.v2.ReadRowsResponse.CellChunk.value",
// "#[serde_as(as = \"serde_with::base64::Base64\")]",
// )
// .field_attribute(
// ".google.bigtable.v2.ReadRowsResponse.CellChunk.timestamp_micros",
// "#[serde(default)]",
// )
// .field_attribute(
// ".google.bigtable.v2.ReadRowsResponse.CellChunk.labels",
// "#[serde(default)]",
// )
// .field_attribute(
// ".google.bigtable.v2.ReadRowsResponse.CellChunk.value",
// "#[serde(default)]",
// )
// .field_attribute(
// ".google.bigtable.v2.ReadRowsResponse.CellChunk.timestamp_micros",
// "#[serde_as(as = \"serde_with::DisplayFromStr\")]",
// )
// .field_attribute(
// ".google.bigtable.v2.ReadRowsResponse.CellChunk.value_size",
// "#[serde(default)]",
// )
// .field_attribute(
// ".google.cloud.conformance.bigtable.v2.ReadRowsTest.Result.timestamp_micros",
// "#[serde_as(as = \"serde_with::DisplayFromStr\")]",
// )
// .extern_path(
// ".google.protobuf.BytesValue",
// "::prost::alloc::vec::Vec<u8>",
// )
// .extern_path(
// ".google.protobuf.StringValue",
// "::prost::alloc::string::String",
// )
// .extern_path(".google.protobuf", "::prost_wkt_types")
// .file_descriptor_set_path(&descriptor_file)
// .compile(
// &[
// "../googleapis/google/bigtable/v2/bigtable.proto",
// "../googleapis/test/bigtable_test.proto", // only works with fork https://github.com/liufuyang/googleapis
// ],
// &["../googleapis"],
// )?;
//
// let descriptor_bytes = std::fs::read(descriptor_file).unwrap();
// let descriptor = FileDescriptorSet::decode(&descriptor_bytes[..]).unwrap();
// prost_wkt_build::add_serde(out, descriptor);
println!(
"cargo:warning=Running build.rs to generate and format Google API Bigtable proto rs files."
);

use prost_wkt_build::{FileDescriptorSet, Message};
use std::{env, path::PathBuf};
let out = PathBuf::from(env::var("OUT_DIR").unwrap());
let descriptor_file = out.join("descriptors.bin");

tonic_build::configure()
.build_server(false)
.out_dir("src/google")
.compile_well_known_types(true)
.type_attribute(".", "#[serde_with::serde_as]")
.type_attribute(".", "#[derive(serde::Serialize,serde::Deserialize)]")
.type_attribute(".", "#[serde(rename_all = \"camelCase\")]")
.type_attribute(
".google.cloud.conformance.bigtable.v2.ReadRowsTest",
"#[serde(default)]",
)
.field_attribute(
".google.bigtable.v2.ReadRowsResponse.CellChunk.row_key",
"#[serde_as(as = \"serde_with::base64::Base64\")]",
)
.field_attribute(
".google.bigtable.v2.ReadRowsResponse.CellChunk.row_key",
"#[serde(default)]",
)
.field_attribute(
".google.bigtable.v2.ReadRowsResponse.CellChunk.qualifier",
"#[serde_as(as = \"Option<serde_with::base64::Base64>\")]",
)
.field_attribute(
".google.bigtable.v2.ReadRowsResponse.CellChunk.qualifier",
"#[serde(default)]",
)
.field_attribute(
".google.bigtable.v2.ReadRowsResponse.CellChunk.value",
"#[serde_as(as = \"serde_with::base64::Base64\")]",
)
.field_attribute(
".google.bigtable.v2.ReadRowsResponse.CellChunk.timestamp_micros",
"#[serde(default)]",
)
.field_attribute(
".google.bigtable.v2.ReadRowsResponse.CellChunk.labels",
"#[serde(default)]",
)
.field_attribute(
".google.bigtable.v2.ReadRowsResponse.CellChunk.value",
"#[serde(default)]",
)
.field_attribute(
".google.bigtable.v2.ReadRowsResponse.CellChunk.timestamp_micros",
"#[serde_as(as = \"serde_with::DisplayFromStr\")]",
)
.field_attribute(
".google.bigtable.v2.ReadRowsResponse.CellChunk.value_size",
"#[serde(default)]",
)
.field_attribute(
".google.cloud.conformance.bigtable.v2.ReadRowsTest.Result.timestamp_micros",
"#[serde_as(as = \"serde_with::DisplayFromStr\")]",
)
.extern_path(
".google.protobuf.BytesValue",
"::prost::alloc::vec::Vec<u8>",
)
.extern_path(
".google.protobuf.StringValue",
"::prost::alloc::string::String",
)
.extern_path(".google.protobuf", "::prost_wkt_types")
.file_descriptor_set_path(&descriptor_file)
.compile(
&[
"../googleapis/google/bigtable/v2/bigtable.proto",
"../googleapis/test/bigtable_test.proto", // only works with fork https://github.com/liufuyang/googleapis
],
&["../googleapis"],
)?;

let descriptor_bytes = std::fs::read(descriptor_file).unwrap();
let descriptor = FileDescriptorSet::decode(&descriptor_bytes[..]).unwrap();
prost_wkt_build::add_serde(out, descriptor);

std::process::Command::new("cargo")
.arg("fmt")
.output()
.expect("Running `cargo fmt` failed");

Ok(())
}
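
The type_attribute and field_attribute calls above decorate the generated Bigtable structs so the conformance-test JSON (base64-encoded bytes, stringified int64) deserializes cleanly. A hand-written stand-in, not the generated code, showing the effect of those annotations, assuming serde_with 3.x with the base64 feature enabled (as in the Cargo.toml above):

```rust
use serde::{Deserialize, Serialize};

// Minimal stand-in mirroring a couple of CellChunk fields.
#[serde_with::serde_as]
#[derive(Serialize, Deserialize, Debug, Default)]
#[serde(rename_all = "camelCase")]
struct CellChunkLike {
    // Bytes fields arrive base64-encoded in the proto3 JSON form.
    #[serde_as(as = "serde_with::base64::Base64")]
    #[serde(default)]
    row_key: Vec<u8>,
    // 64-bit integers arrive as strings.
    #[serde_as(as = "serde_with::DisplayFromStr")]
    #[serde(default)]
    timestamp_micros: i64,
}

fn main() -> Result<(), serde_json::Error> {
    let chunk: CellChunkLike =
        serde_json::from_str(r#"{"rowKey":"a2V5MQ==","timestampMicros":"1000"}"#)?;
    assert_eq!(chunk.row_key, b"key1".to_vec());
    assert_eq!(chunk.timestamp_micros, 1_000);
    Ok(())
}
```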
3 changes: 1 addition & 2 deletions bigtable_rs/src/auth_service.rs
@@ -7,7 +7,6 @@ use gcp_auth::TokenProvider;
use http::{HeaderValue, Request, Response};
use log::debug;
use tonic::body::BoxBody;
use tonic::transport::Body;
use tonic::transport::Channel;
use tower::Service;

@@ -33,7 +32,7 @@ impl AuthSvc {
}

impl Service<Request<BoxBody>> for AuthSvc {
type Response = Response<Body>;
type Response = Response<BoxBody>;
type Error = Box<dyn std::error::Error + Send + Sync>;
#[allow(clippy::type_complexity)]
type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;
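
With hyper 1.x, tonic 0.12 drops the tonic::transport::Body re-export, and a Channel now yields http::Response<tonic::body::BoxBody>, which is why the middleware's Response type changes above. A simplified stand-in for the auth middleware (hypothetical name, static header instead of a gcp_auth token fetch), sketching what a tower Service wrapping a tonic 0.12 Channel looks like:

```rust
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};

use http::{HeaderValue, Request, Response};
use tonic::body::BoxBody;
use tonic::transport::Channel;
use tower::Service;

/// Hypothetical middleware: injects a fixed Authorization header, then
/// forwards the request to the inner Channel.
#[derive(Clone)]
struct StaticAuthSvc {
    inner: Channel,
}

impl Service<Request<BoxBody>> for StaticAuthSvc {
    // tonic 0.12: the Channel responds with BoxBody, not the old transport::Body.
    type Response = Response<BoxBody>;
    type Error = Box<dyn std::error::Error + Send + Sync>;
    type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;

    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        self.inner.poll_ready(cx).map_err(Into::into)
    }

    fn call(&mut self, mut req: Request<BoxBody>) -> Self::Future {
        // Take the Channel whose readiness was just polled; keep a fresh clone for later calls.
        let clone = self.inner.clone();
        let mut inner = std::mem::replace(&mut self.inner, clone);
        req.headers_mut().insert(
            http::header::AUTHORIZATION,
            HeaderValue::from_static("Bearer dummy-token"),
        );
        Box::pin(async move { inner.call(req).await.map_err(Into::into) })
    }
}
```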
8 changes: 7 additions & 1 deletion bigtable_rs/src/bigtable.rs
@@ -364,7 +364,13 @@ impl BigTableConnection {

let path: String = path.to_string();
let connector = tower::service_fn({
move |_: tonic::transport::Uri| UnixStream::connect(path.clone())
move |_: tonic::transport::Uri| {
let path = path.clone();
async move {
let stream = UnixStream::connect(path).await?;
Ok::<_, std::io::Error>(hyper_util::rt::TokioIo::new(stream))
}
}
});

endpoint.connect_with_connector_lazy(connector)
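
For context on the connector change: hyper 1.x defines its own Read/Write traits, so a tokio UnixStream has to be adapted with hyper_util::rt::TokioIo before tonic can drive it as a connection. A standalone sketch of the same pattern (hypothetical helper, eager connect rather than the lazy connect used in the crate), modeled on tonic's own Unix-socket example:

```rust
use hyper_util::rt::TokioIo;
use tokio::net::UnixStream;
use tonic::transport::{Channel, Endpoint, Uri};
use tower::service_fn;

async fn connect_uds(path: String) -> Result<Channel, tonic::transport::Error> {
    // The URI is required by Endpoint but ignored by the custom connector.
    let endpoint = Endpoint::try_from("http://[::]:50051")?;
    endpoint
        .connect_with_connector(service_fn(move |_: Uri| {
            let path = path.clone();
            async move {
                let stream = UnixStream::connect(path).await?;
                // TokioIo bridges tokio's AsyncRead/AsyncWrite to hyper 1.x's Read/Write.
                Ok::<_, std::io::Error>(TokioIo::new(stream))
            }
        }))
        .await
}
```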