diff --git a/examples/buf.gen.yaml b/examples/buf.gen.yaml
deleted file mode 100644
index 6bb9c4e3..00000000
--- a/examples/buf.gen.yaml
+++ /dev/null
@@ -1,14 +0,0 @@
-version: v1
-managed:
- enabled: true
- java_package_prefix:
- default: io.opentdf.platform
- except:
- - buf.build/bufbuild/protovalidate
- - buf.build/googleapis/googleapis
- - buf.build/grpc-ecosystem/grpc-gateway
-plugins:
- - plugin: buf.build/protocolbuffers/java:v25.3
- out: ./
- - plugin: buf.build/grpc/java:v1.61.1
- out: ./
diff --git a/examples/buf.lock b/examples/buf.lock
deleted file mode 100644
index deef61e8..00000000
--- a/examples/buf.lock
+++ /dev/null
@@ -1,18 +0,0 @@
-# Generated by buf. DO NOT EDIT.
-version: v1
-deps:
- - remote: buf.build
- owner: bufbuild
- repository: protovalidate
- commit: f05a6f4403ce4327bae4f50f281c3ed0
- digest: shake256:668a0661b8df44d41839194896329330965fc215f3d2f88057fd60eeb759c2daf6cc6edfdd13b2a653d49fe2896ebedcb1a33c4c5b2dd10919f03ffb7fc52ae6
- - remote: buf.build
- owner: googleapis
- repository: googleapis
- commit: 7e6f6e774e29406da95bd61cdcdbc8bc
- digest: shake256:fe43dd2265ea0c07d76bd925eeba612667cf4c948d2ce53d6e367e1b4b3cb5fa69a51e6acb1a6a50d32f894f054a35e6c0406f6808a483f2752e10c866ffbf73
- - remote: buf.build
- owner: grpc-ecosystem
- repository: grpc-gateway
- commit: 3f42134f4c564983838425bc43c7a65f
- digest: shake256:3d11d4c0fe5e05fda0131afefbce233940e27f0c31c5d4e385686aea58ccd30f72053f61af432fa83f1fc11cda57f5f18ca3da26a29064f73c5a0d076bba8d92
\ No newline at end of file
diff --git a/examples/buf.yaml b/examples/buf.yaml
deleted file mode 100644
index 2dc8eb0e..00000000
--- a/examples/buf.yaml
+++ /dev/null
@@ -1,22 +0,0 @@
-version: v1
-deps:
- - buf.build/bufbuild/protovalidate
- - buf.build/googleapis/googleapis
- - buf.build/grpc-ecosystem/grpc-gateway
-breaking:
- use:
- - FILE
- - PACKAGE
- - WIRE_JSON
- - WIRE
-lint:
- allow_comment_ignores: true
- use:
- - DEFAULT
- except:
- - PACKAGE_VERSION_SUFFIX
- ignore_only:
- PACKAGE_VERSION_SUFFIX:
- - google/api/annotations.proto
- - google/api/http.proto
- - google/protobuf/wrappers.proto
diff --git a/examples/pom.xml b/examples/pom.xml
index eb0b94dc..1ed843c1 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -125,59 +125,6 @@
io.opentdf.platform.App
-
- org.apache.maven.plugins
- maven-antrun-plugin
- 3.1.0
-
-
-
- generateSources
- generate-sources
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- run
-
-
-
-
-
-
- org.codehaus.mojo
- build-helper-maven-plugin
- 3.5.0
-
-
- add-source
- generate-sources
-
- add-source
-
-
-
- target/generated-sources
-
-
-
-
-
diff --git a/examples/src/main/java/io/opentdf/platform/CreateAttribute.java b/examples/src/main/java/io/opentdf/platform/CreateAttribute.java
index ba72189e..480b4ecb 100644
--- a/examples/src/main/java/io/opentdf/platform/CreateAttribute.java
+++ b/examples/src/main/java/io/opentdf/platform/CreateAttribute.java
@@ -1,13 +1,14 @@
package io.opentdf.platform;
+import com.connectrpc.ResponseMessageKt;
+import io.opentdf.platform.policy.Attribute;
+import io.opentdf.platform.policy.AttributeRuleTypeEnum;
+import io.opentdf.platform.policy.attributes.CreateAttributeRequest;
+import io.opentdf.platform.policy.attributes.CreateAttributeResponse;
import io.opentdf.platform.sdk.*;
+import java.util.Collections;
import java.util.concurrent.ExecutionException;
-import io.opentdf.platform.policy.AttributeRuleTypeEnum;
-
-import io.opentdf.platform.policy.attributes.*;
-import io.opentdf.platform.policy.Attribute;
-
import java.util.Arrays;
public class CreateAttribute {
@@ -28,7 +29,7 @@ public static void main(String[] args) throws ExecutionException, InterruptedExc
.setRule(AttributeRuleTypeEnum.forNumber(AttributeRuleTypeEnum.ATTRIBUTE_RULE_TYPE_ENUM_ALL_OF_VALUE))
.addAllValues(Arrays.asList("test1", "test2")).build();
- CreateAttributeResponse resp = sdk.getServices().attributes().createAttribute(request).get();
+ CreateAttributeResponse resp = ResponseMessageKt.getOrThrow(sdk.getServices().attributes().createAttributeBlocking(request, Collections.emptyMap()).execute());
Attribute attribute = resp.getAttribute();
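The same call-site change repeats in every example in this PR: the gRPC future-stub call (e.g. `createAttribute(request).get()`) becomes a generated Connect blocking call whose `execute()` result is unwrapped with `ResponseMessageKt.getOrThrow`. A minimal sketch of the new pattern, assuming an `sdk` and `request` built as in the example above (illustrative, not a verbatim excerpt from this diff):

    // old style (gRPC future stub): block on the ListenableFuture
    // CreateAttributeResponse resp = sdk.getServices().attributes().createAttribute(request).get();

    // new style (connect-kotlin blocking client): execute() returns a response message,
    // which getOrThrow() unwraps into the protobuf response or rethrows the RPC failure
    CreateAttributeResponse resp = ResponseMessageKt.getOrThrow(
            sdk.getServices().attributes()
                    .createAttributeBlocking(request, Collections.emptyMap())
                    .execute());
    System.out.println(resp.getAttribute().getId());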
diff --git a/examples/src/main/java/io/opentdf/platform/CreateNamespace.java b/examples/src/main/java/io/opentdf/platform/CreateNamespace.java
index 112cde4f..736d8dce 100644
--- a/examples/src/main/java/io/opentdf/platform/CreateNamespace.java
+++ b/examples/src/main/java/io/opentdf/platform/CreateNamespace.java
@@ -1,10 +1,12 @@
package io.opentdf.platform;
+import com.connectrpc.ResponseMessageKt;
+import io.opentdf.platform.policy.namespaces.CreateNamespaceRequest;
+import io.opentdf.platform.policy.namespaces.CreateNamespaceResponse;
import io.opentdf.platform.sdk.*;
+import java.util.Collections;
import java.util.concurrent.ExecutionException;
-import io.opentdf.platform.policy.namespaces.*;
-
public class CreateNamespace {
public static void main(String[] args) throws ExecutionException, InterruptedException{
@@ -19,7 +21,7 @@ public static void main(String[] args) throws ExecutionException, InterruptedExc
CreateNamespaceRequest request = CreateNamespaceRequest.newBuilder().setName("mynamespace.com").build();
- CreateNamespaceResponse resp = sdk.getServices().namespaces().createNamespace(request).get();
+ CreateNamespaceResponse resp = ResponseMessageKt.getOrThrow(sdk.getServices().namespaces().createNamespaceBlocking(request, Collections.emptyMap()).execute());
System.out.println(resp.getNamespace().getName());
diff --git a/examples/src/main/java/io/opentdf/platform/CreateSubjectConditionSet.java b/examples/src/main/java/io/opentdf/platform/CreateSubjectConditionSet.java
index 0eb6c5a2..5fbecfdd 100644
--- a/examples/src/main/java/io/opentdf/platform/CreateSubjectConditionSet.java
+++ b/examples/src/main/java/io/opentdf/platform/CreateSubjectConditionSet.java
@@ -1,16 +1,18 @@
package io.opentdf.platform;
-import io.opentdf.platform.sdk.*;
-
-import java.util.concurrent.ExecutionException;
-
-import io.opentdf.platform.policy.subjectmapping.*;
-import io.opentdf.platform.policy.SubjectMapping;
-import io.opentdf.platform.policy.SubjectConditionSet;
-import io.opentdf.platform.policy.SubjectSet;
-import io.opentdf.platform.policy.ConditionGroup;
+import com.connectrpc.ResponseMessageKt;
import io.opentdf.platform.policy.Condition;
import io.opentdf.platform.policy.ConditionBooleanTypeEnum;
+import io.opentdf.platform.policy.ConditionGroup;
+import io.opentdf.platform.policy.SubjectConditionSet;
import io.opentdf.platform.policy.SubjectMappingOperatorEnum;
+import io.opentdf.platform.policy.SubjectSet;
+import io.opentdf.platform.policy.subjectmapping.CreateSubjectConditionSetRequest;
+import io.opentdf.platform.policy.subjectmapping.CreateSubjectConditionSetResponse;
+import io.opentdf.platform.policy.subjectmapping.SubjectConditionSetCreate;
+import io.opentdf.platform.sdk.*;
+
+import java.util.Collections;
+import java.util.concurrent.ExecutionException;
public class CreateSubjectConditionSet {
@@ -38,7 +40,7 @@ public static void main(String[] args) throws ExecutionException, InterruptedExc
SubjectConditionSetCreate.newBuilder().addSubjectSets(subjectset))
.build();
- CreateSubjectConditionSetResponse resp = sdk.getServices().subjectMappings().createSubjectConditionSet(request).get();
+ CreateSubjectConditionSetResponse resp = ResponseMessageKt.getOrThrow(sdk.getServices().subjectMappings().createSubjectConditionSetBlocking(request, Collections.emptyMap()).execute());
SubjectConditionSet scs = resp.getSubjectConditionSet();
diff --git a/examples/src/main/java/io/opentdf/platform/CreateSubjectMapping.java b/examples/src/main/java/io/opentdf/platform/CreateSubjectMapping.java
index 41f07336..1370a38a 100644
--- a/examples/src/main/java/io/opentdf/platform/CreateSubjectMapping.java
+++ b/examples/src/main/java/io/opentdf/platform/CreateSubjectMapping.java
@@ -1,12 +1,14 @@
package io.opentdf.platform;
+import com.connectrpc.ResponseMessageKt;
+import io.opentdf.platform.policy.Action;
+import io.opentdf.platform.policy.SubjectMapping;
+import io.opentdf.platform.policy.subjectmapping.CreateSubjectMappingRequest;
+import io.opentdf.platform.policy.subjectmapping.CreateSubjectMappingResponse;
import io.opentdf.platform.sdk.*;
+import java.util.Collections;
import java.util.concurrent.ExecutionException;
-import io.opentdf.platform.policy.subjectmapping.*;
-import io.opentdf.platform.policy.SubjectMapping;
-import io.opentdf.platform.policy.Action;
-
public class CreateSubjectMapping {
public static void main(String[] args) throws ExecutionException, InterruptedException{
@@ -25,7 +27,7 @@ public static void main(String[] args) throws ExecutionException, InterruptedExc
.setExistingSubjectConditionSetId("9009fde8-d22b-4dfb-a456-f9ce6943244a")
.build();
- CreateSubjectMappingResponse resp = sdk.getServices().subjectMappings().createSubjectMapping(request).get();
+ CreateSubjectMappingResponse resp = ResponseMessageKt.getOrThrow(sdk.getServices().subjectMappings().createSubjectMappingBlocking(request, Collections.emptyMap()).execute());
SubjectMapping sm = resp.getSubjectMapping();
diff --git a/examples/src/main/java/io/opentdf/platform/GetDecisions.java b/examples/src/main/java/io/opentdf/platform/GetDecisions.java
index 4bac7694..999f9ef3 100644
--- a/examples/src/main/java/io/opentdf/platform/GetDecisions.java
+++ b/examples/src/main/java/io/opentdf/platform/GetDecisions.java
@@ -1,11 +1,18 @@
package io.opentdf.platform;
+import com.connectrpc.ResponseMessageKt;
+import io.opentdf.platform.authorization.DecisionRequest;
+import io.opentdf.platform.authorization.DecisionResponse;
+import io.opentdf.platform.authorization.Entity;
+import io.opentdf.platform.authorization.EntityChain;
+import io.opentdf.platform.authorization.GetDecisionsRequest;
+import io.opentdf.platform.authorization.GetDecisionsResponse;
+import io.opentdf.platform.authorization.ResourceAttribute;
+import io.opentdf.platform.policy.Action;
import io.opentdf.platform.sdk.*;
+import java.util.Collections;
import java.util.concurrent.ExecutionException;
-import io.opentdf.platform.authorization.*;
-import io.opentdf.platform.policy.Action;
-
import java.util.List;
public class GetDecisions {
@@ -28,7 +35,7 @@ public static void main(String[] args) throws ExecutionException, InterruptedExc
.addAttributeValueFqns("https://mynamespace.com/attr/test/value/test1"))
).build();
- GetDecisionsResponse resp = sdk.getServices().authorization().getDecisions(request).get();
+ GetDecisionsResponse resp = ResponseMessageKt.getOrThrow(sdk.getServices().authorization().getDecisionsBlocking(request, Collections.emptyMap()).execute());
List<DecisionResponse> decisions = resp.getDecisionResponsesList();
diff --git a/examples/src/main/java/io/opentdf/platform/GetEntitlements.java b/examples/src/main/java/io/opentdf/platform/GetEntitlements.java
index ff484b80..e018bf9c 100644
--- a/examples/src/main/java/io/opentdf/platform/GetEntitlements.java
+++ b/examples/src/main/java/io/opentdf/platform/GetEntitlements.java
@@ -1,10 +1,14 @@
package io.opentdf.platform;
+import com.connectrpc.ResponseMessageKt;
+import io.opentdf.platform.authorization.Entity;
+import io.opentdf.platform.authorization.EntityEntitlements;
+import io.opentdf.platform.authorization.GetEntitlementsRequest;
+import io.opentdf.platform.authorization.GetEntitlementsResponse;
import io.opentdf.platform.sdk.*;
+import java.util.Collections;
import java.util.concurrent.ExecutionException;
-import io.opentdf.platform.authorization.*;
-
import java.util.List;
public class GetEntitlements {
@@ -23,7 +27,7 @@ public static void main(String[] args) throws ExecutionException, InterruptedExc
.addEntities(Entity.newBuilder().setId("entity-1").setClientId("opentdf"))
.build();
- GetEntitlementsResponse resp = sdk.getServices().authorization().getEntitlements(request).get();
+ GetEntitlementsResponse resp = ResponseMessageKt.getOrThrow(sdk.getServices().authorization().getEntitlementsBlocking(request, Collections.emptyMap()).execute());
List<EntityEntitlements> entitlements = resp.getEntitlementsList();
diff --git a/examples/src/main/java/io/opentdf/platform/ListAttributes.java b/examples/src/main/java/io/opentdf/platform/ListAttributes.java
index 366af20a..45528274 100644
--- a/examples/src/main/java/io/opentdf/platform/ListAttributes.java
+++ b/examples/src/main/java/io/opentdf/platform/ListAttributes.java
@@ -1,13 +1,13 @@
package io.opentdf.platform;
+import com.connectrpc.ResponseMessageKt;
+import io.opentdf.platform.policy.Attribute;
+import io.opentdf.platform.policy.attributes.ListAttributesRequest;
+import io.opentdf.platform.policy.attributes.ListAttributesResponse;
import io.opentdf.platform.sdk.*;
+import java.util.Collections;
import java.util.concurrent.ExecutionException;
-import io.opentdf.platform.policy.AttributeRuleTypeEnum;
-
-import io.opentdf.platform.policy.attributes.*;
-import io.opentdf.platform.policy.Attribute;
-
import java.util.List;
public class ListAttributes {
@@ -25,7 +25,7 @@ public static void main(String[] args) throws ExecutionException, InterruptedExc
ListAttributesRequest request = ListAttributesRequest.newBuilder()
.setNamespace("mynamespace.com").build();
- ListAttributesResponse resp = sdk.getServices().attributes().listAttributes(request).get();
+ ListAttributesResponse resp = ResponseMessageKt.getOrThrow(sdk.getServices().attributes().listAttributesBlocking(request, Collections.emptyMap()).execute());
List<Attribute> attributes = resp.getAttributesList();
diff --git a/examples/src/main/java/io/opentdf/platform/ListNamespaces.java b/examples/src/main/java/io/opentdf/platform/ListNamespaces.java
index 43e514a5..2f553d76 100644
--- a/examples/src/main/java/io/opentdf/platform/ListNamespaces.java
+++ b/examples/src/main/java/io/opentdf/platform/ListNamespaces.java
@@ -1,11 +1,14 @@
package io.opentdf.platform;
+import com.connectrpc.ResponseMessageKt;
+import io.opentdf.platform.policy.Namespace;
+import io.opentdf.platform.policy.namespaces.ListNamespacesRequest;
+import io.opentdf.platform.policy.namespaces.ListNamespacesResponse;
import io.opentdf.platform.sdk.*;
+import java.util.Collections;
+import java.util.List;
import java.util.concurrent.ExecutionException;
-import io.opentdf.platform.policy.namespaces.*;
-import io.opentdf.platform.policy.Namespace;
-
public class ListNamespaces {
public static void main(String[] args) throws ExecutionException, InterruptedException{
@@ -20,8 +23,8 @@ public static void main(String[] args) throws ExecutionException, InterruptedExc
ListNamespacesRequest request = ListNamespacesRequest.newBuilder().build();
- ListNamespacesResponse resp = sdk.getServices().namespaces().listNamespaces(request).get();
+ ListNamespacesResponse resp = ResponseMessageKt.getOrThrow(sdk.getServices().namespaces().listNamespacesBlocking(request, Collections.emptyMap()).execute());
- java.util.List<Namespace> namespaces = resp.getNamespacesList();
+ List<Namespace> namespaces = resp.getNamespacesList();
}
}
diff --git a/examples/src/main/java/io/opentdf/platform/ListSubjectMappings.java b/examples/src/main/java/io/opentdf/platform/ListSubjectMappings.java
index 12bc4ab7..e960cce7 100644
--- a/examples/src/main/java/io/opentdf/platform/ListSubjectMappings.java
+++ b/examples/src/main/java/io/opentdf/platform/ListSubjectMappings.java
@@ -1,11 +1,13 @@
package io.opentdf.platform;
+import com.connectrpc.ResponseMessageKt;
+import io.opentdf.platform.policy.SubjectMapping;
+import io.opentdf.platform.policy.subjectmapping.ListSubjectMappingsRequest;
+import io.opentdf.platform.policy.subjectmapping.ListSubjectMappingsResponse;
import io.opentdf.platform.sdk.*;
+import java.util.Collections;
import java.util.concurrent.ExecutionException;
-import io.opentdf.platform.policy.subjectmapping.*;
-import io.opentdf.platform.policy.SubjectMapping;
-
import java.util.List;
public class ListSubjectMappings {
@@ -22,7 +24,7 @@ public static void main(String[] args) throws ExecutionException, InterruptedExc
ListSubjectMappingsRequest request = ListSubjectMappingsRequest.newBuilder().build();
- ListSubjectMappingsResponse resp = sdk.getServices().subjectMappings().listSubjectMappings(request).get();
+ ListSubjectMappingsResponse resp = ResponseMessageKt.getOrThrow(sdk.getServices().subjectMappings().listSubjectMappingsBlocking(request, Collections.emptyMap()).execute());
List<SubjectMapping> sms = resp.getSubjectMappingsList();
diff --git a/pom.xml b/pom.xml
index be59a042..626220c9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -16,7 +16,7 @@
11
2.20.0
1.68.0
- 3.25.3
+ 4.29.2
8.3.5
@@ -123,7 +123,7 @@
com.google.protobuf
protobuf-java
- 3.25.5
+ ${protobuf.version}
org.slf4j
@@ -138,7 +138,7 @@
org.jetbrains.kotlin
kotlin-stdlib
- 1.9.23
+ 2.1.20
diff --git a/sdk/buf.gen.yaml b/sdk/buf.gen.yaml
index 6bb9c4e3..ba8ddec5 100644
--- a/sdk/buf.gen.yaml
+++ b/sdk/buf.gen.yaml
@@ -1,14 +1,25 @@
-version: v1
+version: v2
managed:
enabled: true
- java_package_prefix:
- default: io.opentdf.platform
- except:
- - buf.build/bufbuild/protovalidate
- - buf.build/googleapis/googleapis
- - buf.build/grpc-ecosystem/grpc-gateway
+ disable:
+ - file_option: java_package
+ module: buf.build/bufbuild/protovalidate
+ - file_option: java_package
+ module: buf.build/googleapis/googleapis
+ - file_option: java_package
+ module: buf.build/grpc-ecosystem/grpc-gateway
+ override:
+ - file_option: java_package_prefix
+ value: io.opentdf.platform
+ - file_option: java_package_prefix
+ value: io.opentdf.platform.test
+ module: buf.build/grpc/java
plugins:
- - plugin: buf.build/protocolbuffers/java:v25.3
- out: ./
- - plugin: buf.build/grpc/java:v1.61.1
- out: ./
+ - remote: buf.build/protocolbuffers/java:v25.3
+ out: ./generated-sources
+ - remote: buf.build/connectrpc/kotlin
+ out: ./generated-sources
+ opt:
+ - generateBlockingUnaryMethods=true
+ - remote: buf.build/grpc/java:v1.61.1
+ out: ./generated-test-sources
\ No newline at end of file
diff --git a/sdk/buf.lock b/sdk/buf.lock
index deef61e8..6cc49ead 100644
--- a/sdk/buf.lock
+++ b/sdk/buf.lock
@@ -1,18 +1,12 @@
# Generated by buf. DO NOT EDIT.
-version: v1
+version: v2
deps:
- - remote: buf.build
- owner: bufbuild
- repository: protovalidate
+ - name: buf.build/bufbuild/protovalidate
commit: f05a6f4403ce4327bae4f50f281c3ed0
- digest: shake256:668a0661b8df44d41839194896329330965fc215f3d2f88057fd60eeb759c2daf6cc6edfdd13b2a653d49fe2896ebedcb1a33c4c5b2dd10919f03ffb7fc52ae6
- - remote: buf.build
- owner: googleapis
- repository: googleapis
+ digest: b5:f1d76430ee97c89cd2044e9ae1c510887b701ee7bca60564ebf82e3919e53cacefc830a0eb803277c2d98c5f313b4167e8914afc9f214332717a50b5e170e6f4
+ - name: buf.build/googleapis/googleapis
commit: 7e6f6e774e29406da95bd61cdcdbc8bc
- digest: shake256:fe43dd2265ea0c07d76bd925eeba612667cf4c948d2ce53d6e367e1b4b3cb5fa69a51e6acb1a6a50d32f894f054a35e6c0406f6808a483f2752e10c866ffbf73
- - remote: buf.build
- owner: grpc-ecosystem
- repository: grpc-gateway
+ digest: b5:654225f30f2351e6515417825836cb4bd5df3d952f6086e406957f80e03c8ee00c67739b836b87e06f2ff90a6f44675ad175e47ea5aef29ee909b91a29bdd334
+ - name: buf.build/grpc-ecosystem/grpc-gateway
commit: 3f42134f4c564983838425bc43c7a65f
- digest: shake256:3d11d4c0fe5e05fda0131afefbce233940e27f0c31c5d4e385686aea58ccd30f72053f61af432fa83f1fc11cda57f5f18ca3da26a29064f73c5a0d076bba8d92
\ No newline at end of file
+ digest: b5:291b947d8ac09492517557e4e72e294788cb8201afc7d0df7bda80fa10931adb60d4d669208a7696bf24f1ecb2a33a16d4c1e766e6f31809248b00343119569b
diff --git a/sdk/buf.yaml b/sdk/buf.yaml
index 2dc8eb0e..0c330db3 100644
--- a/sdk/buf.yaml
+++ b/sdk/buf.yaml
@@ -1,22 +1,27 @@
-version: v1
+version: v2
deps:
- buf.build/bufbuild/protovalidate
- buf.build/googleapis/googleapis
- buf.build/grpc-ecosystem/grpc-gateway
-breaking:
- use:
- - FILE
- - PACKAGE
- - WIRE_JSON
- - WIRE
lint:
- allow_comment_ignores: true
use:
- DEFAULT
except:
+ - FIELD_NOT_REQUIRED
+ - PACKAGE_NO_IMPORT_CYCLE
- PACKAGE_VERSION_SUFFIX
ignore_only:
PACKAGE_VERSION_SUFFIX:
- google/api/annotations.proto
- google/api/http.proto
- google/protobuf/wrappers.proto
+breaking:
+ use:
+ - FILE
+ - PACKAGE
+ - WIRE
+ - WIRE_JSON
+ except:
+ - EXTENSION_NO_DELETE
+ - FIELD_SAME_DEFAULT
+ - PACKAGE_EXTENSION_NO_DELETE
diff --git a/sdk/pom.xml b/sdk/pom.xml
index 56bf0e7c..6494cb56 100644
--- a/sdk/pom.xml
+++ b/sdk/pom.xml
@@ -12,6 +12,9 @@
0.22.1
https://github.com/CodeIntelligenceTesting/jazzer/releases/download/v${jazzer.version}
+ 2.1.0
+ 0.7.2
+ 4.12.0
protocol/go/v0.3.0
@@ -79,6 +82,76 @@
io.grpc
grpc-stub
+
+ com.squareup.okhttp3
+ okhttp
+ ${okhttp.version}
+
+
+ com.squareup.okio
+ okio-jvm
+
+
+ org.jetbrains.kotlin
+ kotlin-stdlib-jdk8
+
+
+
+
+ com.squareup.okio
+ okio-jvm
+ 3.11.0
+
+
+ org.jetbrains.kotlin
+ kotlin-stdlib
+
+
+
+
+ com.connectrpc
+ connect-kotlin-okhttp
+ ${connect.version}
+
+
+ org.jetbrains.kotlin
+ kotlin-reflect
+
+
+ com.squareup.okio
+ okio-jvm
+
+
+ org.jetbrains.kotlin
+ kotlin-stdlib-jdk8
+
+
+ org.jetbrains
+ annotations
+
+
+
+
+ com.connectrpc
+ connect-kotlin-google-java-ext
+ ${connect.version}
+
+
+ com.google.code.gson
+ gson
+
+
+ com.google.j2objc
+ j2objc-annotations
+
+
+ com.squareup.okio
+ okio-jvm
+
+
+
+
+
org.apache.tomcat
@@ -122,8 +195,18 @@
com.squareup.okhttp3
mockwebserver
- 5.0.0-alpha.14
+ ${okhttp.version}
test
+
+
+ com.squareup.okio
+ okio-jvm
+
+
+ org.jetbrains.kotlin
+ kotlin-stdlib-jdk8
+
+
junit
@@ -144,15 +227,25 @@
test
- org.apache.commons
- commons-compress
- 1.26.1
+ com.squareup.okhttp3
+ okhttp-tls
+ ${okhttp.version}
test
+
+
+ com.squareup.okio
+ okio-jvm
+
+
+ org.jetbrains.kotlin
+ kotlin-stdlib-jdk8
+
+
- com.squareup.okhttp3
- okhttp-tls
- 5.0.0-alpha.14
+ org.apache.commons
+ commons-compress
+ 1.26.1
test
@@ -181,6 +274,17 @@
${grpc.version}
test
+
+ org.jetbrains.kotlin
+ kotlin-stdlib-jdk8
+ ${kotlin.version}
+
+
+ org.jetbrains.kotlin
+ kotlin-test
+ ${kotlin.version}
+ test
+
@@ -203,6 +307,9 @@
org.apache.maven.plugins
maven-javadoc-plugin
3.8.0
+
+ ${project.basedir}/src/main/**/*.java
+
attach-javadocs
@@ -260,14 +367,14 @@
-
+
-
+
@@ -295,6 +402,18 @@
+
+ add-test-source
+ generate-sources
+
+ add-test-source
+
+
+
+ target/generated-test-sources
+
+
+
@@ -327,6 +446,65 @@
+
+ kotlin-maven-plugin
+
+
+ compile
+ compile
+
+ compile
+
+
+
+ src/main/kotlin
+ target/generated-sources
+
+
+
+
+ test-compile
+ test-compile
+
+ test-compile
+
+
+
+
+ 1.8
+
+ org.jetbrains.kotlin
+ ${kotlin.version}
+
+
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+
+
+ default-compile
+ none
+
+
+ default-testCompile
+ none
+
+
+ compile
+ compile
+
+ compile
+
+
+
+ testCompile
+ test-compile
+
+ testCompile
+
+
+
+
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/AddressNormalizer.java b/sdk/src/main/java/io/opentdf/platform/sdk/AddressNormalizer.java
new file mode 100644
index 00000000..8e616e9c
--- /dev/null
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/AddressNormalizer.java
@@ -0,0 +1,47 @@
+package io.opentdf.platform.sdk;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+class AddressNormalizer {
+ private static final Logger logger = LoggerFactory.getLogger(AddressNormalizer.class);
+
+ private AddressNormalizer(){
+ }
+
+ static String normalizeAddress(String urlString, boolean usePlaintext) {
+ URI uri;
+ try {
+ uri = new URI(urlString);
+ } catch (URISyntaxException e) {
+ throw new SDKException("error trying to parse URL [" + urlString + "]", e);
+ }
+
+ final String scheme = usePlaintext ? "http" : "https";
+ if (uri.getHost() == null) {
+ // if there is no host then we are likely dealing with a host and port
+ try {
+ uri = new URI(scheme, null, uri.getScheme(), Integer.parseInt(uri.getSchemeSpecificPart()), null, null, null);
+ } catch (URISyntaxException e) {
+ throw new SDKException("error trying to create URL for host and port[" + urlString + "]", e);
+ }
+ }
+ final int port;
+ if (uri.getPort() == -1) {
+ port = usePlaintext ? 80 : 443;
+ } else {
+ port = uri.getPort();
+ }
+
+ try {
+ var returnUrl = new URI(scheme, null, uri.getHost(), port, null, null, null).toString();
+ logger.debug("normalized url [{}] to [{}]", urlString, returnUrl);
+ return returnUrl;
+ } catch (URISyntaxException e) {
+ throw new SDKException("error creating KAS address", e);
+ }
+ }
+}
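A short sketch of what this helper returns for typical inputs; the values follow directly from the logic above (the class is package-private, so callers live in `io.opentdf.platform.sdk`):

    // bare host:port input: the scheme is chosen from usePlaintext
    String a = AddressNormalizer.normalizeAddress("localhost:8080", true);
    // a == "http://localhost:8080"

    // full URL without an explicit port: the default port for the chosen scheme is filled in
    String b = AddressNormalizer.normalizeAddress("https://kas.example.com", false);
    // b == "https://kas.example.com:443"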
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/Autoconfigure.java b/sdk/src/main/java/io/opentdf/platform/sdk/Autoconfigure.java
index 7db8476a..fa4ded47 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/Autoconfigure.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/Autoconfigure.java
@@ -1,6 +1,6 @@
package io.opentdf.platform.sdk;
-import com.google.common.base.Supplier;
+import com.connectrpc.ResponseMessageKt;
import io.opentdf.platform.policy.Attribute;
import io.opentdf.platform.policy.AttributeRuleTypeEnum;
import io.opentdf.platform.policy.AttributeValueSelector;
@@ -8,10 +8,9 @@
import io.opentdf.platform.policy.KasPublicKeyAlgEnum;
import io.opentdf.platform.policy.KeyAccessServer;
import io.opentdf.platform.policy.Value;
-import io.opentdf.platform.policy.attributes.AttributesServiceGrpc.AttributesServiceFutureStub;
+import io.opentdf.platform.policy.attributes.AttributesServiceClient;
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsRequest;
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsResponse;
-import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsResponse.AttributeAndValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -30,7 +29,7 @@
import java.util.Objects;
import java.util.Set;
import java.util.StringJoiner;
-import java.util.concurrent.ExecutionException;
+import java.util.function.Supplier;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@@ -106,7 +105,7 @@ public AttributeNameFQN(String url) throws AutoConfigureException {
}
try {
- URLDecoder.decode(matcher.group(2), StandardCharsets.UTF_8.name());
+ URLDecoder.decode(matcher.group(2), StandardCharsets.UTF_8);
} catch (Exception e) {
throw new AutoConfigureException("invalid type: error in attribute name [" + matcher.group(2) + "]");
}
@@ -689,7 +688,7 @@ public static Granter newGranterFromAttributes(Value... attrValues) throws AutoC
if (!v.hasAttribute()) {
throw new AutoConfigureException("tried to use an attribute that is not initialized");
}
- return AttributeAndValue.newBuilder()
+ return GetAttributeValuesByFqnsResponse.AttributeAndValue.newBuilder()
.setValue(v)
.setAttribute(v.getAttribute())
.build();
@@ -699,65 +698,60 @@ public static Granter newGranterFromAttributes(Value... attrValues) throws AutoC
}
// Gets a list of directory of KAS grants for a list of attribute FQNs
- static Granter newGranterFromService(AttributesServiceFutureStub as, KASKeyCache keyCache, AttributeValueFQN... fqns) throws AutoConfigureException {
-
+ public static Granter newGranterFromService(AttributesServiceClient as, KASKeyCache keyCache, AttributeValueFQN... fqns) throws AutoConfigureException {
GetAttributeValuesByFqnsRequest request = GetAttributeValuesByFqnsRequest.newBuilder()
.addAllFqns(Arrays.stream(fqns).map(AttributeValueFQN::toString).collect(Collectors.toList()))
.setWithValue(AttributeValueSelector.newBuilder().setWithKeyAccessGrants(true).build())
.build();
- GetAttributeValuesByFqnsResponse av = null;
- try {
- av = as.getAttributeValuesByFqns(request).get();
- } catch (ExecutionException e) {
- throw new AutoConfigureException("error getting attributes during autoconfiguration", e);
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();
- throw new AutoConfigureException("interrupted while getting attributes during autoconfiguration", e);
- }
+ GetAttributeValuesByFqnsResponse av = ResponseMessageKt.getOrThrow(
+ as.getAttributeValuesByFqnsBlocking(request, Collections.emptyMap()).execute()
+ );
return getGranter(keyCache, new ArrayList<>(av.getFqnAttributeValuesMap().values()));
}
- private static Granter getGranter(@Nullable KASKeyCache keyCache, List<AttributeAndValue> values) {
- Granter grants = new Granter(values.stream().map(AttributeAndValue::getValue).map(Value::getFqn).map(AttributeValueFQN::new).collect(Collectors.toList()));
+ private static List<KeyAccessServer> getGrants(GetAttributeValuesByFqnsResponse.AttributeAndValue attributeAndValue) {
+ var val = attributeAndValue.getValue();
+ var attribute = attributeAndValue.getAttribute();
+
+ if (!val.getGrantsList().isEmpty()) {
+ if (logger.isDebugEnabled()) {
+ logger.debug("adding grants from attribute value [{}]: {}", val.getFqn(), val.getGrantsList().stream().map(KeyAccessServer::getUri).collect(Collectors.toList()));
+ }
+ return val.getGrantsList();
+ } else if (!attribute.getGrantsList().isEmpty()) {
+ var attributeGrants = attribute.getGrantsList();
+ if (logger.isDebugEnabled()) {
+ logger.debug("adding grants from attribute [{}]: {}", attribute.getFqn(), attributeGrants.stream().map(KeyAccessServer::getId).collect(Collectors.toList()));
+ }
+ return attributeGrants;
+ } else if (!attribute.getNamespace().getGrantsList().isEmpty()) {
+ var nsGrants = attribute.getNamespace().getGrantsList();
+ if (logger.isDebugEnabled()) {
+ logger.debug("adding grants from namespace [{}]: [{}]", attribute.getNamespace().getName(), nsGrants.stream().map(KeyAccessServer::getId).collect(Collectors.toList()));
+ }
+ return nsGrants;
+ } else {
+ // this is needed to mark the fact that we have an empty list of grants for this value
+ if (logger.isDebugEnabled()) {
+ logger.debug("didn't find any grants on value, attribute, or namespace for attribute value [{}]", val.getFqn());
+ }
+ return Collections.emptyList();
+ }
+
+ }
+
+ private static Granter getGranter(@Nullable KASKeyCache keyCache, List<GetAttributeValuesByFqnsResponse.AttributeAndValue> values) {
+ Granter grants = new Granter(values.stream().map(GetAttributeValuesByFqnsResponse.AttributeAndValue::getValue).map(Value::getFqn).map(AttributeValueFQN::new).collect(Collectors.toList()));
for (var attributeAndValue: values) {
- var val = attributeAndValue.getValue();
- var attribute = attributeAndValue.getAttribute();
- String fqnstr = val.getFqn();
+ var attributeGrants = getGrants(attributeAndValue);
+ String fqnstr = attributeAndValue.getValue().getFqn();
AttributeValueFQN fqn = new AttributeValueFQN(fqnstr);
-
- if (!val.getGrantsList().isEmpty()) {
- if (logger.isDebugEnabled()) {
- logger.debug("adding grants from attribute value [{}]: {}", val.getFqn(), val.getGrantsList().stream().map(KeyAccessServer::getUri).collect(Collectors.toList()));
- }
- grants.addAllGrants(fqn, val.getGrantsList(), attribute);
- if (keyCache != null) {
- storeKeysToCache(val.getGrantsList(), keyCache);
- }
- } else if (!attribute.getGrantsList().isEmpty()) {
- var attributeGrants = attribute.getGrantsList();
- if (logger.isDebugEnabled()) {
- logger.debug("adding grants from attribute [{}]: {}", attribute.getFqn(), attributeGrants.stream().map(KeyAccessServer::getId).collect(Collectors.toList()));
- }
- grants.addAllGrants(fqn, attributeGrants, attribute);
- if (keyCache != null) {
- storeKeysToCache(attributeGrants, keyCache);
- }
- } else if (!attribute.getNamespace().getGrantsList().isEmpty()) {
- var nsGrants = attribute.getNamespace().getGrantsList();
- if (logger.isDebugEnabled()) {
- logger.debug("adding grants from namespace [{}]: [{}]", attribute.getNamespace().getName(), nsGrants.stream().map(KeyAccessServer::getId).collect(Collectors.toList()));
- }
- grants.addAllGrants(fqn, nsGrants, attribute);
- if (keyCache != null) {
- storeKeysToCache(nsGrants, keyCache);
- }
- } else {
- // this is needed to mark the fact that we have an empty
- grants.addAllGrants(fqn, List.of(), attribute);
- logger.debug("didn't find any grants on value, attribute, or namespace for attribute value [{}]", fqnstr);
+ grants.addAllGrants(fqn, attributeGrants, attributeAndValue.getAttribute());
+ if (keyCache != null) {
+ storeKeysToCache(attributeGrants, keyCache);
}
}
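For illustration, a hedged sketch of the precedence the new `getGrants` helper applies (value-level grants win over attribute-level grants, which win over namespace-level grants), driven through the public `newGranterFromAttributes` entry point. The FQNs are placeholders and the snippet assumes it runs inside the `io.opentdf.platform.sdk` package, e.g. from a test:

    // uses io.opentdf.platform.policy.{Attribute, KeyAccessServer, Value}
    KeyAccessServer attrKas = KeyAccessServer.newBuilder().setUri("https://kas.example.com").build();
    Attribute attribute = Attribute.newBuilder()
            .setFqn("https://mynamespace.com/attr/test")
            .addGrants(attrKas)          // grant at the attribute level only
            .build();
    Value value = Value.newBuilder()
            .setAttribute(attribute)     // required: newGranterFromAttributes checks hasAttribute()
            .setFqn("https://mynamespace.com/attr/test/value/test1")
            .build();                    // no value-level grants, so the attribute-level grant is used
    var granter = Autoconfigure.newGranterFromAttributes(value);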
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/Config.java b/sdk/src/main/java/io/opentdf/platform/sdk/Config.java
index 090cdaad..8b527bd3 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/Config.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/Config.java
@@ -1,13 +1,12 @@
package io.opentdf.platform.sdk;
+import io.opentdf.platform.policy.Value;
import io.opentdf.platform.sdk.Autoconfigure.AttributeValueFQN;
import io.opentdf.platform.sdk.nanotdf.ECCMode;
import io.opentdf.platform.sdk.nanotdf.Header;
import io.opentdf.platform.sdk.nanotdf.NanoTDFType;
import io.opentdf.platform.sdk.nanotdf.SymmetricAndPayloadConfig;
-import io.opentdf.platform.policy.Value;
-
import java.net.URI;
import java.net.URISyntaxException;
import java.util.*;
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/KASClient.java b/sdk/src/main/java/io/opentdf/platform/sdk/KASClient.java
index 73f5a5c4..6d64ac9d 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/KASClient.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/KASClient.java
@@ -1,5 +1,9 @@
package io.opentdf.platform.sdk;
+import com.connectrpc.Code;
+import com.connectrpc.ConnectException;
+import com.connectrpc.ResponseMessageKt;
+import com.connectrpc.impl.ProtocolClient;
import com.google.gson.Gson;
import com.nimbusds.jose.JOSEException;
import com.nimbusds.jose.JWSAlgorithm;
@@ -8,10 +12,7 @@
import com.nimbusds.jose.jwk.RSAKey;
import com.nimbusds.jwt.JWTClaimsSet;
import com.nimbusds.jwt.SignedJWT;
-import io.grpc.ManagedChannel;
-import io.grpc.StatusRuntimeException;
-import io.grpc.Status;
-import io.opentdf.platform.kas.AccessServiceGrpc;
+import io.opentdf.platform.kas.AccessServiceClient;
import io.opentdf.platform.kas.PublicKeyRequest;
import io.opentdf.platform.kas.PublicKeyResponse;
import io.opentdf.platform.kas.RewrapRequest;
@@ -21,17 +22,16 @@
import io.opentdf.platform.sdk.nanotdf.NanoTDFType;
import io.opentdf.platform.sdk.TDF.KasBadRequestException;
-import java.nio.charset.StandardCharsets;
+import okhttp3.OkHttpClient;
+
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
-import java.net.MalformedURLException;
-import java.net.URL;
import java.time.Duration;
import java.time.Instant;
-import java.util.ArrayList;
+import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
-import java.util.function.Function;
+import java.util.function.BiFunction;
import static io.opentdf.platform.sdk.TDF.GLOBAL_KEY_SALT;
import static java.lang.String.format;
@@ -41,9 +41,11 @@
* This class provides methods to retrieve public keys, unwrap encrypted keys,
* and manage key caches.
*/
-public class KASClient implements SDK.KAS {
+class KASClient implements SDK.KAS {
- private final Function<String, ManagedChannel> channelFactory;
+ private final OkHttpClient httpClient;
+ private final BiFunction<OkHttpClient, String, ProtocolClient> protocolClientFactory;
+ private final boolean usePlaintext;
private final RSASSASigner signer;
private AsymDecryption decryptor;
private String clientPublicKey;
@@ -52,12 +54,13 @@ public class KASClient implements SDK.KAS {
/***
* A client that communicates with KAS
*
- * @param channelFactory A function that produces channels that can be used to
* communicate
* @param dpopKey
*/
- public KASClient(Function<String, ManagedChannel> channelFactory, RSAKey dpopKey) {
- this.channelFactory = channelFactory;
+ KASClient(OkHttpClient httpClient, BiFunction<OkHttpClient, String, ProtocolClient> protocolClientFactory, RSAKey dpopKey, boolean usePlaintext) {
+ this.httpClient = httpClient;
+ this.protocolClientFactory = protocolClientFactory;
+ this.usePlaintext = usePlaintext;
try {
this.signer = new RSASSASigner(dpopKey);
} catch (JOSEException e) {
@@ -68,12 +71,17 @@ public KASClient(Function channelFactory, RSAKey dpopKey
@Override
public KASInfo getECPublicKey(Config.KASInfo kasInfo, NanoTDFType.ECCurve curve) {
- var r = getStub(kasInfo.URL)
- .publicKey(
- PublicKeyRequest.newBuilder().setAlgorithm(String.format("ec:%s", curve.toString())).build());
+ var req = PublicKeyRequest.newBuilder().setAlgorithm(format("ec:%s", curve.toString())).build();
+ var r = getStub(kasInfo.URL).publicKeyBlocking(req, Collections.emptyMap()).execute();
+ PublicKeyResponse res;
+ try {
+ res = ResponseMessageKt.getOrThrow(r);
+ } catch (Exception e) {
+ throw new SDKException("error getting public key", e);
+ }
var k2 = kasInfo.clone();
- k2.KID = r.getKid();
- k2.PublicKey = r.getPublicKey();
+ k2.KID = res.getKid();
+ k2.PublicKey = res.getPublicKey();
return k2;
}
@@ -88,7 +96,13 @@ public Config.KASInfo getPublicKey(Config.KASInfo kasInfo) {
? PublicKeyRequest.getDefaultInstance()
: PublicKeyRequest.newBuilder().setAlgorithm(kasInfo.Algorithm).build();
- PublicKeyResponse resp = getStub(kasInfo.URL).publicKey(request);
+ var req = getStub(kasInfo.URL).publicKeyBlocking(request, Collections.emptyMap()).execute();
+ PublicKeyResponse resp;
+ try {
+ resp = RequestHelper.getOrThrow(req);
+ } catch (ConnectException e) {
+ throw new SDKException("error getting public key", e);
+ }
var kiCopy = new Config.KASInfo();
kiCopy.KID = resp.getKid();
@@ -105,40 +119,10 @@ public KASKeyCache getKeyCache() {
return this.kasKeyCache;
}
- public static String normalizeAddress(String urlString) {
- URL url;
- try {
- url = new URL(urlString);
- } catch (MalformedURLException e) {
- // if there is no protocol then they either gave us
- // a correct address or one we don't know how to fix
- return urlString;
- }
-
- // otherwise we take the specified port or default
- // based on whether the URL uses a scheme that
- // implies TLS
- int port;
- if (url.getPort() == -1) {
- if ("http".equals(url.getProtocol())) {
- port = 80;
- } else {
- port = 443;
- }
- } else {
- port = url.getPort();
- }
-
- return format("%s:%d", url.getHost(), port);
- }
-
@Override
public synchronized void close() {
- var entries = new ArrayList<>(stubs.values());
- stubs.clear();
- for (var entry : entries) {
- entry.channel.shutdownNow();
- }
+ this.httpClient.dispatcher().cancelAll();
+ this.httpClient.connectionPool().evictAll();
}
static class RewrapRequestBody {
@@ -165,7 +149,7 @@ static class NanoTDFRewrapRequestBody {
@Override
public byte[] unwrap(Manifest.KeyAccess keyAccess, String policy, KeyType sessionKeyType) {
ECKeyPair ecKeyPair = null;
-
+
if (sessionKeyType.isEc()) {
var curveName = sessionKeyType.getCurveName();
ecKeyPair = new ECKeyPair(curveName, ECKeyPair.ECAlgorithm.ECDH);
@@ -204,35 +188,36 @@ public byte[] unwrap(Manifest.KeyAccess keyAccess, String policy, KeyType sessi
.setSignedRequestToken(jwt.serialize())
.build();
RewrapResponse response;
+ var req = getStub(keyAccess.url).rewrapBlocking(request, Collections.emptyMap()).execute();
try {
- response = getStub(keyAccess.url).rewrap(request);
- var wrappedKey = response.getEntityWrappedKey().toByteArray();
- if (sessionKeyType != KeyType.RSA2048Key) {
+ response = RequestHelper.getOrThrow(req);
+ } catch (ConnectException e) {
+ if (e.getCode() == Code.INVALID_ARGUMENT) {
+ // 400 Bad Request
+ throw new KasBadRequestException("rewrap request 400: " + e);
+ }
+ throw new SDKException("error unwrapping key", e);
+ }
- if (ecKeyPair == null) {
- throw new SDKException("ECKeyPair is null. Unable to proceed with the unwrap operation.");
- }
+ var wrappedKey = response.getEntityWrappedKey().toByteArray();
+ if (sessionKeyType != KeyType.RSA2048Key) {
- var kasEphemeralPublicKey = response.getSessionPublicKey();
- var publicKey = ECKeyPair.publicKeyFromPem(kasEphemeralPublicKey);
- byte[] symKey = ECKeyPair.computeECDHKey(publicKey, ecKeyPair.getPrivateKey());
+ if (ecKeyPair == null) {
+ throw new SDKException("ECKeyPair is null. Unable to proceed with the unwrap operation.");
+ }
- var sessionKey = ECKeyPair.calculateHKDF(GLOBAL_KEY_SALT, symKey);
+ var kasEphemeralPublicKey = response.getSessionPublicKey();
+ var publicKey = ECKeyPair.publicKeyFromPem(kasEphemeralPublicKey);
+ byte[] symKey = ECKeyPair.computeECDHKey(publicKey, ecKeyPair.getPrivateKey());
- AesGcm gcm = new AesGcm(sessionKey);
- AesGcm.Encrypted encrypted = new AesGcm.Encrypted(wrappedKey);
- return gcm.decrypt(encrypted);
- } else {
- return decryptor.decrypt(wrappedKey);
- }
- } catch (StatusRuntimeException e) {
- if (e.getStatus().getCode() == Status.Code.INVALID_ARGUMENT) {
- // 400 Bad Request
- throw new KasBadRequestException("rewrap request 400: " + e.toString());
- }
- throw e;
+ var sessionKey = ECKeyPair.calculateHKDF(GLOBAL_KEY_SALT, symKey);
+
+ AesGcm gcm = new AesGcm(sessionKey);
+ AesGcm.Encrypted encrypted = new AesGcm.Encrypted(wrappedKey);
+ return gcm.decrypt(encrypted);
+ } else {
+ return decryptor.decrypt(wrappedKey);
}
-
}
public byte[] unwrapNanoTDF(NanoTDFType.ECCurve curve, String header, String kasURL) {
@@ -245,7 +230,7 @@ public byte[] unwrapNanoTDF(NanoTDFType.ECCurve curve, String header, String kas
keyAccess.protocol = "kas";
NanoTDFRewrapRequestBody body = new NanoTDFRewrapRequestBody();
- body.algorithm = String.format("ec:%s", curve.toString());
+ body.algorithm = format("ec:%s", curve.toString());
body.clientPublicKey = keyPair.publicKeyInPEMFormat();
body.keyAccess = keyAccess;
@@ -264,12 +249,18 @@ public byte[] unwrapNanoTDF(NanoTDFType.ECCurve curve, String header, String kas
throw new SDKException("error signing KAS request", e);
}
- var request = RewrapRequest
+ var req = RewrapRequest
.newBuilder()
.setSignedRequestToken(jwt.serialize())
.build();
- var response = getStub(keyAccess.url).rewrap(request);
+ var request = getStub(keyAccess.url).rewrapBlocking(req, Collections.emptyMap()).execute();
+ RewrapResponse response;
+ try {
+ response = RequestHelper.getOrThrow(request);
+ } catch (ConnectException e) {
+ throw new SDKException("error rewrapping key", e);
+ }
var wrappedKey = response.getEntityWrappedKey().toByteArray();
// Generate symmetric key
@@ -291,27 +282,13 @@ public byte[] unwrapNanoTDF(NanoTDFType.ECCurve curve, String header, String kas
return gcm.decrypt(encrypted);
}
- private final HashMap<String, CacheEntry> stubs = new HashMap<>();
-
- private static class CacheEntry {
- final ManagedChannel channel;
- final AccessServiceGrpc.AccessServiceBlockingStub stub;
-
- private CacheEntry(ManagedChannel channel, AccessServiceGrpc.AccessServiceBlockingStub stub) {
- this.channel = channel;
- this.stub = stub;
- }
- }
+ private final HashMap<String, AccessServiceClient> stubs = new HashMap<>();
// make this protected so we can test the address normalization logic
- synchronized AccessServiceGrpc.AccessServiceBlockingStub getStub(String url) {
- var realAddress = normalizeAddress(url);
- if (!stubs.containsKey(realAddress)) {
- var channel = channelFactory.apply(realAddress);
- var stub = AccessServiceGrpc.newBlockingStub(channel);
- stubs.put(realAddress, new CacheEntry(channel, stub));
- }
-
- return stubs.get(realAddress).stub;
+ synchronized AccessServiceClient getStub(String url) {
+ return stubs.computeIfAbsent(AddressNormalizer.normalizeAddress(url, usePlaintext), (String address) -> {
+ var client = protocolClientFactory.apply(httpClient, address);
+ return new AccessServiceClient(client);
+ });
}
}
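The error mapping above keeps the previous behaviour: a KAS rejection (`Code.INVALID_ARGUMENT`, i.e. HTTP 400) still surfaces as `TDF.KasBadRequestException`, while other RPC failures are wrapped in `SDKException`. A hedged sketch of how a caller can tell the two apart, given a `SDK.KAS` instance such as `sdk.getServices().kas()` and existing `keyAccess`/`policyJson` values (assumed, not shown in this diff):

    try {
        byte[] dek = kas.unwrap(keyAccess, policyJson, KeyType.RSA2048Key);
        // use the unwrapped data encryption key
    } catch (TDF.KasBadRequestException e) {
        // the KAS rejected the rewrap request itself (bad policy binding, malformed request, ...)
    } catch (SDKException e) {
        // transport or other RPC failure surfaced by the Connect client
    }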
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/NanoTDF.java b/sdk/src/main/java/io/opentdf/platform/sdk/NanoTDF.java
index 4890d859..5b7bcf99 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/NanoTDF.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/NanoTDF.java
@@ -1,5 +1,6 @@
package io.opentdf.platform.sdk;
+import com.connectrpc.ResponseMessageKt;
import io.opentdf.platform.policy.kasregistry.ListKeyAccessServersRequest;
import io.opentdf.platform.policy.kasregistry.ListKeyAccessServersResponse;
import io.opentdf.platform.sdk.TDF.KasAllowlistException;
@@ -12,7 +13,6 @@
import java.nio.charset.StandardCharsets;
import java.security.*;
import java.util.*;
-import java.util.concurrent.ExecutionException;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
@@ -219,20 +219,13 @@ public void readNanoTDF(ByteBuffer nanoTDF, OutputStream outputStream, String pl
readNanoTDF(nanoTDF, outputStream, Config.newNanoTDFReaderConfig(), platformUrl);
}
+
public void readNanoTDF(ByteBuffer nanoTDF, OutputStream outputStream,
- Config.NanoTDFReaderConfig nanoTdfReaderConfig, String platformUrl) throws IOException, SDKException {
+ Config.NanoTDFReaderConfig nanoTdfReaderConfig, String platformUrl) throws IOException {
if (!nanoTdfReaderConfig.ignoreKasAllowlist && (nanoTdfReaderConfig.kasAllowlist == null || nanoTdfReaderConfig.kasAllowlist.isEmpty())) {
ListKeyAccessServersRequest request = ListKeyAccessServersRequest.newBuilder()
.build();
- ListKeyAccessServersResponse response = null;
- try {
- response = services.kasRegistry().listKeyAccessServers(request).get();
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();
- throw new SDKException("interrupted while getting kas registry", e);
- } catch (ExecutionException e) {
- throw new SDKException("error getting kas registry", e);
- }
+ ListKeyAccessServersResponse response = ResponseMessageKt.getOrThrow(services.kasRegistry().listKeyAccessServersBlocking(request, Collections.emptyMap()).execute());
nanoTdfReaderConfig.kasAllowlist = new HashSet<>();
var kases = response.getKeyAccessServersList();
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/SDK.java b/sdk/src/main/java/io/opentdf/platform/sdk/SDK.java
index 541eb200..780f68b0 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/SDK.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/SDK.java
@@ -1,19 +1,12 @@
package io.opentdf.platform.sdk;
-import io.grpc.ClientInterceptor;
-import io.grpc.ManagedChannel;
-import io.opentdf.platform.authorization.AuthorizationServiceGrpc;
-import io.opentdf.platform.authorization.AuthorizationServiceGrpc.AuthorizationServiceFutureStub;
-import io.opentdf.platform.policy.attributes.AttributesServiceGrpc;
-import io.opentdf.platform.policy.attributes.AttributesServiceGrpc.AttributesServiceFutureStub;
-import io.opentdf.platform.policy.namespaces.NamespaceServiceGrpc;
-import io.opentdf.platform.policy.namespaces.NamespaceServiceGrpc.NamespaceServiceFutureStub;
-import io.opentdf.platform.policy.resourcemapping.ResourceMappingServiceGrpc;
-import io.opentdf.platform.policy.resourcemapping.ResourceMappingServiceGrpc.ResourceMappingServiceFutureStub;
-import io.opentdf.platform.policy.subjectmapping.SubjectMappingServiceGrpc;
-import io.opentdf.platform.policy.subjectmapping.SubjectMappingServiceGrpc.SubjectMappingServiceFutureStub;
-import io.opentdf.platform.policy.kasregistry.KeyAccessServerRegistryServiceGrpc;
-import io.opentdf.platform.policy.kasregistry.KeyAccessServerRegistryServiceGrpc.KeyAccessServerRegistryServiceFutureStub;
+import com.connectrpc.Interceptor;
+import io.opentdf.platform.authorization.AuthorizationServiceClient;
+import io.opentdf.platform.policy.attributes.AttributesServiceClient;
+import io.opentdf.platform.policy.kasregistry.KeyAccessServerRegistryServiceClient;
+import io.opentdf.platform.policy.namespaces.NamespaceServiceClient;
+import io.opentdf.platform.policy.resourcemapping.ResourceMappingServiceClient;
+import io.opentdf.platform.policy.subjectmapping.SubjectMappingServiceClient;
import io.opentdf.platform.sdk.nanotdf.NanoTDFType;
import javax.net.ssl.TrustManager;
@@ -33,7 +26,7 @@
public class SDK implements AutoCloseable {
private final Services services;
private final TrustManager trustManager;
- private final ClientInterceptor authInterceptor;
+ private final Interceptor authInterceptor;
private final String platformUrl;
/**
@@ -68,82 +61,30 @@ byte[] unwrap(Manifest.KeyAccess keyAccess, String policy,
* It extends the AutoCloseable interface, allowing for the release of resources when no longer needed.
*/
public interface Services extends AutoCloseable {
- AuthorizationServiceFutureStub authorization();
+ AttributesServiceClient attributes();
- AttributesServiceFutureStub attributes();
+ NamespaceServiceClient namespaces();
- NamespaceServiceFutureStub namespaces();
+ SubjectMappingServiceClient subjectMappings();
- SubjectMappingServiceFutureStub subjectMappings();
+ ResourceMappingServiceClient resourceMappings();
- ResourceMappingServiceFutureStub resourceMappings();
+ AuthorizationServiceClient authorization();
- KeyAccessServerRegistryServiceFutureStub kasRegistry();
+ KeyAccessServerRegistryServiceClient kasRegistry();
KAS kas();
-
- static Services newServices(ManagedChannel channel, KAS kas) {
- var attributeService = AttributesServiceGrpc.newFutureStub(channel);
- var namespaceService = NamespaceServiceGrpc.newFutureStub(channel);
- var subjectMappingService = SubjectMappingServiceGrpc.newFutureStub(channel);
- var resourceMappingService = ResourceMappingServiceGrpc.newFutureStub(channel);
- var authorizationService = AuthorizationServiceGrpc.newFutureStub(channel);
- var kasRegistryService = KeyAccessServerRegistryServiceGrpc.newFutureStub(channel);
-
- return new Services() {
- @Override
- public void close() throws Exception {
- channel.shutdownNow();
- kas.close();
- }
-
- @Override
- public AttributesServiceFutureStub attributes() {
- return attributeService;
- }
-
- @Override
- public NamespaceServiceFutureStub namespaces() {
- return namespaceService;
- }
-
- @Override
- public SubjectMappingServiceFutureStub subjectMappings() {
- return subjectMappingService;
- }
-
- @Override
- public ResourceMappingServiceFutureStub resourceMappings() {
- return resourceMappingService;
- }
-
- @Override
- public AuthorizationServiceFutureStub authorization() {
- return authorizationService;
- }
-
- @Override
- public KeyAccessServerRegistryServiceFutureStub kasRegistry() {
- return kasRegistryService;
- }
-
- @Override
- public KAS kas() {
- return kas;
- }
- };
- }
}
public Optional<TrustManager> getTrustManager() {
return Optional.ofNullable(trustManager);
}
- public Optional<ClientInterceptor> getAuthInterceptor() {
+ public Optional<Interceptor> getAuthInterceptor() {
return Optional.ofNullable(authInterceptor);
}
- SDK(Services services, TrustManager trustManager, ClientInterceptor authInterceptor, String platformUrl) {
+ SDK(Services services, TrustManager trustManager, Interceptor authInterceptor, String platformUrl) {
this.platformUrl = platformUrl;
this.services = services;
this.trustManager = trustManager;
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/SDKBuilder.java b/sdk/src/main/java/io/opentdf/platform/sdk/SDKBuilder.java
index 074f6ab7..874e5352 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/SDKBuilder.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/SDKBuilder.java
@@ -1,5 +1,13 @@
package io.opentdf.platform.sdk;
+import com.connectrpc.ConnectException;
+import com.connectrpc.Interceptor;
+import com.connectrpc.ProtocolClientConfig;
+import com.connectrpc.extensions.GoogleJavaProtobufStrategy;
+import com.connectrpc.impl.ProtocolClient;
+import com.connectrpc.okhttp.ConnectOkHttpClient;
+import com.connectrpc.protocols.GETConfiguration;
+import com.connectrpc.protocols.NetworkProtocol;
import com.nimbusds.jose.JOSEException;
import com.nimbusds.jose.jwk.KeyUse;
import com.nimbusds.jose.jwk.RSAKey;
@@ -16,15 +24,23 @@
import com.nimbusds.oauth2.sdk.token.TokenTypeURI;
import com.nimbusds.oauth2.sdk.tokenexchange.TokenExchangeGrant;
import com.nimbusds.openid.connect.sdk.op.OIDCProviderMetadata;
-import io.grpc.*;
+import io.opentdf.platform.authorization.AuthorizationServiceClient;
+import io.opentdf.platform.policy.attributes.AttributesServiceClient;
+import io.opentdf.platform.policy.kasregistry.KeyAccessServerRegistryServiceClient;
+import io.opentdf.platform.policy.namespaces.NamespaceServiceClient;
+import io.opentdf.platform.policy.resourcemapping.ResourceMappingServiceClient;
+import io.opentdf.platform.policy.subjectmapping.SubjectMappingServiceClient;
import io.opentdf.platform.wellknownconfiguration.GetWellKnownConfigurationRequest;
import io.opentdf.platform.wellknownconfiguration.GetWellKnownConfigurationResponse;
-import io.opentdf.platform.wellknownconfiguration.WellKnownServiceGrpc;
+import io.opentdf.platform.wellknownconfiguration.WellKnownServiceClient;
import nl.altindag.ssl.SSLFactory;
import nl.altindag.ssl.pem.util.PemUtils;
+import okhttp3.OkHttpClient;
+import okhttp3.Protocol;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import javax.annotation.Nonnull;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509ExtendedTrustManager;
import java.io.File;
@@ -33,9 +49,10 @@
import java.io.InputStream;
import java.nio.file.Path;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.List;
import java.util.UUID;
-import java.util.function.Function;
+import java.util.function.BiFunction;
/**
* A builder class for creating instances of the SDK class.
@@ -129,12 +146,20 @@ public SDKBuilder clientSecret(String clientID, String clientSecret) {
return this;
}
+ /**
+ * If set to `true`, plaintext `http` connections to platform services are allowed. In particular,
+ * use this option to unwrap TDFs using KASs that are not using TLS. Also, if KASs use
+ * bare host:port addresses then this option must be set in order for the SDK to
+ * call the KAS without TLS.
+ * @param usePlainText whether to allow plaintext `http` connections to platform services and KASs
+ * @return this builder
+ */
public SDKBuilder useInsecurePlaintextConnection(Boolean usePlainText) {
this.usePlainText = usePlainText;
return this;
}
- private GRPCAuthInterceptor getGrpcAuthInterceptor(RSAKey rsaKey) {
+ private Interceptor getAuthInterceptor(RSAKey rsaKey) {
if (platformEndpoint == null) {
throw new SDKException("cannot build an SDK without specifying the platform endpoint");
}
@@ -146,23 +171,15 @@ private GRPCAuthInterceptor getGrpcAuthInterceptor(RSAKey rsaKey) {
}
// we don't add the auth listener to this channel since it is only used to call
- // the
- // well known endpoint
- ManagedChannel bootstrapChannel = null;
+ // the well known endpoint
GetWellKnownConfigurationResponse config;
+ var httpClient = getHttpClient();
+ ProtocolClient bootstrapClient = getUnauthenticatedProtocolClient(platformEndpoint, httpClient);
+ var stub = new WellKnownServiceClient(bootstrapClient);
try {
- bootstrapChannel = getManagedChannelBuilder(platformEndpoint).build();
- var stub = WellKnownServiceGrpc.newBlockingStub(bootstrapChannel);
- try {
- config = stub.getWellKnownConfiguration(GetWellKnownConfigurationRequest.getDefaultInstance());
- } catch (StatusRuntimeException e) {
- Status status = Status.fromThrowable(e);
- throw new SDKException(String.format("Got grpc status [%s] when getting configuration", status), e);
- }
- } finally {
- if (bootstrapChannel != null) {
- bootstrapChannel.shutdown();
- }
+ config = RequestHelper.getOrThrow(stub.getWellKnownConfigurationBlocking(GetWellKnownConfigurationRequest.getDefaultInstance(), Collections.emptyMap()).execute());
+ } catch (ConnectException e) {
+ throw new SDKException(String.format("Got grpc status [%s] when getting configuration", e.getCode()), e);
}
String platformIssuer;
@@ -171,7 +188,6 @@ private GRPCAuthInterceptor getGrpcAuthInterceptor(RSAKey rsaKey) {
.getConfiguration()
.getFieldsOrThrow(PLATFORM_ISSUER)
.getStringValue();
-
} catch (IllegalArgumentException e) {
logger.warn(
"no `platform_issuer` found in well known configuration. requests from the SDK will be unauthenticated",
@@ -194,17 +210,17 @@ private GRPCAuthInterceptor getGrpcAuthInterceptor(RSAKey rsaKey) {
if (this.authzGrant == null) {
this.authzGrant = new ClientCredentialsGrant();
}
-
- return new GRPCAuthInterceptor(clientAuth, rsaKey, providerMetadata.getTokenEndpointURI(), this.authzGrant, sslFactory);
+ var ts = new TokenSource(clientAuth, rsaKey, providerMetadata.getTokenEndpointURI(), this.authzGrant, sslFactory);
+ return new AuthInterceptor(ts);
}
static class ServicesAndInternals {
- final ClientInterceptor interceptor;
+ final Interceptor interceptor;
final TrustManager trustManager;
final SDK.Services services;
- ServicesAndInternals(ClientInterceptor interceptor, TrustManager trustManager, SDK.Services services) {
+ ServicesAndInternals(Interceptor interceptor, TrustManager trustManager, SDK.Services services) {
this.interceptor = interceptor;
this.trustManager = trustManager;
this.services = services;
@@ -222,23 +238,72 @@ ServicesAndInternals buildServices() {
throw new SDKException("Error generating DPoP key", e);
}
- var authInterceptor = getGrpcAuthInterceptor(dpopKey);
- ManagedChannel channel;
- Function<String, ManagedChannel> managedChannelFactory;
- if (authInterceptor == null) {
- channel = getManagedChannelBuilder(platformEndpoint).build();
- managedChannelFactory = (String endpoint) -> getManagedChannelBuilder(endpoint).build();
-
- } else {
- channel = getManagedChannelBuilder(platformEndpoint).intercept(authInterceptor).build();
- managedChannelFactory = (String endpoint) -> getManagedChannelBuilder(endpoint).intercept(authInterceptor)
- .build();
- }
- var client = new KASClient(managedChannelFactory, dpopKey);
+ this.platformEndpoint = AddressNormalizer.normalizeAddress(this.platformEndpoint, this.usePlainText);
+ var authInterceptor = getAuthInterceptor(dpopKey);
+ var kasClient = getKASClient(dpopKey, authInterceptor);
+ var httpClient = getHttpClient();
+ var client = getProtocolClient(platformEndpoint, httpClient, authInterceptor);
+ var attributeService = new AttributesServiceClient(client);
+ var namespaceService = new NamespaceServiceClient(client);
+ var subjectMappingService = new SubjectMappingServiceClient(client);
+ var resourceMappingService = new ResourceMappingServiceClient(client);
+ var authorizationService = new AuthorizationServiceClient(client);
+ var kasRegistryService = new KeyAccessServerRegistryServiceClient(client);
+
+ var services = new SDK.Services() {
+ @Override
+ public void close() {
+ kasClient.close();
+ httpClient.dispatcher().executorService().shutdown();
+ httpClient.connectionPool().evictAll();
+ }
+
+ @Override
+ public AttributesServiceClient attributes() {
+ return attributeService;
+ }
+
+ @Override
+ public NamespaceServiceClient namespaces() {
+ return namespaceService;
+ }
+
+ @Override
+ public SubjectMappingServiceClient subjectMappings() {
+ return subjectMappingService;
+ }
+
+ @Override
+ public ResourceMappingServiceClient resourceMappings() {
+ return resourceMappingService;
+ }
+
+ @Override
+ public AuthorizationServiceClient authorization() {
+ return authorizationService;
+ }
+
+ @Override
+ public KeyAccessServerRegistryServiceClient kasRegistry() {
+ return kasRegistryService;
+ }
+
+ @Override
+ public SDK.KAS kas() {
+ return kasClient;
+ }
+ };
+
return new ServicesAndInternals(
authInterceptor,
sslFactory == null ? null : sslFactory.getTrustManager().orElse(null),
- SDK.Services.newServices(channel, client));
+ services);
+ }
+
+ @Nonnull
+ private KASClient getKASClient(RSAKey dpopKey, Interceptor interceptor) {
+ BiFunction<OkHttpClient, String, ProtocolClient> protocolClientFactory = (OkHttpClient client, String address) -> getProtocolClient(address, client, interceptor);
+ return new KASClient(getHttpClient(), protocolClientFactory, dpopKey, usePlainText);
}
public SDK build() {
@@ -246,32 +311,39 @@ public SDK build() {
return new SDK(services.services, services.trustManager, services.interceptor, platformEndpoint);
}
- /**
- * This produces a channel configured with all the available SDK options. The
- * only
- * reason it can't take in an interceptor is because we need to create a channel
- * that
- * doesn't have any authentication when we are bootstrapping
- *
- * @param endpoint The endpoint that we are creating the channel for
- * @return {@type ManagedChannelBuilder<?>} configured with the SDK options
- */
- private ManagedChannelBuilder<?> getManagedChannelBuilder(String endpoint) {
- // normalize the endpoint, ends with just host:port
- endpoint = KASClient.normalizeAddress(endpoint);
-
- ManagedChannelBuilder<?> channelBuilder;
- if (sslFactory != null && !usePlainText) {
- channelBuilder = Grpc.newChannelBuilder(endpoint, TlsChannelCredentials.newBuilder()
- .trustManager(sslFactory.getTrustManager().get()).build());
- } else {
- channelBuilder = ManagedChannelBuilder.forTarget(endpoint);
- }
+ private ProtocolClient getUnauthenticatedProtocolClient(String endpoint, OkHttpClient httpClient) {
+ return getProtocolClient(endpoint, httpClient, null);
+ }
+ private ProtocolClient getProtocolClient(String endpoint, OkHttpClient httpClient, Interceptor authInterceptor) {
+ var protocolClientConfig = new ProtocolClientConfig(
+ endpoint,
+ new GoogleJavaProtobufStrategy(),
+ NetworkProtocol.GRPC,
+ null,
+ GETConfiguration.Enabled.INSTANCE,
+ authInterceptor == null ? Collections.emptyList() : List.of(ignoredConfig -> authInterceptor)
+ );
+
+ return new ProtocolClient(new ConnectOkHttpClient(httpClient), protocolClientConfig);
+ }
+
+ private OkHttpClient getHttpClient() {
+ // sharing a single http client is the recommended practice, as long as every consumer
+ // can use the same protocol configuration
+ var httpClient = new OkHttpClient.Builder();
if (usePlainText) {
- channelBuilder = channelBuilder.usePlaintext();
+ // we can only connect using HTTP/2 without any negotiation when using plain text
+ httpClient.protocols(List.of(Protocol.H2_PRIOR_KNOWLEDGE));
+ }
+ if (sslFactory != null) {
+ var trustManager = sslFactory.getTrustManager();
+ if (trustManager.isEmpty()) {
+ throw new SDKException("SSL factory must have a trust manager");
+ }
+ httpClient.sslSocketFactory(sslFactory.getSslSocketFactory(), trustManager.get());
}
- return channelBuilder;
+ return httpClient.build();
}
SSLFactory getSslFactory() {
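The net effect of the builder changes above is that every service stub shares one OkHttpClient and one ProtocolClient, with AuthInterceptor supplying the Authorization and DPoP headers. As a rough usage sketch (the builder methods platformEndpoint and clientSecret and the getServices() accessor are assumptions about the public API, not something this diff shows):

    import io.opentdf.platform.sdk.SDK;
    import io.opentdf.platform.sdk.SDKBuilder;

    // Sketch only: the builder/accessor names below are assumptions.
    public class BuilderUsageSketch {
        public static void main(String[] args) {
            SDK sdk = new SDKBuilder()
                    .platformEndpoint("https://platform.example.org") // normalized by AddressNormalizer
                    .clientSecret("client-id", "client-secret")       // enables the AuthInterceptor path
                    .build();
            // All service clients created in buildServices() share a single OkHttpClient and
            // ProtocolClient; closing the services shuts the shared client down.
            var attributes = sdk.getServices().attributes();
        }
    }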
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/TDF.java b/sdk/src/main/java/io/opentdf/platform/sdk/TDF.java
index 47d03e48..1640fb79 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/TDF.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/TDF.java
@@ -1,5 +1,6 @@
package io.opentdf.platform.sdk;
+import com.connectrpc.ConnectException;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.nimbusds.jose.*;
@@ -28,7 +29,6 @@
import java.security.*;
import java.text.ParseException;
import java.util.*;
-import java.util.concurrent.ExecutionException;
/**
* The TDF class is responsible for handling operations related to
@@ -660,12 +660,9 @@ Reader loadTDF(SeekableByteChannel tdf, Config.TDFReaderConfig tdfReaderConfig,
.build();
ListKeyAccessServersResponse response;
try {
- response = services.kasRegistry().listKeyAccessServers(request).get();
- } catch (ExecutionException e) {
- throw new SDKException("error getting key access servers", e);
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();
- throw new SDKException("interrupted while getting list of kases", e);
+ response = RequestHelper.getOrThrow(services.kasRegistry().listKeyAccessServersBlocking(request, Collections.emptyMap()).execute());
+ } catch (ConnectException e) {
+ throw new SDKException("error getting kas servers", e);
}
tdfReaderConfig.kasAllowlist = new HashSet<>();
diff --git a/sdk/src/main/java/io/opentdf/platform/sdk/GRPCAuthInterceptor.java b/sdk/src/main/java/io/opentdf/platform/sdk/TokenSource.java
similarity index 59%
rename from sdk/src/main/java/io/opentdf/platform/sdk/GRPCAuthInterceptor.java
rename to sdk/src/main/java/io/opentdf/platform/sdk/TokenSource.java
index a7e4e8b2..97e02a0c 100644
--- a/sdk/src/main/java/io/opentdf/platform/sdk/GRPCAuthInterceptor.java
+++ b/sdk/src/main/java/io/opentdf/platform/sdk/TokenSource.java
@@ -14,45 +14,38 @@
import com.nimbusds.oauth2.sdk.http.HTTPRequest;
import com.nimbusds.oauth2.sdk.http.HTTPResponse;
import com.nimbusds.oauth2.sdk.token.AccessToken;
-import io.grpc.CallOptions;
-import io.grpc.Channel;
-import io.grpc.ClientCall;
-import io.grpc.ClientInterceptor;
-import io.grpc.ForwardingClientCall;
-import io.grpc.Metadata;
-import io.grpc.MethodDescriptor;
import nl.altindag.ssl.SSLFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URI;
import java.net.URISyntaxException;
+import java.net.URL;
import java.time.Instant;
/**
- * The GRPCAuthInterceptor class is responsible for intercepting client calls before they are sent
- * to the server. It adds authentication headers to the requests by fetching and caching access
- * tokens.
+ * The TokenSource class is responsible for providing authorization tokens. It caches the
+ * current access token, refreshes it when it expires, and makes the OIDC token endpoint
+ * calls needed to do so. It is thread-safe.
*/
-class GRPCAuthInterceptor implements ClientInterceptor {
+class TokenSource {
private Instant tokenExpiryTime;
private AccessToken token;
private final ClientAuthentication clientAuth;
private final RSAKey rsaKey;
private final URI tokenEndpointURI;
private final AuthorizationGrant authzGrant;
- private SSLFactory sslFactory;
- private static final Logger logger = LoggerFactory.getLogger(GRPCAuthInterceptor.class);
+ private final SSLFactory sslFactory;
+ private static final Logger logger = LoggerFactory.getLogger(TokenSource.class);
/**
- * Constructs a new GRPCAuthInterceptor with the specified client authentication and RSA key.
+ * Constructs a new TokenSource with the specified client authentication and RSA key.
*
* @param clientAuth the client authentication to be used by the interceptor
* @param rsaKey the RSA key to be used by the interceptor
* @param sslFactory Optional SSLFactory for Requests
*/
- public GRPCAuthInterceptor(ClientAuthentication clientAuth, RSAKey rsaKey, URI tokenEndpointURI, AuthorizationGrant authzGrant, SSLFactory sslFactory) {
+ public TokenSource(ClientAuthentication clientAuth, RSAKey rsaKey, URI tokenEndpointURI, AuthorizationGrant authzGrant, SSLFactory sslFactory) {
this.clientAuth = clientAuth;
this.rsaKey = rsaKey;
this.tokenEndpointURI = tokenEndpointURI;
@@ -60,43 +53,43 @@ public GRPCAuthInterceptor(ClientAuthentication clientAuth, RSAKey rsaKey, URI t
this.authzGrant = authzGrant;
}
- /**
- * Intercepts the client call before it is sent to the server.
- *
- * @param method The method descriptor for the call.
- * @param callOptions The call options for the call.
- * @param next The next channel in the channel pipeline.
- * @param <ReqT> The type of the request message.
- * @param <RespT> The type of the response message.
- * @return A client call with the intercepted behavior.
- */
- @Override
- public <ReqT, RespT> ClientCall<ReqT, RespT> interceptCall(MethodDescriptor<ReqT, RespT> method,
- CallOptions callOptions, Channel next) {
- return new ForwardingClientCall.SimpleForwardingClientCall<>(next.newCall(method, callOptions)) {
- @Override
- public void start(Listener<RespT> responseListener, Metadata headers) {
- // Get the access token
- AccessToken t = getToken();
- headers.put(Metadata.Key.of("Authorization", Metadata.ASCII_STRING_MARSHALLER),
- "DPoP " + t.getValue());
-
- // Build the DPoP proof for each request
- try {
- DPoPProofFactory dpopFactory = new DefaultDPoPProofFactory(rsaKey, JWSAlgorithm.RS256);
-
- URI uri = new URI("/" + method.getFullMethodName());
- SignedJWT proof = dpopFactory.createDPoPJWT("POST", uri, t);
- headers.put(Metadata.Key.of("DPoP", Metadata.ASCII_STRING_MARSHALLER),
- proof.serialize());
- } catch (URISyntaxException e) {
- throw new RuntimeException("Invalid URI syntax for DPoP proof creation", e);
- } catch (JOSEException e) {
- throw new RuntimeException("Error creating DPoP proof", e);
- }
- super.start(responseListener, headers);
- }
- };
+ class AuthHeaders {
+ private final String authHeader;
+ private final String dpopHeader;
+
+ public AuthHeaders(String authHeader, String dpopHeader) {
+ this.authHeader = authHeader;
+ this.dpopHeader = dpopHeader;
+ }
+
+ public String getAuthHeader() {
+ return authHeader;
+ }
+
+ public String getDpopHeader() {
+ return dpopHeader;
+ }
+ }
+
+ public AuthHeaders getAuthHeaders(URL url, String method) {
+ // Get the access token
+ AccessToken t = getToken();
+
+ // Build the DPoP proof for each request
+ String dpopProof;
+ try {
+ DPoPProofFactory dpopFactory = new DefaultDPoPProofFactory(rsaKey, JWSAlgorithm.RS256);
+ SignedJWT proof = dpopFactory.createDPoPJWT(method, url.toURI(), t);
+ dpopProof = proof.serialize();
+ } catch (URISyntaxException e) {
+ throw new SDKException("Invalid URI syntax for DPoP proof creation", e);
+ } catch (JOSEException e) {
+ throw new SDKException("Error creating DPoP proof", e);
+ }
+
+ return new AuthHeaders(
+ "DPoP " + t.getValue(),
+ dpopProof);
}
/**
@@ -131,10 +124,9 @@ private synchronized AccessToken getToken() {
tokenResponse = TokenResponse.parse(httpResponse);
if (!tokenResponse.indicatesSuccess()) {
ErrorObject error = tokenResponse.toErrorResponse().getErrorObject();
- throw new RuntimeException("Token request failed: " + error);
+ throw new SDKException("failure to get token. description = [" + error.getDescription() + "] error code = [" + error.getCode() + "] error uri = [" + error.getURI() + "]");
}
-
var tokens = tokenResponse.toSuccessResponse().getTokens();
if (tokens.getDPoPAccessToken() != null) {
logger.trace("retrieved a new DPoP access token");
@@ -157,8 +149,7 @@ private synchronized AccessToken getToken() {
}
} catch (Exception e) {
- // TODO Auto-generated catch block
- throw new RuntimeException("failed to get token", e);
+ throw new SDKException("failed to get token", e);
}
return this.token;
}
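getAuthHeaders() is the piece AuthInterceptor (added below) calls for every request: it returns an Authorization value of the form "DPoP <access token>" plus a DPoP proof JWT bound to the request method and URI. A minimal sketch of inspecting those headers, assuming a TokenSource has already been constructed (the URL and variable names are illustrative):

    package io.opentdf.platform.sdk;

    import com.nimbusds.jwt.SignedJWT;
    import java.net.URL;

    // Illustration only: shows the shape of the two headers returned by getAuthHeaders().
    class AuthHeadersSketch {
        static void inspect(TokenSource tokenSource) throws Exception {
            URL url = new URL("https://platform.example.org/kas.AccessService/Rewrap");
            TokenSource.AuthHeaders headers = tokenSource.getAuthHeaders(url, "POST");

            String authorization = headers.getAuthHeader(); // "DPoP <access token>"

            // The DPoP header is a signed JWT whose htm/htu claims bind it to this request.
            SignedJWT proof = SignedJWT.parse(headers.getDpopHeader());
            String htm = proof.getJWTClaimsSet().getStringClaim("htm"); // "POST"
            String htu = proof.getJWTClaimsSet().getStringClaim("htu"); // the request URI
        }
    }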
diff --git a/sdk/src/main/kotlin/io/opentdf/platform/sdk/AuthInterceptor.kt b/sdk/src/main/kotlin/io/opentdf/platform/sdk/AuthInterceptor.kt
new file mode 100644
index 00000000..c7eaa3ad
--- /dev/null
+++ b/sdk/src/main/kotlin/io/opentdf/platform/sdk/AuthInterceptor.kt
@@ -0,0 +1,54 @@
+package io.opentdf.platform.sdk
+
+import com.connectrpc.Interceptor
+import com.connectrpc.StreamFunction
+import com.connectrpc.UnaryFunction
+import com.connectrpc.http.UnaryHTTPRequest
+import com.connectrpc.http.clone
+
+private class AuthInterceptor(private val ts: TokenSource) : Interceptor {
+ override fun streamFunction(): StreamFunction {
+ return StreamFunction(
+ requestFunction = { request ->
+ val requestHeaders = mutableMapOf<String, List<String>>()
+ val authHeaders = ts.getAuthHeaders(request.url, "POST");
+ requestHeaders["Authorization"] = listOf(authHeaders.authHeader)
+ requestHeaders["DPoP"] = listOf(authHeaders.dpopHeader)
+
+ return@StreamFunction request.clone(
+ url = request.url,
+ contentType = request.contentType,
+ headers = requestHeaders,
+ timeout = request.timeout,
+ methodSpec = request.methodSpec,
+ )
+ },
+ requestBodyFunction = { resp -> resp },
+ streamResultFunction = { streamResult -> streamResult },
+ )
+ }
+
+ override fun unaryFunction(): UnaryFunction {
+ return UnaryFunction(
+ requestFunction = { request ->
+ val requestHeaders = mutableMapOf<String, List<String>>()
+ val authHeaders = ts.getAuthHeaders(request.url, request.httpMethod.name);
+ requestHeaders["Authorization"] = listOf(authHeaders.authHeader)
+ requestHeaders["DPoP"] = listOf(authHeaders.dpopHeader)
+
+ return@UnaryFunction UnaryHTTPRequest(
+ url = request.url,
+ contentType = request.contentType,
+ headers = requestHeaders,
+ message = request.message,
+ timeout = request.timeout,
+ methodSpec = request.methodSpec,
+ httpMethod = request.httpMethod
+ )
+ },
+ responseFunction = { resp ->
+ resp
+ },
+ )
+ }
+}
\ No newline at end of file
diff --git a/sdk/src/main/kotlin/io/opentdf/platform/sdk/RequestHelper.kt b/sdk/src/main/kotlin/io/opentdf/platform/sdk/RequestHelper.kt
new file mode 100644
index 00000000..bf377157
--- /dev/null
+++ b/sdk/src/main/kotlin/io/opentdf/platform/sdk/RequestHelper.kt
@@ -0,0 +1,21 @@
+package io.opentdf.platform.sdk
+
+import com.connectrpc.ConnectException
+import com.connectrpc.ResponseMessage
+import com.connectrpc.getOrThrow
+
+class RequestHelper {
+ companion object {
+ /**
+ * Kotlin doesn't have checked exceptions and, importantly, doesn't declare them in
+ * method signatures. That means that when a Kotlin function throws what Java treats
+ * as a checked exception, Java callers can't catch it unless the function carries the
+ * [kotlin.jvm.Throws] annotation. We wrap getOrThrow() in a static method with that
+ * annotation so that Java code can catch [ConnectException].
+ */
+ @JvmStatic @Throws(ConnectException::class)
+ fun <T> getOrThrow(responseMessage: ResponseMessage<T>): T {
+ return responseMessage.getOrThrow()
+ }
+ }
+}
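From Java, the @Throws annotation above is what makes catching ConnectException around a blocking call legal. A minimal sketch of the calling pattern, assuming an SDK.Services instance as used elsewhere in this diff:

    package io.opentdf.platform.sdk;

    import com.connectrpc.ConnectException;
    import io.opentdf.platform.policy.kasregistry.ListKeyAccessServersRequest;
    import io.opentdf.platform.policy.kasregistry.ListKeyAccessServersResponse;
    import java.util.Collections;

    // Sketch of the Java calling pattern RequestHelper.getOrThrow enables.
    class BlockingCallSketch {
        static ListKeyAccessServersResponse listKases(SDK.Services services) {
            try {
                return RequestHelper.getOrThrow(
                        services.kasRegistry()
                                .listKeyAccessServersBlocking(
                                        ListKeyAccessServersRequest.getDefaultInstance(),
                                        Collections.emptyMap())
                                .execute());
            } catch (ConnectException e) {
                // Without @Throws(ConnectException::class) on getOrThrow, javac would reject
                // this catch block as unreachable for a checked exception.
                throw new SDKException("error listing key access servers", e);
            }
        }
    }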
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/AddressNormalizerTest.java b/sdk/src/test/java/io/opentdf/platform/sdk/AddressNormalizerTest.java
new file mode 100644
index 00000000..079eb97a
--- /dev/null
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/AddressNormalizerTest.java
@@ -0,0 +1,27 @@
+package io.opentdf.platform.sdk;
+
+
+import org.junit.jupiter.api.Test;
+
+import static io.opentdf.platform.sdk.AddressNormalizer.normalizeAddress;
+import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
+
+class AddressNormalizerTest {
+
+ @Test
+ void testAddressNormalizationWithHTTPSClient() {
+ assertThat(normalizeAddress("http://example.org", false)).isEqualTo("https://example.org:443");
+ // default to https if no scheme is provided
+ assertThat(normalizeAddress("example.org:1234", false)).isEqualTo("https://example.org:1234");
+ assertThat(normalizeAddress("ftp://example.org", false)).isEqualTo("https://example.org:443");
+ }
+
+ @Test
+ void testAddressNormalizationWithInsecureHTTPClient() {
+ assertThat(normalizeAddress("http://localhost:8080", true)).isEqualTo("http://localhost:8080");
+ assertThat(normalizeAddress("http://example.org", true)).isEqualTo("http://example.org:80");
+ // default to http if no scheme is provided
+ assertThat(normalizeAddress("example.org:1234", true)).isEqualTo("http://example.org:1234");
+ assertThat(normalizeAddress("sftp://example.org", true)).isEqualTo("http://example.org:80");
+ }
+}
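These tests pin down the normalization rules: choose http or https (and the default port 80 or 443) from the usePlainText flag, keep an explicit port, and replace any other scheme. AddressNormalizer itself is not part of this excerpt, so the following is only an illustration of logic that satisfies the assertions above, not the actual implementation:

    import java.net.URI;

    // Illustration only: reproduces the behaviour asserted by AddressNormalizerTest.
    final class AddressNormalizerSketch {
        static String normalize(String address, boolean usePlainText) {
            String scheme = usePlainText ? "http" : "https";
            int defaultPort = usePlainText ? 80 : 443;
            // "example.org:1234" has no scheme; URI would parse "example.org" as the scheme,
            // so prepend one before parsing.
            URI uri = URI.create(address.contains("://") ? address : scheme + "://" + address);
            int port = uri.getPort() == -1 ? defaultPort : uri.getPort();
            return scheme + "://" + uri.getHost() + ":" + port;
        }
    }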
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/AutoconfigureTest.java b/sdk/src/test/java/io/opentdf/platform/sdk/AutoconfigureTest.java
index bb9faf18..59cd0912 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/AutoconfigureTest.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/AutoconfigureTest.java
@@ -6,14 +6,23 @@
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import com.connectrpc.ResponseMessage;
+import com.connectrpc.UnaryBlockingCall;
import io.opentdf.platform.policy.Attribute;
+import io.opentdf.platform.policy.AttributeRuleTypeEnum;
+import io.opentdf.platform.policy.KasPublicKey;
+import io.opentdf.platform.policy.KasPublicKeyAlgEnum;
+import io.opentdf.platform.policy.KasPublicKeySet;
+import io.opentdf.platform.policy.KeyAccessServer;
+import io.opentdf.platform.policy.Namespace;
+import io.opentdf.platform.policy.PublicKey;
import io.opentdf.platform.policy.Value;
-import io.opentdf.platform.policy.attributes.AttributesServiceGrpc;
+import io.opentdf.platform.policy.attributes.AttributesServiceClient;
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsRequest;
import io.opentdf.platform.policy.attributes.GetAttributeValuesByFqnsResponse;
import io.opentdf.platform.sdk.Autoconfigure.AttributeValueFQN;
@@ -21,25 +30,16 @@
import io.opentdf.platform.sdk.Autoconfigure.Granter.BooleanKeyExpression;
import io.opentdf.platform.sdk.Autoconfigure.KeySplitStep;
import io.opentdf.platform.sdk.Autoconfigure.Granter;
-import io.opentdf.platform.policy.Namespace;
-import io.opentdf.platform.policy.PublicKey;
-import io.opentdf.platform.policy.KeyAccessServer;
-import io.opentdf.platform.policy.AttributeRuleTypeEnum;
-import io.opentdf.platform.policy.KasPublicKey;
-import io.opentdf.platform.policy.KasPublicKeyAlgEnum;
-import io.opentdf.platform.policy.KasPublicKeySet;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
-import com.google.common.util.concurrent.SettableFuture;
-
import java.util.ArrayList;
+import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -574,33 +574,36 @@ public void testReasonerSpecificity() {
);
for (ReasonerTestCase tc : testCases) {
- assertDoesNotThrow(() -> {
- AttributesServiceGrpc.AttributesServiceFutureStub attributeGrpcStub = mock(
- AttributesServiceGrpc.AttributesServiceFutureStub.class);
- lenient().when(attributeGrpcStub.getAttributeValuesByFqns(any(GetAttributeValuesByFqnsRequest.class)))
- .thenAnswer(
- invocation -> {
- GetAttributeValuesByFqnsResponse resp = getResponse(
- (GetAttributeValuesByFqnsRequest) invocation.getArguments()[0]);
- SettableFuture<GetAttributeValuesByFqnsResponse> future = SettableFuture.create();
- future.set(resp); // Set the request as the future's result
- return future;
- });
-
- Granter reasoner = Autoconfigure.newGranterFromService(attributeGrpcStub, new KASKeyCache(),
- tc.getPolicy().toArray(new AttributeValueFQN[0]));
- assertThat(reasoner).isNotNull();
-
- var wrapper = new Object() {
- int i = 0;
- };
- List plan = reasoner.plan(tc.getDefaults(), () -> {
- return String.valueOf(wrapper.i++ + 1);
- }
+ var attributeService = mock(AttributesServiceClient.class);
+ when(attributeService.getAttributeValuesByFqnsBlocking(any(), any())).thenAnswer(invocation -> {
+ var request = (GetAttributeValuesByFqnsRequest) invocation.getArgument(0);
+ return new UnaryBlockingCall<GetAttributeValuesByFqnsResponse>() {
+ @Override
+ public ResponseMessage<GetAttributeValuesByFqnsResponse> execute() {
+ return new ResponseMessage.Success<>(getResponse(request), Collections.emptyMap(), Collections.emptyMap());
+ }
- );
- assertThat(plan).hasSameElementsAs(tc.getPlan());
+ @Override
+ public void cancel() {
+ // not really calling anything
+ }
+ };
});
+
+
+ Granter reasoner = Autoconfigure.newGranterFromService(attributeService, new KASKeyCache(),
+ tc.getPolicy().toArray(new AttributeValueFQN[0]));
+ assertThat(reasoner).isNotNull();
+
+ var wrapper = new Object() {
+ int i = 0;
+ };
+ List plan = reasoner.plan(tc.getDefaults(), () -> {
+ return String.valueOf(wrapper.i++ + 1);
+ }
+
+ );
+ assertThat(plan).hasSameElementsAs(tc.getPlan());
}
}
@@ -869,7 +872,7 @@ GetAttributeValuesByFqnsResponse getResponseWithGrants(GetAttributeValuesByFqnsR
}
@Test
- void testKeyCacheFromGrants() throws InterruptedException, ExecutionException {
+ void testKeyCacheFromGrants() {
// Create the KasPublicKey object
KasPublicKey kasPublicKey1 = KasPublicKey.newBuilder()
.setAlg(KasPublicKeyAlgEnum.KAS_PUBLIC_KEY_ALG_ENUM_EC_SECP256R1)
@@ -896,21 +899,23 @@ void testKeyCacheFromGrants() throws InterruptedException, ExecutionException {
.setUri("https://example.com/kas")
.build();
- AttributesServiceGrpc.AttributesServiceFutureStub attributeGrpcStub = mock(
- AttributesServiceGrpc.AttributesServiceFutureStub.class);
- lenient().when(attributeGrpcStub.getAttributeValuesByFqns(any(GetAttributeValuesByFqnsRequest.class)))
- .thenAnswer(
- invocation -> {
- GetAttributeValuesByFqnsResponse resp = getResponseWithGrants(
- (GetAttributeValuesByFqnsRequest) invocation.getArguments()[0], List.of(kas1));
- SettableFuture<GetAttributeValuesByFqnsResponse> future = SettableFuture.create();
- future.set(resp); // Set the request as the future's result
- return future;
- });
+ AttributesServiceClient attributesServiceClient = mock(AttributesServiceClient.class);
+ when(attributesServiceClient.getAttributeValuesByFqnsBlocking(any(), any())).thenAnswer(invocation -> {
+ var request = (GetAttributeValuesByFqnsRequest)invocation.getArgument(0);
+ return new UnaryBlockingCall<GetAttributeValuesByFqnsResponse>() {
+ @Override
+ public ResponseMessage<GetAttributeValuesByFqnsResponse> execute() {
+ return new ResponseMessage.Success<>(getResponseWithGrants(request, List.of(kas1)), Collections.emptyMap(), Collections.emptyMap());
+ }
+ @Override public void cancel() {
+ // not really calling anything
+ }
+ };
+ });
KASKeyCache keyCache = new KASKeyCache();
- Granter reasoner = Autoconfigure.newGranterFromService(attributeGrpcStub, keyCache,
+ Granter reasoner = Autoconfigure.newGranterFromService(attributesServiceClient, keyCache,
List.of(clsS, rel2gbr, rel2usa, n2kHCS, n2kSI).toArray(new AttributeValueFQN[0]));
assertThat(reasoner).isNotNull();
@@ -928,7 +933,6 @@ void testKeyCacheFromGrants() throws InterruptedException, ExecutionException {
assertEquals("test-kid-2", storedKASInfo2.KID);
assertEquals("rsa:2048", storedKASInfo2.Algorithm);
assertEquals("public-key-pem-2", storedKASInfo2.PublicKey);
-
}
}
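The UnaryBlockingCall stubbing above is repeated nearly verbatim in NanoTDFTest and in the TDF tests later in this diff. Not part of this change, but a small shared helper along these lines would collapse the duplication:

    import com.connectrpc.ResponseMessage;
    import com.connectrpc.UnaryBlockingCall;
    import java.util.Collections;

    // Possible test helper (hypothetical, not in this diff) wrapping a canned response.
    final class TestCalls {
        static <T> UnaryBlockingCall<T> success(T message) {
            return new UnaryBlockingCall<>() {
                @Override
                public ResponseMessage<T> execute() {
                    return new ResponseMessage.Success<>(message, Collections.emptyMap(), Collections.emptyMap());
                }

                @Override
                public void cancel() {
                    // nothing to cancel for a canned response
                }
            };
        }
    }

With it, each stub reduces to when(service.listKeyAccessServersBlocking(any(), any())).thenReturn(TestCalls.success(mockResponse)).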
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/FakeServices.java b/sdk/src/test/java/io/opentdf/platform/sdk/FakeServices.java
index dca1e6e1..b3573593 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/FakeServices.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/FakeServices.java
@@ -1,31 +1,31 @@
package io.opentdf.platform.sdk;
-import io.opentdf.platform.authorization.AuthorizationServiceGrpc;
-import io.opentdf.platform.policy.attributes.AttributesServiceGrpc;
-import io.opentdf.platform.policy.kasregistry.KeyAccessServerRegistryServiceGrpc;
-import io.opentdf.platform.policy.namespaces.NamespaceServiceGrpc;
-import io.opentdf.platform.policy.resourcemapping.ResourceMappingServiceGrpc;
-import io.opentdf.platform.policy.subjectmapping.SubjectMappingServiceGrpc;
+import io.opentdf.platform.authorization.AuthorizationServiceClient;
+import io.opentdf.platform.policy.attributes.AttributesServiceClient;
+import io.opentdf.platform.policy.kasregistry.KeyAccessServerRegistryServiceClient;
+import io.opentdf.platform.policy.namespaces.NamespaceServiceClient;
+import io.opentdf.platform.policy.resourcemapping.ResourceMappingServiceClient;
+import io.opentdf.platform.policy.subjectmapping.SubjectMappingServiceClient;
import java.util.Objects;
public class FakeServices implements SDK.Services {
- private final AuthorizationServiceGrpc.AuthorizationServiceFutureStub authorizationService;
- private final AttributesServiceGrpc.AttributesServiceFutureStub attributesService;
- private final NamespaceServiceGrpc.NamespaceServiceFutureStub namespaceService;
- private final SubjectMappingServiceGrpc.SubjectMappingServiceFutureStub subjectMappingService;
- private final ResourceMappingServiceGrpc.ResourceMappingServiceFutureStub resourceMappingService;
- private final KeyAccessServerRegistryServiceGrpc.KeyAccessServerRegistryServiceFutureStub keyAccessServerRegistryServiceFutureStub;
+ private final AuthorizationServiceClient authorizationService;
+ private final AttributesServiceClient attributesService;
+ private final NamespaceServiceClient namespaceService;
+ private final SubjectMappingServiceClient subjectMappingService;
+ private final ResourceMappingServiceClient resourceMappingService;
+ private final KeyAccessServerRegistryServiceClient keyAccessServerRegistryServiceFutureStub;
private final SDK.KAS kas;
public FakeServices(
- AuthorizationServiceGrpc.AuthorizationServiceFutureStub authorizationService,
- AttributesServiceGrpc.AttributesServiceFutureStub attributesService,
- NamespaceServiceGrpc.NamespaceServiceFutureStub namespaceService,
- SubjectMappingServiceGrpc.SubjectMappingServiceFutureStub subjectMappingService,
- ResourceMappingServiceGrpc.ResourceMappingServiceFutureStub resourceMappingService,
- KeyAccessServerRegistryServiceGrpc.KeyAccessServerRegistryServiceFutureStub keyAccessServerRegistryServiceFutureStub,
+ AuthorizationServiceClient authorizationService,
+ AttributesServiceClient attributesService,
+ NamespaceServiceClient namespaceService,
+ SubjectMappingServiceClient subjectMappingService,
+ ResourceMappingServiceClient resourceMappingService,
+ KeyAccessServerRegistryServiceClient keyAccessServerRegistryServiceFutureStub,
SDK.KAS kas) {
this.authorizationService = authorizationService;
this.attributesService = attributesService;
@@ -37,32 +37,32 @@ public FakeServices(
}
@Override
- public AuthorizationServiceGrpc.AuthorizationServiceFutureStub authorization() {
+ public AuthorizationServiceClient authorization() {
return Objects.requireNonNull(authorizationService);
}
@Override
- public AttributesServiceGrpc.AttributesServiceFutureStub attributes() {
+ public AttributesServiceClient attributes() {
return Objects.requireNonNull(attributesService);
}
@Override
- public NamespaceServiceGrpc.NamespaceServiceFutureStub namespaces() {
+ public NamespaceServiceClient namespaces() {
return Objects.requireNonNull(namespaceService);
}
@Override
- public SubjectMappingServiceGrpc.SubjectMappingServiceFutureStub subjectMappings() {
+ public SubjectMappingServiceClient subjectMappings() {
return Objects.requireNonNull(subjectMappingService);
}
@Override
- public ResourceMappingServiceGrpc.ResourceMappingServiceFutureStub resourceMappings() {
+ public ResourceMappingServiceClient resourceMappings() {
return Objects.requireNonNull(resourceMappingService);
}
@Override
- public KeyAccessServerRegistryServiceGrpc.KeyAccessServerRegistryServiceFutureStub kasRegistry() {
+ public KeyAccessServerRegistryServiceClient kasRegistry() {
return Objects.requireNonNull(keyAccessServerRegistryServiceFutureStub);
}
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/FakeServicesBuilder.java b/sdk/src/test/java/io/opentdf/platform/sdk/FakeServicesBuilder.java
index 31e33087..2a80f53d 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/FakeServicesBuilder.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/FakeServicesBuilder.java
@@ -1,47 +1,47 @@
package io.opentdf.platform.sdk;
-import io.opentdf.platform.authorization.AuthorizationServiceGrpc;
-import io.opentdf.platform.policy.attributes.AttributesServiceGrpc;
-import io.opentdf.platform.policy.kasregistry.KeyAccessServerRegistryServiceGrpc;
-import io.opentdf.platform.policy.namespaces.NamespaceServiceGrpc;
-import io.opentdf.platform.policy.resourcemapping.ResourceMappingServiceGrpc;
-import io.opentdf.platform.policy.subjectmapping.SubjectMappingServiceGrpc;
+import io.opentdf.platform.authorization.AuthorizationServiceClient;
+import io.opentdf.platform.policy.attributes.AttributesServiceClient;
+import io.opentdf.platform.policy.kasregistry.KeyAccessServerRegistryServiceClient;
+import io.opentdf.platform.policy.namespaces.NamespaceServiceClient;
+import io.opentdf.platform.policy.resourcemapping.ResourceMappingServiceClient;
+import io.opentdf.platform.policy.subjectmapping.SubjectMappingServiceClient;
public class FakeServicesBuilder {
- private AuthorizationServiceGrpc.AuthorizationServiceFutureStub authorizationService;
- private AttributesServiceGrpc.AttributesServiceFutureStub attributesService;
- private NamespaceServiceGrpc.NamespaceServiceFutureStub namespaceService;
- private SubjectMappingServiceGrpc.SubjectMappingServiceFutureStub subjectMappingService;
- private ResourceMappingServiceGrpc.ResourceMappingServiceFutureStub resourceMappingService;
- private KeyAccessServerRegistryServiceGrpc.KeyAccessServerRegistryServiceFutureStub keyAccessServerRegistryServiceFutureStub;
+ private AuthorizationServiceClient authorizationService;
+ private AttributesServiceClient attributesService;
+ private NamespaceServiceClient namespaceService;
+ private SubjectMappingServiceClient subjectMappingService;
+ private ResourceMappingServiceClient resourceMappingService;
+ private KeyAccessServerRegistryServiceClient keyAccessServerRegistryServiceFutureStub;
private SDK.KAS kas;
- public FakeServicesBuilder setAuthorizationService(AuthorizationServiceGrpc.AuthorizationServiceFutureStub authorizationService) {
+ public FakeServicesBuilder setAuthorizationService(AuthorizationServiceClient authorizationService) {
this.authorizationService = authorizationService;
return this;
}
- public FakeServicesBuilder setAttributesService(AttributesServiceGrpc.AttributesServiceFutureStub attributesService) {
+ public FakeServicesBuilder setAttributesService(AttributesServiceClient attributesService) {
this.attributesService = attributesService;
return this;
}
- public FakeServicesBuilder setNamespaceService(NamespaceServiceGrpc.NamespaceServiceFutureStub namespaceService) {
+ public FakeServicesBuilder setNamespaceService(NamespaceServiceClient namespaceService) {
this.namespaceService = namespaceService;
return this;
}
- public FakeServicesBuilder setSubjectMappingService(SubjectMappingServiceGrpc.SubjectMappingServiceFutureStub subjectMappingService) {
+ public FakeServicesBuilder setSubjectMappingService(SubjectMappingServiceClient subjectMappingService) {
this.subjectMappingService = subjectMappingService;
return this;
}
- public FakeServicesBuilder setResourceMappingService(ResourceMappingServiceGrpc.ResourceMappingServiceFutureStub resourceMappingService) {
+ public FakeServicesBuilder setResourceMappingService(ResourceMappingServiceClient resourceMappingService) {
this.resourceMappingService = resourceMappingService;
return this;
}
- public FakeServicesBuilder setKeyAccessServerRegistryService(KeyAccessServerRegistryServiceGrpc.KeyAccessServerRegistryServiceFutureStub keyAccessServerRegistryServiceFutureStub) {
+ public FakeServicesBuilder setKeyAccessServerRegistryService(KeyAccessServerRegistryServiceClient keyAccessServerRegistryServiceFutureStub) {
this.keyAccessServerRegistryServiceFutureStub = keyAccessServerRegistryServiceFutureStub;
return this;
}
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/KASClientTest.java b/sdk/src/test/java/io/opentdf/platform/sdk/KASClientTest.java
index 496f4708..a6af98ff 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/KASClientTest.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/KASClientTest.java
@@ -1,5 +1,11 @@
package io.opentdf.platform.sdk;
+import com.connectrpc.ProtocolClientConfig;
+import com.connectrpc.extensions.GoogleJavaProtobufStrategy;
+import com.connectrpc.impl.ProtocolClient;
+import com.connectrpc.okhttp.ConnectOkHttpClient;
+import com.connectrpc.protocols.GETConfiguration;
+import com.connectrpc.protocols.NetworkProtocol;
import com.google.gson.Gson;
import com.google.protobuf.ByteString;
import com.nimbusds.jose.JOSEException;
@@ -7,8 +13,6 @@
import com.nimbusds.jose.crypto.RSASSAVerifier;
import com.nimbusds.jose.jwk.RSAKey;
import com.nimbusds.jwt.SignedJWT;
-import io.grpc.ManagedChannel;
-import io.grpc.ManagedChannelBuilder;
import io.grpc.Server;
import io.grpc.ServerBuilder;
import io.grpc.stub.StreamObserver;
@@ -17,26 +21,34 @@
import io.opentdf.platform.kas.PublicKeyResponse;
import io.opentdf.platform.kas.RewrapRequest;
import io.opentdf.platform.kas.RewrapResponse;
+import okhttp3.OkHttpClient;
+import okhttp3.Protocol;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.security.interfaces.RSAPublicKey;
import java.text.ParseException;
import java.util.Base64;
+import java.util.List;
import java.util.Random;
import java.util.concurrent.atomic.AtomicReference;
-import java.util.function.Function;
+import java.util.function.BiFunction;
import static io.opentdf.platform.sdk.SDKBuilderTest.getRandomPort;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
public class KASClientTest {
-
- private static final Function<String, ManagedChannel> channelFactory = (String url) -> ManagedChannelBuilder
- .forTarget(url)
- .usePlaintext()
+ OkHttpClient httpClient = new OkHttpClient.Builder()
+ .protocols(List.of(Protocol.H2_PRIOR_KNOWLEDGE))
.build();
+ BiFunction<OkHttpClient, String, ProtocolClient> aclientFactory = (OkHttpClient client, String endpoint) -> {
+ return new ProtocolClient(
+ new ConnectOkHttpClient(httpClient),
+ new ProtocolClientConfig(endpoint, new GoogleJavaProtobufStrategy(), NetworkProtocol.GRPC, null, GETConfiguration.Enabled.INSTANCE)
+ );
+ };
+
@Test
void testGettingPublicKey() throws IOException {
AccessServiceGrpc.AccessServiceImplBase accessService = new AccessServiceGrpc.AccessServiceImplBase() {
@@ -51,17 +63,14 @@ public void publicKey(PublicKeyRequest request, StreamObserver channelFactory = (String url) -> ManagedChannelBuilder
- .forTarget(url)
- .usePlaintext()
- .build();
+
var keypair = CryptoUtils.generateRSAKeypair();
var dpopKey = new RSAKey.Builder((RSAPublicKey) keypair.getPublic()).privateKey(keypair.getPrivate())
.build();
- try (var kas = new KASClient(channelFactory, dpopKey)) {
+ try (var kas = new KASClient(httpClient, aclientFactory, dpopKey, true)) {
Config.KASInfo kasInfo = new Config.KASInfo();
- kasInfo.URL = "localhost:" + rewrapServer.getPort();
+ kasInfo.URL = "http://localhost:" + rewrapServer.getPort();
assertThat(kas.getPublicKey(kasInfo).PublicKey).isEqualTo("тај је клуц");
}
} finally {
@@ -85,18 +94,16 @@ public void publicKey(PublicKeyRequest request, StreamObserver channelFactory = (String url) -> ManagedChannelBuilder
- .forTarget(url)
- .usePlaintext()
- .build();
var keypair = CryptoUtils.generateRSAKeypair();
var dpopKey = new RSAKey.Builder((RSAPublicKey) keypair.getPublic()).privateKey(keypair.getPrivate())
.build();
- try (var kas = new KASClient(channelFactory, dpopKey)) {
+ try (var kas = new KASClient(httpClient, aclientFactory, dpopKey, true)) {
Config.KASInfo kasInfo = new Config.KASInfo();
- kasInfo.URL = "localhost:" + server.getPort();
+ kasInfo.URL = "http://localhost:" + server.getPort();
assertThat(kas.getPublicKey(kasInfo).KID).isEqualTo("r1");
+ } catch (Exception e) {
+ throw e;
}
} finally {
if (server != null) {
@@ -156,10 +163,10 @@ public void rewrap(RewrapRequest request, StreamObserver respons
rewrapServer = startServer(accessService);
byte[] plaintextKey;
byte[] rewrapResponse;
- try (var kas = new KASClient(channelFactory, dpopKey)) {
+ try (var kas = new KASClient(httpClient, aclientFactory, dpopKey, true)) {
Manifest.KeyAccess keyAccess = new Manifest.KeyAccess();
- keyAccess.url = "localhost:" + rewrapServer.getPort();
+ keyAccess.url = "http://localhost:" + rewrapServer.getPort();
plaintextKey = new byte[32];
new Random().nextBytes(plaintextKey);
var serverWrappedKey = new AsymEncryption(serverKeypair.getPublic()).encrypt(plaintextKey);
@@ -176,27 +183,39 @@ public void rewrap(RewrapRequest request, StreamObserver respons
}
@Test
- public void testAddressNormalization() {
+ void testAddressNormalizationWithHTTPSClient() {
var lastAddress = new AtomicReference();
var dpopKeypair = CryptoUtils.generateRSAKeypair();
var dpopKey = new RSAKey.Builder((RSAPublicKey) dpopKeypair.getPublic()).privateKey(dpopKeypair.getPrivate())
.build();
- var kasClient = new KASClient(addr -> {
+ var httpsKASClient = new KASClient(httpClient, (client, addr) -> {
lastAddress.set(addr);
- return ManagedChannelBuilder.forTarget(addr).build();
- }, dpopKey);
+ return aclientFactory.apply(client, addr);
+ }, dpopKey, false);
- var stub = kasClient.getStub("http://localhost:8080");
- assertThat(lastAddress.get()).isEqualTo("localhost:8080");
- var otherStub = kasClient.getStub("https://localhost:8080");
- assertThat(lastAddress.get()).isEqualTo("localhost:8080");
+ var stub = httpsKASClient.getStub("http://localhost:8080");
+ assertThat(lastAddress.get()).isEqualTo("https://localhost:8080");
+ var otherStub = httpsKASClient.getStub("https://localhost:8080");
+ assertThat(lastAddress.get()).isEqualTo("https://localhost:8080");
assertThat(stub).isSameAs(otherStub);
+ }
- kasClient.getStub("https://example.org");
- assertThat(lastAddress.get()).isEqualTo("example.org:443");
-
- kasClient.getStub("http://example.org");
- assertThat(lastAddress.get()).isEqualTo("example.org:80");
+ @Test
+ void testAddressNormalizationWithInsecureHTTPClient() {
+ var lastAddress = new AtomicReference();
+ var dpopKeypair = CryptoUtils.generateRSAKeypair();
+ var dpopKey = new RSAKey.Builder((RSAPublicKey) dpopKeypair.getPublic()).privateKey(dpopKeypair.getPrivate())
+ .build();
+ var httpsKASClient = new KASClient(httpClient, (client, addr) -> {
+ lastAddress.set(addr);
+ return aclientFactory.apply(client, addr);
+ }, dpopKey, true);
+
+ var c1 = httpsKASClient.getStub("http://example.org");
+ assertThat(lastAddress.get()).isEqualTo("http://example.org:80");
+ var c2 = httpsKASClient.getStub("example.org:80");
+ assertThat(lastAddress.get()).isEqualTo("http://example.org:80");
+ assertThat(c1).isSameAs(c2);
}
private static Server startServer(AccessServiceGrpc.AccessServiceImplBase accessService) throws IOException {
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/NanoTDFTest.java b/sdk/src/test/java/io/opentdf/platform/sdk/NanoTDFTest.java
index 2eeb4af3..d8e6e397 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/NanoTDFTest.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/NanoTDFTest.java
@@ -1,7 +1,9 @@
package io.opentdf.platform.sdk;
+import com.connectrpc.ResponseMessage;
+import com.connectrpc.UnaryBlockingCall;
import io.opentdf.platform.policy.KeyAccessServer;
-import io.opentdf.platform.policy.kasregistry.KeyAccessServerRegistryServiceGrpc.KeyAccessServerRegistryServiceFutureStub;
+import io.opentdf.platform.policy.kasregistry.KeyAccessServerRegistryServiceClient;
import io.opentdf.platform.policy.kasregistry.ListKeyAccessServersRequest;
import io.opentdf.platform.policy.kasregistry.ListKeyAccessServersResponse;
import io.opentdf.platform.sdk.Config.KASInfo;
@@ -14,14 +16,13 @@
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
import java.nio.ByteBuffer;
import java.security.KeyPair;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Base64;
+import java.util.Collections;
import java.util.List;
import java.util.Random;
@@ -47,7 +48,7 @@ public class NanoTDFTest {
private static final String KID = "r1";
- protected static KeyAccessServerRegistryServiceFutureStub kasRegistryService;
+ protected static KeyAccessServerRegistryServiceClient kasRegistryService;
protected static List registeredKases = List.of(
"https://api.example.com/kas",
"https://other.org/kas2",
@@ -124,7 +125,7 @@ public KASKeyCache getKeyCache(){
@BeforeAll
static void setupMocks() {
- kasRegistryService = mock(KeyAccessServerRegistryServiceFutureStub.class);
+ kasRegistryService = mock(KeyAccessServerRegistryServiceClient.class);
List kasRegEntries = new ArrayList<>();
for (String kasUrl : registeredKases ) {
kasRegEntries.add(KeyAccessServer.newBuilder()
@@ -135,11 +136,18 @@ static void setupMocks() {
.build();
// Stub the listKeyAccessServers method
- when(kasRegistryService.listKeyAccessServers(any(ListKeyAccessServersRequest.class)))
- .thenReturn(com.google.common.util.concurrent.Futures.immediateFuture(mockResponse));
- io.grpc.Channel mockChannel = mock(io.grpc.Channel.class);
- when(mockChannel.authority()).thenReturn("mock:8080");
- when(kasRegistryService.getChannel()).thenReturn(mockChannel);
+ when(kasRegistryService.listKeyAccessServersBlocking(any(ListKeyAccessServersRequest.class), any()))
+ .thenReturn(new UnaryBlockingCall<>() {
+ @Override
+ public ResponseMessage<ListKeyAccessServersResponse> execute() {
+ return new ResponseMessage.Success<>(mockResponse, Collections.emptyMap(), Collections.emptyMap());
+ }
+
+ @Override
+ public void cancel() {
+ // this never happens in tests
+ }
+ });
}
private static ArrayList keypairs = new ArrayList<>();
@@ -194,7 +202,7 @@ void encryptionAndDecryptionWithValidKey() throws Exception {
}
}
- void runBasicTest(String kasUrl, boolean allowed, KeyAccessServerRegistryServiceFutureStub kasReg, NanoTDFReaderConfig decryptConfig) throws Exception {
+ void runBasicTest(String kasUrl, boolean allowed, KeyAccessServerRegistryServiceClient kasReg, NanoTDFReaderConfig decryptConfig) throws Exception {
var kasInfos = new ArrayList<>();
var kasInfo = new Config.KASInfo();
kasInfo.URL = kasUrl;
diff --git a/sdk/src/test/java/io/opentdf/platform/sdk/SDKBuilderTest.java b/sdk/src/test/java/io/opentdf/platform/sdk/SDKBuilderTest.java
index 1f1af97e..f8262da6 100644
--- a/sdk/src/test/java/io/opentdf/platform/sdk/SDKBuilderTest.java
+++ b/sdk/src/test/java/io/opentdf/platform/sdk/SDKBuilderTest.java
@@ -1,5 +1,6 @@
package io.opentdf.platform.sdk;
+import com.connectrpc.ResponseMessageKt;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import io.grpc.Metadata;
@@ -29,7 +30,10 @@
import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.Test;
-import java.io.*;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.nio.charset.StandardCharsets;
@@ -39,7 +43,7 @@
import java.security.cert.X509Certificate;
import java.util.Arrays;
import java.util.Base64;
-import java.util.concurrent.ExecutionException;
+import java.util.Collections;
import java.util.concurrent.atomic.AtomicReference;
import static org.assertj.core.api.Assertions.assertThat;
@@ -153,7 +157,7 @@ void sdkServicesSetup(boolean useSSLPlatform, boolean useSSLIDP) throws Exceptio
WellKnownServiceGrpc.WellKnownServiceImplBase wellKnownService = new WellKnownServiceGrpc.WellKnownServiceImplBase() {
@Override
public void getWellKnownConfiguration(GetWellKnownConfigurationRequest request,
- StreamObserver responseObserver) {
+ StreamObserver responseObserver) {
var val = Value.newBuilder().setStringValue(issuer).build();
var config = Struct.newBuilder().putFields("platform_issuer", val).build();
var response = GetWellKnownConfigurationResponse
@@ -183,6 +187,15 @@ public void getWellKnownConfiguration(GetWellKnownConfigurationRequest request,
.directExecutor()
.addService(wellKnownService)
.addService(new NamespaceServiceGrpc.NamespaceServiceImplBase() {
+ @Override
+ public void getNamespace(GetNamespaceRequest request,
+ StreamObserver<GetNamespaceResponse> responseObserver) {
+ var response = GetNamespaceResponse
+ .newBuilder()
+ .build();
+ responseObserver.onNext(response);
+ responseObserver.onCompleted();
+ }
})
.intercept(new ServerInterceptor() {
@Override
@@ -236,7 +249,7 @@ public ServerCall.Listener interceptCall(ServerCall ServerCall.Listener interceptCall(ServerCall ServerCall.Listener interceptCall(ServerCall kasRegEntries = new ArrayList<>();
for (Config.KASInfo kasInfo : getRSAKASInfos()) {
kasRegEntries.add(KeyAccessServer.newBuilder()
@@ -146,11 +145,18 @@ static void setupKeyPairsAndMocks() {
.build();
// Stub the listKeyAccessServers method
- when(kasRegistryService.listKeyAccessServers(any(ListKeyAccessServersRequest.class)))
- .thenReturn(com.google.common.util.concurrent.Futures.immediateFuture(mockResponse));
- io.grpc.Channel mockChannel = mock(io.grpc.Channel.class);
- when(mockChannel.authority()).thenReturn("mock:8080");
- when(kasRegistryService.getChannel()).thenReturn(mockChannel);
+ when(kasRegistryService.listKeyAccessServersBlocking(any(ListKeyAccessServersRequest.class), any()))
+ .thenReturn(new UnaryBlockingCall<>() {
+ @Override
+ public ResponseMessage<ListKeyAccessServersResponse> execute() {
+ return new ResponseMessage.Success<>(mockResponse, Collections.emptyMap(), Collections.emptyMap());
+ }
+
+ @Override
+ public void cancel() {
+ // this never happens in tests
+ }
+ });
}
@Test
@@ -247,7 +253,7 @@ void testSimpleTDFWithAssertionWithRS256() throws Exception {
keypair.getPrivate());
var rsaKasInfo = new Config.KASInfo();
- rsaKasInfo.URL = "https://example.com/kas"+Integer.toString(0);
+ rsaKasInfo.URL = "https://example.com/kas"+ 0;
Config.TDFConfig config = Config.newTDFConfig(
Config.withAutoconfigure(false),
@@ -540,7 +546,7 @@ public void testCreateTDFWithMimeType() throws Exception {
}
@Test
- public void legacyTDFRoundTrips() throws IOException, NoSuchAlgorithmException {
+ void legacyTDFRoundTrips() throws IOException, NoSuchAlgorithmException {
final String mimeType = "application/pdf";
var assertionConfig1 = new AssertionConfig();
assertionConfig1.id = "assertion1";
@@ -603,7 +609,7 @@ public void legacyTDFRoundTrips() throws IOException, NoSuchAlgorithmException {
@Test
void testKasAllowlist() throws Exception {
- KeyAccessServerRegistryServiceFutureStub kasRegistryServiceNoUrl = mock(KeyAccessServerRegistryServiceFutureStub.class);
+ KeyAccessServerRegistryServiceClient kasRegistryServiceNoUrl = mock(KeyAccessServerRegistryServiceClient.class);
List kasRegEntries = new ArrayList<>();
kasRegEntries.add(KeyAccessServer.newBuilder()
.setUri("http://example.com/kas0").build());
@@ -613,12 +619,19 @@ void testKasAllowlist() throws Exception {
.build();
// Stub the listKeyAccessServers method
- when(kasRegistryServiceNoUrl.listKeyAccessServers(any(ListKeyAccessServersRequest.class)))
- .thenReturn(com.google.common.util.concurrent.Futures.immediateFuture(mockResponse));
- io.grpc.Channel mockChannel = mock(io.grpc.Channel.class);
- when(mockChannel.authority()).thenReturn("mock:8080");
- when(kasRegistryServiceNoUrl.getChannel()).thenReturn(mockChannel);
-
+ when(kasRegistryServiceNoUrl.listKeyAccessServersBlocking(any(ListKeyAccessServersRequest.class), any()))
+ .thenReturn(new UnaryBlockingCall<>() {
+ @Override
+ public ResponseMessage<ListKeyAccessServersResponse> execute() {
+ return new ResponseMessage.Success<>(mockResponse, Collections.emptyMap(), Collections.emptyMap());
+ }
+
+ @Override
+ public void cancel() {
+ // we never do this during tests
+ }
+ }
+ );
var rsaKasInfo = new Config.KASInfo();
rsaKasInfo.URL = "https://example.com/kas"+Integer.toString(0);