Merge branch 'main' into update/main/protobuf
yshyn-iohk authored May 21, 2024
2 parents 7ee7790 + c5df2a8 commit ab6df69
Showing 44 changed files with 227 additions and 477 deletions.
@@ -23,8 +23,7 @@ package object error {
final case class TooManyDidPublicKeyAccess(limit: Int, access: Option[Int]) extends OperationValidationError
final case class TooManyDidServiceAccess(limit: Int, access: Option[Int]) extends OperationValidationError
final case class InvalidArgument(msg: String) extends OperationValidationError
- final case class InvalidPublicKeyData(ids: Seq[String]) extends OperationValidationError
- final case class InvalidMasterKeyType(ids: Seq[String]) extends OperationValidationError
+ final case class InvalidMasterKeyData(ids: Seq[String]) extends OperationValidationError
}

}
@@ -8,7 +8,6 @@ import org.hyperledger.identus.shared.crypto.Apollo
import zio.*

import scala.collection.immutable.ArraySeq
- import scala.util.Failure

object DIDOperationValidator {
final case class Config(
@@ -51,7 +50,6 @@ private object CreateOperationValidator extends BaseOperationValidator {
_ <- validateUniquePublicKeyId(operation, extractKeyIds)
_ <- validateUniqueServiceId(operation, extractServiceIds)
_ <- validateMasterKeyIsSecp256k1(operation, extractKeyData)
- _ <- validateKeyData(operation, extractKeyData)
_ <- validateKeyIdIsUriFragment(operation, extractKeyIds)
_ <- validateKeyIdLength(config)(operation, extractKeyIds)
_ <- validateServiceIdIsUriFragment(operation, extractServiceIds)
@@ -102,7 +100,6 @@ private object UpdateOperationValidator extends BaseOperationValidator {
_ <- validateMaxPublicKeysAccess(config)(operation, extractKeyIds)
_ <- validateMaxServiceAccess(config)(operation, extractServiceIds)
_ <- validateMasterKeyIsSecp256k1(operation, extractKeyData)
- _ <- validateKeyData(operation, extractKeyData)
_ <- validateKeyIdIsUriFragment(operation, extractKeyIds)
_ <- validateKeyIdLength(config)(operation, extractKeyIds)
_ <- validateServiceIdIsUriFragment(operation, extractServiceIds)
@@ -360,45 +357,26 @@ private trait BaseOperationValidator {
UriUtils.normalizeUri(uri).contains(uri)
}

- protected def validateKeyData[T <: PrismDIDOperation](
- operation: T,
- keyDataExtractor: KeyDataExtractor[T]
- ): Either[OperationValidationError, Unit] = {
- val keys = keyDataExtractor(operation)
- val apollo = Apollo.default
- val parsedKeys = keys.map { case (id, _, keyData) =>
- val pk = keyData match {
- case PublicKeyData.ECKeyData(EllipticCurve.SECP256K1, x, y) =>
- apollo.secp256k1.publicKeyFromCoordinate(x.toByteArray, y.toByteArray)
- case PublicKeyData.ECKeyData(EllipticCurve.ED25519, x, _) =>
- apollo.ed25519.publicKeyFromEncoded(x.toByteArray)
- case PublicKeyData.ECKeyData(EllipticCurve.X25519, x, _) =>
- apollo.x25519.publicKeyFromEncoded(x.toByteArray)
- case PublicKeyData.ECCompressedKeyData(EllipticCurve.SECP256K1, data) =>
- apollo.secp256k1.publicKeyFromEncoded(data.toByteArray)
- case PublicKeyData.ECCompressedKeyData(EllipticCurve.ED25519, data) =>
- apollo.ed25519.publicKeyFromEncoded(data.toByteArray)
- case PublicKeyData.ECCompressedKeyData(EllipticCurve.X25519, data) =>
- apollo.x25519.publicKeyFromEncoded(data.toByteArray)
- }
- id -> pk
- }
-
- val invalidKeyDataIds = parsedKeys.collect { case (id, Failure(_)) => id }
- if (invalidKeyDataIds.isEmpty) Right(())
- else Left(OperationValidationError.InvalidPublicKeyData(invalidKeyDataIds))
- }
-
protected def validateMasterKeyIsSecp256k1[T <: PrismDIDOperation](
operation: T,
keyDataExtractor: KeyDataExtractor[T]
): Either[OperationValidationError, Unit] = {
val keys = keyDataExtractor(operation)
val masterKeys = keys.collect { case (id, InternalKeyPurpose.Master, keyData) => id -> keyData }
- val invalidKeyIds = masterKeys.filter(_._2.crv != EllipticCurve.SECP256K1).map(_._1)
+ val invalidKeyIds = masterKeys
+ .filter { case (_, pk) =>
+ pk match {
+ case PublicKeyData.ECKeyData(EllipticCurve.SECP256K1, x, y) =>
+ Apollo.default.secp256k1.publicKeyFromCoordinate(x.toByteArray, y.toByteArray).isFailure
+ case PublicKeyData.ECCompressedKeyData(EllipticCurve.SECP256K1, data) =>
+ Apollo.default.secp256k1.publicKeyFromEncoded(data.toByteArray).isFailure
+ case _ => true // master key must be secp256k1
+ }
+ }
+ .map(_._1)

if (invalidKeyIds.isEmpty) Right(())
- else Left(OperationValidationError.InvalidMasterKeyType(invalidKeyIds))
+ else Left(OperationValidationError.InvalidMasterKeyData(invalidKeyIds))
}

}
@@ -340,24 +340,6 @@ object DIDOperationValidatorSpec extends ZIOSpecDefault {
invalidArgumentContainsString("service id is invalid: [Wrong service]")
)
},
test("reject CreateOperation when publicKeyData is invalid") {
val op = createPrismDIDOperation(
publicKeys = Seq(
PublicKey(
id = "key-0",
purpose = VerificationRelationship.Authentication,
publicKeyData = PublicKeyData.ECKeyData(
crv = EllipticCurve.SECP256K1,
x = Base64UrlString.fromStringUnsafe("00"),
y = Base64UrlString.fromStringUnsafe("00")
)
)
)
)
assert(DIDOperationValidator(Config.default).validate(op))(
isLeft(equalTo(OperationValidationError.InvalidPublicKeyData(Seq("key-0"))))
)
},
test("reject CreateOperation when master key is not a secp256k1 key") {
val op = createPrismDIDOperation(
internalKeys = Seq(
@@ -369,11 +351,20 @@ object DIDOperationValidatorSpec extends ZIOSpecDefault {
x = Base64UrlString.fromStringUnsafe("11qYAYKxCrfVS_7TyWQHOg7hcvPapiMlrwIaaPcHURo"),
y = Base64UrlString.fromStringUnsafe("")
)
- )
+ ),
+ InternalPublicKey(
+ id = "master1",
+ purpose = InternalKeyPurpose.Master,
+ publicKeyData = PublicKeyData.ECKeyData(
+ crv = EllipticCurve.SECP256K1,
+ x = Base64UrlString.fromStringUnsafe("11qYAYKxCrfVS_7TyWQHOg7hcvPapiMlrwIaaPcHURo"),
+ y = Base64UrlString.fromStringUnsafe("")
+ )
+ )
)
)
assert(DIDOperationValidator(Config.default).validate(op))(
- isLeft(equalTo(OperationValidationError.InvalidMasterKeyType(Seq("master0"))))
+ isLeft(equalTo(OperationValidationError.InvalidMasterKeyData(Seq("master0", "master1"))))
)
}
).provideLayer(testLayer)
@@ -597,38 +588,32 @@ object DIDOperationValidatorSpec extends ZIOSpecDefault {
invalidArgumentContainsString("must not have both 'type' and 'serviceEndpoints' empty")
)
},
test("reject UpdateOperation publicKeyData is invalid") {
val action = UpdateDIDAction.AddKey(
PublicKey(
id = "key0",
purpose = VerificationRelationship.Authentication,
test("reject UpdateOperation when master key is not a secp256k1 key") {
val action1 = UpdateDIDAction.AddInternalKey(
InternalPublicKey(
id = "master0",
purpose = InternalKeyPurpose.Master,
publicKeyData = PublicKeyData.ECKeyData(
- crv = EllipticCurve.SECP256K1,
- x = Base64UrlString.fromStringUnsafe("00"),
- y = Base64UrlString.fromStringUnsafe("00")
+ crv = EllipticCurve.ED25519,
+ x = Base64UrlString.fromStringUnsafe("11qYAYKxCrfVS_7TyWQHOg7hcvPapiMlrwIaaPcHURo"),
+ y = Base64UrlString.fromStringUnsafe("")
)
)
)
- val op = updatePrismDIDOperation(Seq(action))
- assert(DIDOperationValidator(Config.default).validate(op))(
- isLeft(equalTo(OperationValidationError.InvalidPublicKeyData(Seq("key0"))))
- )
- },
- test("reject UpdateOperation when master key is not a secp256k1 key") {
- val action = UpdateDIDAction.AddInternalKey(
+ val action2 = UpdateDIDAction.AddInternalKey(
InternalPublicKey(
id = "master0",
id = "master1",
purpose = InternalKeyPurpose.Master,
publicKeyData = PublicKeyData.ECKeyData(
- crv = EllipticCurve.ED25519,
+ crv = EllipticCurve.SECP256K1,
x = Base64UrlString.fromStringUnsafe("11qYAYKxCrfVS_7TyWQHOg7hcvPapiMlrwIaaPcHURo"),
y = Base64UrlString.fromStringUnsafe("")
)
)
)
- val op = updatePrismDIDOperation(Seq(action))
+ val op = updatePrismDIDOperation(Seq(action1, action2))
assert(DIDOperationValidator(Config.default).validate(op))(
- isLeft(equalTo(OperationValidationError.InvalidMasterKeyType(Seq("master0"))))
+ isLeft(equalTo(OperationValidationError.InvalidMasterKeyData(Seq("master0", "master1"))))
)
}
)
@@ -203,13 +203,10 @@ final case class ManagedDIDKeyTemplate(
@description(ManagedDIDKeyTemplate.annotations.purpose.description)
@encodedExample(ManagedDIDKeyTemplate.annotations.purpose.example)
purpose: Purpose,
- // @description(ManagedDIDKeyTemplate.annotations.curve.description)
- // @encodedExample(ManagedDIDKeyTemplate.annotations.curve.example)
- // curve: Option[Curve]
- ) {
- // TODO: this curve option is hidden for now, to be added back after integration test with node
- def curve: Option[Curve] = None
- }
+ @description(ManagedDIDKeyTemplate.annotations.curve.description)
+ @encodedExample(ManagedDIDKeyTemplate.annotations.curve.example)
+ curve: Option[Curve]
+ )

object ManagedDIDKeyTemplate {
object annotations {
@@ -8,25 +8,25 @@
Related ADR/AIP: [Introduce REST HTTP for existing Node services](https://input-output.atlassian.net/wiki/spaces/AV2/pages/3454500948/AIP+-+001)

## Context and Problem Statement
- PRISM Platform v2 will contain the REST API. The decision was made by team consensus during the first AOH meeting to follow "OpenAPI specification first" approach and generate stubs, server side and client side code based on OAS.
+ Identus Platform will contain the REST API. The decision was made by team consensus during the first AOH meeting to follow the "OpenAPI specification first" approach and generate stubs, server-side and client-side code based on OAS.
Following this strategy we currently have 4-5 OAS files (Castor, Pollux, Mercury, Configuration).

The following tool was selected for code generation: [OpenAPI Tools](https://github.com/OpenAPITools/openapi-generator)

Instead of using the yaml file as the OpenAPI specification and openapi-generator for server and client stub generation, this ADR proposes to use the [Tapir](https://tapir.softwaremill.com/en/latest/index.html) Scala library as a DSL for the OpenAPI specification, `interpret` the endpoint definitions as Scala server and client stubs, generate the yaml file, and use openapi-generator for client stubs.
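To make the proposal concrete, here is a minimal sketch of what such an endpoint definition looks like in the Tapir DSL (the `DidRequest`/`DidResponse` types and the `dids` route are hypothetical illustrations, not part of this ADR):

```scala
import sttp.tapir.*
import sttp.tapir.generic.auto.*
import sttp.tapir.json.zio.*
import zio.json.{DeriveJsonCodec, JsonCodec}

// Hypothetical payloads, used only to illustrate the DSL.
final case class DidRequest(document: String)
final case class DidResponse(did: String)

object DidEndpoints:
  given JsonCodec[DidRequest] = DeriveJsonCodec.gen[DidRequest]
  given JsonCodec[DidResponse] = DeriveJsonCodec.gen[DidResponse]

  // A single endpoint definition: it can be interpreted as a server
  // stub, as a client stub, and as a fragment of the OpenAPI spec.
  val createDid: PublicEndpoint[DidRequest, String, DidResponse, Any] =
    endpoint.post
      .in("dids")
      .in(jsonBody[DidRequest])
      .errorOut(stringBody)
      .out(jsonBody[DidResponse])
```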

- Technology stack that is going to be used in PRISM v2 backend: Scala 3 + ZIO ecosystem
+ Technology stack that is going to be used in the Identus platform backend: Scala 3 + ZIO ecosystem

Akka framework after version 2.6.x cannot be used because [Lightbend changed the license type to BSL 1.1](https://www.lightbend.com/blog/why-we-are-changing-the-license-for-akka).

It looks like Akka 2.6.x can still be used according to the [License FAQ](https://www.lightbend.com/akka/license-faq).

Currently, we have code generation for Akka that is wrapped in ZIO. Code generation mustache templates for ZIO-http are not available in OpenAPI tools.

Mustache templates and code generation don't work out of the box, so the original templates were copied to the project and fixed by @Shota and @Pat.
The current templates and generator contain constraints that were reported by [@Pat](https://docs.google.com/document/d/1WhUtflM_o-5uSx9LW76lycz2kbk071cVZiv6EtVwhAQ/edit#heading=h.ywcvgffenpz) and [@Shota](https://input-output-rnd.slack.com/archives/G018JE9NHAM/p1664563129397819), which requires engineering time to adapt the OAS for code generation. @Ben says that we can live with these constraints.

Generally, OAS files are written by engineers with different experience and different views on formatting, schemas, normalization, and data types. For instance, the current templates don't have
- a consistent way for paginating the entities
- standard Responses for 4xx and 5xx errors
- normalized data types (we use ```anyOf```, ```allOf```)
@@ -57,21 +57,21 @@ Quality and formatting of autogenerated code depend on the template (not all tem
- reuse endpoint definitions for creating server and client stubs in Scala
- align the server side of REST API with the current technology stack (ZIO + ecosystem)
- have control over the codebase and data types
- reduce the time spent maintaining the code (either the OAS must be adapted for the generator or the mustache templates must be fixed)
- implement non-functional requirements (metrics, tracing, logging) in a functional way
- straightforward generation of Swagger UI, Redoc, and AsyncAPI documentation based on endpoint definitions (see the sketch after this list)
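As a sketch of the last two goals, the same endpoint definitions can be bundled into Swagger UI routes (this assumes the hypothetical `DidEndpoints.createDid` endpoint sketched above; exact interpreter APIs vary across Tapir versions):

```scala
import sttp.tapir.server.ServerEndpoint
import sttp.tapir.swagger.bundle.SwaggerInterpreter
import zio.Task

// Swagger UI routes derived from the same endpoint definitions that
// drive the server stub and the generated OpenAPI specification.
val swaggerEndpoints: List[ServerEndpoint[Any, Task]] =
  SwaggerInterpreter()
    .fromEndpoints[Task](List(DidEndpoints.createDid), "Identus Platform", "1.0")
```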

## Considered Options

- use OpenAPI tools (edit OAS manually, generate a server stub for Akka and client stubs for any other languages)
- use OpenAPI tools, but generate code for another server-side library (Play, Finch, Lagom)
- use Tapir library (edit endpoint definitions as Scala code, reuse endpoint definitions for server stubs, generate OAS based on endpoint definitions, generate client stubs for any other language)

## Decision Outcome

Chosen option:"use Tapir library" till the end of the year, evaluate this solution in 2023

All endpoint definitions are written in the Tapir DSL.

The OpenAPI specification is generated from the endpoint definitions and published as an artefact (this must be a part of CI).
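A minimal sketch of that generation step (again assuming the hypothetical `DidEndpoints.createDid` endpoint from above; the YAML-rendering import has moved between Tapir versions):

```scala
import sttp.apispec.openapi.circe.yaml.*
import sttp.tapir.docs.openapi.OpenAPIDocsInterpreter

// Interpret the endpoint definitions into an OpenAPI model and render
// it as YAML, e.g. so a CI step can publish it as an artefact.
val openApiYaml: String =
  OpenAPIDocsInterpreter()
    .toOpenAPI(List(DidEndpoints.createDid), "Identus Platform", "1.0")
    .toYaml
```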

@@ -1,30 +1,30 @@
- # Store private keys of Issuers inside prism-agent
+ # Store private keys of Issuers inside the cloud-agent

- Status: accepted
- Deciders: Benjamin Voiturier, Pat Losoponkul, Miloš Džepina, Shailesh Patil, Shota Jolbordi, Bart Suichies, Ezequiel Postan, Yurii Shynbuiev, David Poltorak
- Date: 2022-10-05

## Context and Problem Statement

- While each holder has a wallet application on the phone (edge agent) to store private keys, contacts, and credentials, PRISM 2.0 will provide a custodial solution to Issuers and Verifiers. Thus they won't have their wallets or store/manage keys. There needs to be storage for the private keys of issuers and Verifiers on the PRISM side.
+ While each holder has a wallet application on the phone (edge agent) to store private keys, contacts, and credentials, the Identus Cloud Agent will provide a custodial solution to Issuers and Verifiers. Thus, they won't have their own wallets or store/manage keys. There needs to be storage for the private keys of Issuers and Verifiers on the Cloud Agent side.


## Considered Options

- - Having issuers store and manage their own keys on the edge wallet (prism 1.4 approach)
- - Storing keys in a dedicated wallet application that is connected to prism-agent (cloud agent)
- - Having prism-agent store and manage keys directly
+ - Having issuers store and manage their own keys on the edge wallet (Prism 1.4 approach)
+ - Storing keys in a dedicated wallet application that is connected to the cloud agent
+ - Having the cloud agent store and manage keys directly


## Decision Outcome

Chosen option: Option 3, because it is the simplest approach that satisfies the needs of providing the Issuer and Verifier with key storage while also not requiring them to manage their own keys. Option 3 was chosen instead of Option 2 because it achieves the same goal but does not require work on integrating another wallet application, so in short, it is simpler and faster to implement.

### Negative Consequences <!-- optional -->

While Option 3 is simpler to implement than Option 2 and provides the basic functionality required to solve the problem emphasized in [Context and Problem Statement](#context-and-problem-statement), it does not provide the full functionality and security of a widely used and well-tested wallet application. Therefore this decision is considered to be temporary and made only in the interest of solving the problem as fast as possible.


## Links

- [Recording of the meeting where decision was made](https://drive.google.com/file/d/120YyW2IEpl-F-6kF0V0Fau4bM7BbQ6mT/view?usp=sharing)