From 0dc8d9e72c05fff7928f31ee89cb257c2d46d0c5 Mon Sep 17 00:00:00 2001 From: Ezequiel Postan Date: Tue, 16 Apr 2024 21:54:35 -0300 Subject: [PATCH] refactor: Delete legacy operation related to credentials (#831) * ATL-6668: Delete legacy operation related to credentials This commit deletes the IssueCredentialBatchOperation and RevokeCredentialsOperation. It does not delete node API, daos nor repositories related to credentials * ATL-6668: Delete more files related to VCs This commit deletes even more files related to VC operations. It also removes some dependencies on the old SDK * ATL-6668: Delete unused tables This commit deletes tables and indexes from VC legacy operations * ATL-6668: Fix formatting * ATL-6668: Address review comments This commit deletes references to legacy VC code in protobuf and sql definitions --- build.sbt | 19 +- node/src/main/protobuf/common_models.proto | 84 +++ node/src/main/protobuf/health.proto | 63 +++ node/src/main/protobuf/node_api.proto | 313 +++++++++++ node/src/main/protobuf/node_models.proto | 252 +++++++++ node/src/main/protobuf/status.proto | 47 ++ .../db/migration/V1__create_tables.sql | 52 -- .../io/iohk/atala/prism/node/NodeApp.scala | 3 - .../node/NodeExplorerGrpcServiceImpl.scala | 6 - .../prism/node/NodeGrpcServiceImpl.scala | 60 --- .../atala/prism/node/UnderlyingLedger.scala | 4 +- .../cardano/models/AtalaObjectMetadata.scala | 12 +- .../atala/prism/node/errors/PrismError.scala | 2 +- .../atala/prism/node/interop/implicits.scala | 8 - .../logging/GeneralLoggableInstances.scala | 13 - .../node/metrics/OperationsCounters.scala | 6 - .../node/metrics/StatisticsCounters.scala | 2 - .../prism/node/models/AtalaObjectId.scala | 4 +- .../prism/node/models/AtalaObjectInfo.scala | 14 +- .../atala/prism/node/models/package.scala | 30 +- .../IssueCredentialBatchOperation.scala | 130 ----- .../prism/node/operations/ParsingUtils.scala | 25 +- .../RevokeCredentialsOperation.scala | 153 ------ .../atala/prism/node/operations/package.scala | 16 +- .../node/operations/protocolVersion.scala | 4 +- .../CredentialBatchesRepository.scala | 94 ---- .../node/repositories/daos/BaseDAO.scala | 9 - .../daos/CredentialBatchesDAO.scala | 125 ----- .../node/repositories/daos/package.scala | 81 +-- .../CredentialBatchesRepositoryLogs.scala | 55 -- .../CredentialBatchesRepositoryMetrics.scala | 33 -- .../services/BlockProcessingService.scala | 6 +- .../node/services/CardanoLedgerService.scala | 4 +- .../node/services/InMemoryLedgerService.scala | 4 +- .../prism/node/services/NodeService.scala | 94 +--- .../services/ObjectManagementService.scala | 10 +- .../node/services/StatisticsService.scala | 12 +- .../node/services/SubmissionService.scala | 8 +- .../services/logs/NodeServiceLogging.scala | 28 +- .../services/logs/UnderlyingLedgerLogs.scala | 2 +- .../prism/node/services/models/package.scala | 30 +- .../atala/prism/node/utils/GrpcUtils.scala | 2 +- .../atala/prism/node/DataPreparation.scala | 76 +-- .../prism/node/NodeExplorerServiceSpec.scala | 19 +- .../atala/prism/node/NodeServiceSpec.scala | 463 +--------------- .../models/AtalaObjectMetadataSpec.scala | 10 +- .../node/metrics/OperationsCounterSpec.scala | 6 - .../node/models/AtalaObjectInfoSpec.scala | 6 +- .../prism/node/nonce/ClientHelperSpec.scala | 4 +- .../IssueCredentialBatchOperationSpec.scala | 327 ------------ .../RevokeCredentialsOperationSpec.scala | 498 ------------------ .../prism/node/poc/CredVerification.scala | 139 ----- .../atala/prism/node/poc/EncodedSizes.scala | 79 --- 
.../node/poc/GenericCredentialsSDK.scala | 45 -- .../io/iohk/atala/prism/node/poc/Wallet.scala | 237 --------- .../prism/node/poc/batch/Connector.scala | 32 -- .../atala/prism/node/poc/batch/FlowPoC.scala | 297 ----------- .../node/poc/batch/ManagementConsole.scala | 71 --- .../poc/estimations/CardanoFeeEstimator.scala | 348 ------------ .../CredentialBatchesRepositorySpec.scala | 278 ---------- ...aObjectTransactionSubmissionsDAOSpec.scala | 6 +- .../daos/AtalaObjectsDAOSpec.scala | 12 +- .../prism/node/repositories/package.scala | 5 - .../services/BlockProcessingServiceSpec.scala | 20 +- .../CardanoLedgerServiceIntegrationSpec.scala | 8 +- .../services/CardanoLedgerServiceSpec.scala | 12 +- .../ObjectManagementServiceSpec.scala | 8 +- .../node/services/SubmissionServiceSpec.scala | 14 +- .../prism/node/utils/GrpcUtilsSpec.scala | 13 +- .../prism/node/utils/NodeClientUtils.scala | 52 -- 70 files changed, 913 insertions(+), 4091 deletions(-) create mode 100644 node/src/main/protobuf/common_models.proto create mode 100644 node/src/main/protobuf/health.proto create mode 100644 node/src/main/protobuf/node_api.proto create mode 100644 node/src/main/protobuf/node_models.proto create mode 100644 node/src/main/protobuf/status.proto delete mode 100644 node/src/main/scala/io/iohk/atala/prism/node/operations/IssueCredentialBatchOperation.scala delete mode 100644 node/src/main/scala/io/iohk/atala/prism/node/operations/RevokeCredentialsOperation.scala delete mode 100644 node/src/main/scala/io/iohk/atala/prism/node/repositories/CredentialBatchesRepository.scala delete mode 100644 node/src/main/scala/io/iohk/atala/prism/node/repositories/daos/CredentialBatchesDAO.scala delete mode 100644 node/src/main/scala/io/iohk/atala/prism/node/repositories/logs/CredentialBatchesRepositoryLogs.scala delete mode 100644 node/src/main/scala/io/iohk/atala/prism/node/repositories/metrics/CredentialBatchesRepositoryMetrics.scala delete mode 100644 node/src/test/scala/io/iohk/atala/prism/node/operations/IssueCredentialBatchOperationSpec.scala delete mode 100644 node/src/test/scala/io/iohk/atala/prism/node/operations/RevokeCredentialsOperationSpec.scala delete mode 100644 node/src/test/scala/io/iohk/atala/prism/node/poc/CredVerification.scala delete mode 100644 node/src/test/scala/io/iohk/atala/prism/node/poc/EncodedSizes.scala delete mode 100644 node/src/test/scala/io/iohk/atala/prism/node/poc/GenericCredentialsSDK.scala delete mode 100644 node/src/test/scala/io/iohk/atala/prism/node/poc/Wallet.scala delete mode 100644 node/src/test/scala/io/iohk/atala/prism/node/poc/batch/Connector.scala delete mode 100644 node/src/test/scala/io/iohk/atala/prism/node/poc/batch/FlowPoC.scala delete mode 100644 node/src/test/scala/io/iohk/atala/prism/node/poc/batch/ManagementConsole.scala delete mode 100644 node/src/test/scala/io/iohk/atala/prism/node/poc/estimations/CardanoFeeEstimator.scala delete mode 100644 node/src/test/scala/io/iohk/atala/prism/node/repositories/CredentialBatchesRepositorySpec.scala delete mode 100644 node/src/test/scala/io/iohk/atala/prism/node/utils/NodeClientUtils.scala diff --git a/build.sbt b/build.sbt index ea61acbfd5..ceb4c8ffd4 100644 --- a/build.sbt +++ b/build.sbt @@ -108,19 +108,10 @@ lazy val Dependencies = new { // We have to exclude bouncycastle since for some reason bitcoinj depends on bouncycastle jdk15to18 // (i.e. 
JDK 1.5 to 1.8), but we are using JDK 11 - val prismCredentials = - "io.iohk.atala" % "prism-credentials-jvm" % versions.prismSdk excludeAll ExclusionRule( - organization = "org.bouncycastle" - ) - val prismProtos = - "io.iohk.atala" % "prism-protos-jvm" % versions.prismSdk % "protobuf-src" intransitive () - val vaultProtos = - "io.iohk.atala" % "vault-api-jvm" % versions.vaultSdk % "protobuf-src" intransitive () - // Can be used only in tests! - val prismApi = - "io.iohk.atala" % "prism-api-jvm" % versions.prismSdk % Test excludeAll ExclusionRule( - organization = "org.bouncycastle" - ) + val prismCrypto = + "io.iohk.atala" % "prism-crypto-jvm" % versions.prismSdk + val prismIdentity = + "io.iohk.atala" % "prism-identity-jvm" % versions.prismSdk // Test dependencies val catsScalatest = @@ -163,7 +154,7 @@ lazy val Dependencies = new { val sttpDependencies = Seq(sttpCore, sttpCE2) val tofuDependencies = Seq(tofu, tofuLogging, tofuDerevoTagless) val prismDependencies = - Seq(prismCredentials, prismProtos, prismApi, vaultProtos) + Seq(prismCrypto, prismIdentity) val scalapbDependencies = Seq( "com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion % "protobuf", "com.thesamet.scalapb" %% "scalapb-runtime-grpc" % scalapb.compiler.Version.scalapbVersion diff --git a/node/src/main/protobuf/common_models.proto b/node/src/main/protobuf/common_models.proto new file mode 100644 index 0000000000..818b8755c8 --- /dev/null +++ b/node/src/main/protobuf/common_models.proto @@ -0,0 +1,84 @@ +syntax = "proto3"; + +option java_multiple_files = true; +option java_package = "io.iohk.atala.prism.protos"; + +package io.iohk.atala.prism.protos; + +import "status.proto"; +import "google/protobuf/timestamp.proto"; + +/** + * A request that can be used to check service health. + * All PRISM services expose an RPC that accepts this message as request. + */ +message HealthCheckRequest {} + +/** + * A response that represents service health. + * Status code 0 with empty response represents a healthy and reachable service, + * while all other status codes represent issues with the service. + */ +message HealthCheckResponse {} + +/** + * The supported ledger types. Specifies which chain is used for storing transactions. + */ +enum Ledger { + reserved 2; // Removed BITCOIN_TESTNET + reserved "BITCOIN_TESTNET"; + reserved 3; // Removed BITCOIN_MAINNET + reserved "BITCOIN_MAINNET"; + + UNKNOWN_LEDGER = 0; // Invalid default value. + IN_MEMORY = 1; // Store transactions in memory instead of blockchain, used only for development. + CARDANO_TESTNET = 4; // Cardano testnet, used for testing. + CARDANO_MAINNET = 5; // Cardano mainnet, used in production. +} + +/** + * Information about a ledger block. + * See Ledger documentation for details on which ledgers are possible. + */ +message BlockInfo { + reserved 2; // Removed timestamp_deprecated field + reserved "timestamp_deprecated"; + + int32 number = 1; // Number of the block in the ledger. + int32 index = 3; // Index of the transaction within the block. + google.protobuf.Timestamp timestamp = 4; // Timestamp when the block was created. +} + +/** + * Information about a ledger transaction and the block that the transaction is included in. + */ +message TransactionInfo { + string transaction_id = 1; // Transaction ID. + Ledger ledger = 2; // Ledger the transaction was published to. + BlockInfo block = 3; // Block the transaction was included in. +} + +/** + * The status of an Atala operation. 
+ */ +enum OperationStatus { + UNKNOWN_OPERATION = 0; // The operation hasn't been received by the node service yet. + PENDING_SUBMISSION = 1; // The transaction containing this operation hasn't been published to the chain yet. + AWAIT_CONFIRMATION = 2; // The transaction containing this operation has been published to the chain, but hasn't been processed by PRISM yet. + CONFIRMED_AND_APPLIED = 3; // The operation has been successfully applied to the PRISM. + CONFIRMED_AND_REJECTED = 4; // The operation has been processed by PRISM, but rejected because of some error. +} + +message AtalaErrorMessage { + google.rpc.Status status = 1; +} + +message AtalaMessage { + oneof message { + AtalaErrorMessage atala_error_message = 9; + } +} + +message ConnectionsStatusRequest { + repeated string connection_tokens = 1; +} diff --git a/node/src/main/protobuf/health.proto b/node/src/main/protobuf/health.proto new file mode 100644 index 0000000000..53e6e03259 --- /dev/null +++ b/node/src/main/protobuf/health.proto @@ -0,0 +1,63 @@ +// Copyright 2015 The gRPC Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// The canonical version of this proto can be found at +// https://github.com/grpc/grpc-proto/blob/master/grpc/health/v1/health.proto + +syntax = "proto3"; + +package grpc.health.v1; + +option csharp_namespace = "Grpc.Health.V1"; +option go_package = "google.golang.org/grpc/health/grpc_health_v1"; +//option java_multiple_files = true; +//option java_outer_classname = "HealthProto"; +//option java_package = "io.grpc.health.v1"; + +message HealthCheckRequest { + string service = 1; +} + +message HealthCheckResponse { + enum ServingStatus { + UNKNOWN = 0; + SERVING = 1; + NOT_SERVING = 2; + SERVICE_UNKNOWN = 3; // Used only by the Watch method. + } + ServingStatus status = 1; +} + +service Health { + // If the requested service is unknown, the call will fail with status + // NOT_FOUND. + rpc Check(HealthCheckRequest) returns (HealthCheckResponse); + + // Performs a watch for the serving status of the requested service. + // The server will immediately send back a message indicating the current + // serving status. It will then subsequently send a new message whenever + // the service's serving status changes. + // + // If the requested service is unknown when the call is received, the + // server will send a message setting the serving status to + // SERVICE_UNKNOWN but will *not* terminate the call. If at some + // future point, the serving status of the service becomes known, the + // server will send a new message with the service's serving status. + // + // If the call terminates with status UNIMPLEMENTED, then clients + // should assume this method is not supported and should not retry the + // call. If the call terminates with any other status (including OK), + // clients should retry the call with appropriate exponential backoff. 
+  rpc Watch(HealthCheckRequest) returns (stream HealthCheckResponse);
+}
diff --git a/node/src/main/protobuf/node_api.proto b/node/src/main/protobuf/node_api.proto
new file mode 100644
index 0000000000..6c966d8573
--- /dev/null
+++ b/node/src/main/protobuf/node_api.proto
@@ -0,0 +1,313 @@
+syntax = "proto3";
+
+option java_multiple_files = true;
+option java_package = "io.iohk.atala.prism.protos";
+
+package io.iohk.atala.prism.protos;
+
+import "common_models.proto";
+import "node_models.proto";
+
+import "google/protobuf/timestamp.proto";
+
+/**
+ * Service for PRISM Node API. Provides a way to store, retrieve, and update
+ * Decentralized Identifiers (DIDs) into/from the underlying blockchain.
+ */
+service NodeService {
+    /**
+     * PUBLIC
+     *
+     * Sends a request that can be used to check service health.
+     * All PRISM services expose an RPC that accepts this message as request.
+     */
+    rpc HealthCheck(HealthCheckRequest) returns (HealthCheckResponse) {}
+
+    /**
+     * PUBLIC
+     *
+     * Retrieves the DID Document associated with a DID.
+     *
+     * Errors:
+     * - Invalid long form DID (INVALID_ARGUMENT)
+     * - DID format not supported (INVALID_ARGUMENT)
+     * - Invalid DID (INVALID_ARGUMENT)
+     * - Unknown DID (INVALID_ARGUMENT)
+     */
+    rpc GetDidDocument(GetDidDocumentRequest) returns (GetDidDocumentResponse) {}
+
+    /**
+     * PUBLIC
+     *
+     * Retrieves the Node version info.
+     */
+    rpc GetNodeBuildInfo(GetNodeBuildInfoRequest) returns (GetNodeBuildInfoResponse) {}
+
+    /**
+     * PUBLIC
+     *
+     * Retrieves the Node network protocol info.
+     */
+    rpc GetNodeNetworkProtocolInfo(GetNodeNetworkProtocolInfoRequest) returns (GetNodeNetworkProtocolInfoResponse) {}
+
+    /**
+     * PUBLIC
+     *
+     * Retrieves the status of an operation.
+     */
+    rpc GetOperationInfo(GetOperationInfoRequest) returns (GetOperationInfoResponse) {}
+
+    /**
+     * PUBLIC
+     *
+     * Timestamp of the latest block processed by PRISM Node.
+     */
+    rpc GetLastSyncedBlockTimestamp(GetLastSyncedBlockTimestampRequest) returns (GetLastSyncedBlockTimestampResponse) {}
+
+    /**
+     * PUBLIC
+     *
+     * Schedules a list of operations for further publication.
+     */
+    rpc ScheduleOperations(ScheduleOperationsRequest) returns (ScheduleOperationsResponse) {}
+}
+
+service NodeExplorerService {
+    /**
+     * WHITELISTED_DID
+     *
+     * Returns a list of scheduled but unconfirmed operations.
+     */
+    rpc GetScheduledOperations(GetScheduledOperationsRequest) returns (GetScheduledOperationsResponse) {}
+
+    /**
+     * WHITELISTED_DID
+     *
+     * Returns a list of wallet transactions.
+     */
+    rpc GetWalletTransactionsPaginated(GetWalletTransactionsRequest) returns (GetWalletTransactionsResponse) {}
+
+    /**
+     * WHITELISTED_DID
+     *
+     * Returns the Node wallet balance.
+     */
+    rpc GetWalletBalance(GetWalletBalanceRequest) returns (GetWalletBalanceResponse) {}
+
+    /**
+     * WHITELISTED_DID
+     *
+     * Retrieves the list of available metrics.
+     */
+    rpc GetAvailableMetrics(GetAvailableMetricsRequest) returns (GetAvailableMetricsResponse) {}
+
+    /**
+     * WHITELISTED_DID
+     *
+     * Retrieves node statistics.
+     */
+    rpc GetNodeStatistics(GetNodeStatisticsRequest) returns (GetNodeStatisticsResponse) {}
+}
+
+/**
+ * Request to retrieve statistics from the Node.
+ */
+message GetNodeStatisticsRequest {
+  repeated string metrics = 1;
+}
+
+/**
+ * Statistics from the Node.
+ */
+message GetNodeStatisticsResponse {
+  repeated double metrics = 1;
+}
+
+/**
+ * Request to find metrics exposed by Node.
+ * See NodeExplorerService.GetAvailableMetrics for more information.
+ */
+message GetAvailableMetricsRequest {}
+
+/**
+ * Response with a list of metrics exposed by Node.
+ * See NodeExplorerService.GetAvailableMetrics for more information.
+ */
+message GetAvailableMetricsResponse {
+  repeated string metrics = 1;
+}
+
+/**
+ * Request to find a DID Document.
+ * See NodeService.GetDidDocument for more information.
+ */
+message GetDidDocumentRequest {
+  string did = 1; // The DID.
+}
+/**
+ * Response to a DID Document query.
+ * See NodeService.GetDidDocument for more information.
+ */
+message GetDidDocumentResponse {
+  DIDData document = 1; // The DID Document.
+  google.protobuf.Timestamp last_synced_block_timestamp = 5; // Timestamp of the latest synchronized block.
+  bytes last_update_operation = 6; // The hash of the last DID update operation.
+}
+
+/**
+ * Request to get the Node version info.
+ * See NodeService.GetNodeBuildInfo for more information.
+ */
+message GetNodeBuildInfoRequest {
+}
+/**
+ * Response with the Node version info.
+ * See NodeService.GetNodeBuildInfo for more information.
+ */
+message GetNodeBuildInfoResponse {
+  reserved 4;
+
+  string version = 1; // The actual version.
+  string scala_version = 2; // The Scala version used to compile the app.
+  string sbt_version = 3; // The SBT version used to compile the app.
+}
+
+/**
+ * Request to get the Node network protocol info.
+ * See NodeService.GetNodeNetworkProtocolInfo for more information.
+ */
+message GetNodeNetworkProtocolInfoRequest {
+}
+/**
+ * Response with the Node network protocol version info.
+ * See NodeService.GetNodeNetworkProtocolInfo for more information.
+ */
+message GetNodeNetworkProtocolInfoResponse {
+  ProtocolVersion supported_network_protocol_version = 5; // Network protocol version number supported by Node.
+  ProtocolVersion current_network_protocol_version = 6; // Current network protocol version number.
+}
+
+/**
+ * Request to get the operation status.
+ * See NodeService.GetOperationInfo for more information.
+ */
+message GetOperationInfoRequest {
+  bytes operation_id = 1; // Operation identifier. The identifier is returned in response to the corresponding operation request.
+}
+/**
+ * Response with the current operation status.
+ * See NodeService.GetOperationInfo for more information.
+ */
+message GetOperationInfoResponse {
+  OperationStatus operation_status = 1; // Contains the status of this operation.
+  string transaction_id = 3; // Transaction identifier containing the operation. Present only when the operation was approved by the ledger.
+  google.protobuf.Timestamp last_synced_block_timestamp = 2; // Timestamp of the latest synchronized block.
+  string details = 4; // Contains additional information about the operation state. For example, error descriptions. Can be empty.
+}
+
+/**
+ * Request to retrieve the timestamp of the latest synchronized (processed by PRISM Node) block.
+ * See NodeService.GetLastSyncedBlockTimestamp for more information.
+ */
+message GetLastSyncedBlockTimestampRequest {
+}
+/**
+ * Response with the timestamp of the latest synchronized (processed by PRISM Node) block.
+ * See NodeService.GetLastSyncedBlockTimestamp for more information.
+ */
+message GetLastSyncedBlockTimestampResponse {
+  /**
+   * Timestamp of the latest synchronized (processed by PRISM Node) block.
+   */
+  google.protobuf.Timestamp last_synced_block_timestamp = 1;
+}
+
+/**
+ * Request to schedule a list of operations for publication.
+ */
+message ScheduleOperationsRequest {
+  /**
+   * List of signed operations to apply. The operations will be applied in the order specified here.
+   */
+  repeated SignedAtalaOperation signed_operations = 1; // A list of signed operations.
+}
+
+/**
+ * Response with the transaction info and operation outputs.
+ */
+message ScheduleOperationsResponse {
+  /**
+   * The responses for scheduled operations, ordered the same as the operations sent in ScheduleOperationsRequest.
+   */
+  repeated OperationOutput outputs = 1;
+}
+
+/**
+ * Request to retrieve all scheduled but not yet confirmed Atala operations.
+ */
+message GetScheduledOperationsRequest {
+  /**
+   * The type of operations that should be returned.
+   */
+  OperationType operationsType = 1;
+
+  enum OperationType {
+    AnyOperationType = 0; // Any operation.
+    CreateDidOperationOperationType = 1;
+    UpdateDidOperationOperationType = 2;
+    ProtocolVersionUpdateOperationType = 5;
+  }
+}
+
+message GetScheduledOperationsResponse {
+  repeated SignedAtalaOperation scheduled_operations = 1; // A list of scheduled operations.
+}
+
+/**
+ * Request to retrieve wallet transactions, either ongoing or confirmed.
+ * Pagination included.
+ */
+message GetWalletTransactionsRequest {
+  TransactionState state = 1; // Transaction state: either ongoing or confirmed.
+  string last_seen_transaction_id = 2; // Last seen transaction ID.
+  int32 limit = 3; // The maximum number of transactions to return; must be greater than 0.
+
+  enum TransactionState {
+    Ongoing = 0; // Transactions which haven't been confirmed by PRISM Node.
+    Confirmed = 1; // Transactions which have been confirmed by PRISM Node.
+  }
+}
+
+message GetWalletTransactionsResponse {
+  repeated TransactionInfo transactions = 1;
+}
+
+message GetWalletBalanceRequest {
+}
+message GetWalletBalanceResponse {
+  bytes balance = 1;
+}
+
+// Used to encode the responses of the operations issued in an AtalaBlock.
+message OperationOutput {
+  oneof result {
+    // Represents the response provided by CreateDIDOperation.
+    CreateDIDOutput create_did_output = 2;
+    // Represents the response provided by UpdateDIDOperation.
+    UpdateDIDOutput update_did_output = 3;
+    // Represents the response provided by ProtocolVersionUpdateOperation.
+    ProtocolVersionUpdateOutput protocol_version_update_output = 7;
+    DeactivateDIDOutput deactivate_did_output = 8;
+  }
+  oneof operation_maybe {
+    bytes operation_id = 5; // Operation identifier.
+    string error = 6; // Error description if the PRISM Node service hasn't scheduled the operation.
+  }
+}
+
+message CreateDIDOutput {
+  string did_suffix = 1;
+}
+message UpdateDIDOutput {}
+message ProtocolVersionUpdateOutput {}
+message DeactivateDIDOutput {}
diff --git a/node/src/main/protobuf/node_models.proto b/node/src/main/protobuf/node_models.proto
new file mode 100644
index 0000000000..6d4e4aec3f
--- /dev/null
+++ b/node/src/main/protobuf/node_models.proto
@@ -0,0 +1,252 @@
+syntax = "proto3";
+
+option java_multiple_files = true;
+option java_package = "io.iohk.atala.prism.protos";
+
+package io.iohk.atala.prism.protos;
+
+import "common_models.proto";
+import "google/protobuf/timestamp.proto";
+
+// Includes timestamping details about a blockchain's block.
+message TimestampInfo {
+  reserved 1; // Removed blockTimestamp_deprecated field.
+  reserved "blockTimestamp_deprecated";
+
+  uint32 block_sequence_number = 2; // The transaction index inside the underlying block.
+  uint32 operation_sequence_number = 3; // The operation index inside the AtalaBlock.
+  google.protobuf.Timestamp block_timestamp = 4; // The timestamp provided from the underlying blockchain.
+}
+
+// Every key has a single purpose:
+enum KeyUsage {
+  // UNKNOWN_KEY is an invalid value - Protobuf uses 0 if no value is provided and we want the user to explicitly choose the usage.
+  UNKNOWN_KEY = 0;
+
+  // This is the most privileged key-type; when any other key is lost, you can use this one to recover the others.
+  MASTER_KEY = 1;
+
+  // This key-type is used for issuing credentials only; it should be kept in a safe place
+  // to avoid malicious credentials being issued.
+  ISSUING_KEY = 2;
+
+  // This key-type is used to establish a shared symmetric key for secure end-to-end communication;
+  // use this key-type to encrypt the content.
+  KEY_AGREEMENT_KEY = 3;
+
+  // This key-type is used to authenticate requests or log into services.
+  AUTHENTICATION_KEY = 4;
+
+  // This key-type is used for revoking credentials only; it should be kept in a safe place
+  // to avoid malicious credentials being revoked.
+  REVOCATION_KEY = 5;
+
+  // This key-type is used to specify a verification method that might be used by the DID subject to invoke a cryptographic capability,
+  // such as the authorization to update the DID Document.
+  CAPABILITY_INVOCATION_KEY = 6;
+
+  // This is used to specify a mechanism that might be used by the DID subject to delegate a cryptographic capability to another party,
+  // such as delegating the authority to access a specific HTTP API to a subordinate.
+  CAPABILITY_DELEGATION_KEY = 7;
+}
+
+/**
+ * Holds the necessary data to recover an Elliptic Curve (EC) public key.
+ * @exclude TODO: Consider renaming this to ECPublicKeyData.
+ */
+message ECKeyData {
+  string curve = 1; // The curve name, like secp256k1.
+  bytes x = 2; // The x coordinate, represented as bytes.
+  bytes y = 3; // The y coordinate, represented as bytes.
+}
+
+/**
+ * Holds the compressed representation of data needed to recover an Elliptic Curve (EC) public key.
+ * @exclude TODO: Consider renaming this to CompressedECPublicKeyData.
+ */
+message CompressedECKeyData {
+  string curve = 1; // The curve name, like secp256k1.
+  bytes data = 2; // Compressed Elliptic Curve (EC) public key data.
+}
+
+/**
+ * Represents a public key with metadata, necessary for a DID document.
+ * @exclude TODO: Consider renaming this to something more specific, like DIDPublicKey.
+ */
+message PublicKey {
+  reserved 3, 4;
+  string id = 1; // The key identifier within the DID Document.
+  KeyUsage usage = 2; // The key's purpose.
+  LedgerData added_on = 5; // The ledger details related to the event that added the key to the DID Document.
+  LedgerData revoked_on = 6; // The ledger details related to the event that revoked the key from the DID Document.
+
+  // The key's representation.
+  oneof key_data {
+    ECKeyData ec_key_data = 8; // The Elliptic Curve (EC) key.
+    CompressedECKeyData compressed_ec_key_data = 9; // Compressed Elliptic Curve (EC) key.
+  };
+}
+
+// The DID Document's data.
+message DIDData {
+  string id = 1; // The DID suffix, where DID is in the form did:prism:[DID suffix].
+  repeated PublicKey public_keys = 2; // The keys that belong to this DID Document.
+  repeated Service services = 3; // The list of services that belong to this DID Document.
+  repeated string context = 4; // The list of @context values to consider on JSON-LD representations.
+}
+
+// The operation to create a public DID.
+message CreateDIDOperation {
+  DIDCreationData did_data = 1; // DIDCreationData with public keys and services.
+
+  // The data necessary to create a DID.
+  message DIDCreationData {
+    reserved 1; // Removed DID id field, which is empty on creation.
+    repeated PublicKey public_keys = 2; // The keys that belong to this DID Document.
+    repeated Service services = 3; // The list of services that belong to this DID Document.
+    repeated string context = 4; // The list of @context values to consider on JSON-LD representations.
+  }
+}
+
+// The necessary data to add a key to a DID.
+message AddKeyAction {
+  PublicKey key = 1; // The key to include.
+}
+
+// The necessary data to remove a key from a DID.
+message RemoveKeyAction {
+  string keyId = 1; // The ID of the key to remove.
+}
+
+message AddServiceAction {
+  Service service = 1;
+}
+
+message RemoveServiceAction {
+  string serviceId = 1;
+}
+
+message UpdateServiceAction {
+  string serviceId = 1; // Scoped to the DID, unique per DID.
+  string type = 2; // The new type, if provided.
+  // Will replace all existing service endpoints of the service with the provided ones.
+  string service_endpoints = 3;
+}
+
+message PatchContextAction {
+  repeated string context = 1; // The list of strings to be used by resolvers during resolution when producing a JSON-LD output.
+}
+
+// The potential details that can be updated in a DID.
+message UpdateDIDAction {
+
+  // The action to perform.
+  oneof action {
+    AddKeyAction add_key = 1; // Used to add a new key to the DID.
+    RemoveKeyAction remove_key = 2; // Used to remove a key from the DID.
+    AddServiceAction add_service = 3; // Used to add a new service to a DID.
+    RemoveServiceAction remove_service = 4; // Used to remove an existing service from a DID.
+    UpdateServiceAction update_service = 5; // Used to update the list of service endpoints of a given service on a given DID.
+    PatchContextAction patch_context = 6; // Used to update the list of `@context` strings used during resolution for a given DID.
+  }
+}
+
+// Specifies the necessary data to update a public DID.
+message UpdateDIDOperation {
+  bytes previous_operation_hash = 1; // The hash of the operation that issued the DID.
+  string id = 2; // @exclude TODO: To be redefined after we start using this operation.
+  repeated UpdateDIDAction actions = 3; // The actual updates to perform on the DID.
+}
+
+// Specifies the protocol version update.
+message ProtocolVersionUpdateOperation {
+  string proposer_did = 1; // The DID suffix that proposes the protocol update.
+  ProtocolVersionInfo version = 2; // Information about the new version.
+}
+
+
+message ProtocolVersion {
+  // Represents the major version.
+  int32 major_version = 1;
+  // Represents the minor version.
+  int32 minor_version = 2;
+}
+
+message ProtocolVersionInfo {
+  reserved 2, 3;
+  string version_name = 1; // (optional) Name of the version.
+  int32 effective_since = 4; // Cardano block number that tells since which block the update is enforced.
+
+  // The new major and minor version to be announced.
+  // If the major value changes, the node MUST stop issuing and reading operations, and upgrade before `effective_since`, because of the new protocol version.
+  // If the minor value changes, the node can opt not to update. All events _published_ by this node would also be
+  // understood by other nodes with the same major version. However, there may be new events that this node won't _read_.
+  ProtocolVersion protocol_version = 5;
+}
+
+message DeactivateDIDOperation {
+  bytes previous_operation_hash = 1; // The hash of the operation that issued the DID.
+  string id = 2; // DID suffix of the DID to be deactivated.
+}
+
+// The possible operations affecting the blockchain.
+message AtalaOperation {
+  // The actual operation.
+  oneof operation {
+    // Used to create a public DID.
+    CreateDIDOperation create_did = 1;
+
+    // Used to update an existing public DID.
+    UpdateDIDOperation update_did = 2;
+
+    // Used to announce a new protocol update.
+    ProtocolVersionUpdateOperation protocol_version_update = 5;
+
+    // Used to deactivate a DID.
+    DeactivateDIDOperation deactivate_did = 6;
+  };
+}
+
+// A signed operation, necessary to post anything on the blockchain.
+message SignedAtalaOperation {
+  string signed_with = 1; // The key ID used to sign the operation; it must belong to the DID that signs the operation.
+  bytes signature = 2; // The actual signature.
+  AtalaOperation operation = 3; // The operation that was signed.
+}
+
+// Ledger data associated with a protocol event.
+// Note that the difference with TransactionInfo is that this message contains a full
+// timestamp, and there is no expectation for it to be optional.
+message LedgerData {
+  string transaction_id = 1; // ID of the transaction.
+  Ledger ledger = 2; // Ledger the transaction was published to.
+  TimestampInfo timestamp_info = 3; // The timestamp of the protocol event.
+}
+
+message Service {
+  string id = 1;
+  string type = 2;
+
+  string service_endpoint = 3; // Can be one URI, a JSON object, or an array of either URIs or objects.
+  LedgerData added_on = 4; // (when present) The ledger details related to the event that added the service.
+  LedgerData deleted_on = 5; // (when present) The ledger details related to the event that revoked the service.
+}
+
+/**
+ * Represents a block that holds operations.
+ * @exclude Intended for internal usage only. Not publicly accessible from gRPC.
+ */
+message AtalaBlock {
+  reserved 1; // Represents the version of the block. Deprecated.
+  repeated SignedAtalaOperation operations = 2; // The signed operations included in this block.
+}
+
+/**
+ * Wraps an AtalaBlock and its metadata.
+ * @exclude Intended for internal usage only. Not publicly accessible from gRPC.
+ */
+message AtalaObject {
+  reserved 1, 2, 3; // Removed block_hash, block_operation_count, and block_byte_length fields.
+  reserved "block_hash", "block_operation_count", "block_byte_length";
+  AtalaBlock block_content = 4; // The block content.
+}
\ No newline at end of file
diff --git a/node/src/main/protobuf/status.proto b/node/src/main/protobuf/status.proto
new file mode 100644
index 0000000000..5bd51aa2f3
--- /dev/null
+++ b/node/src/main/protobuf/status.proto
@@ -0,0 +1,47 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +syntax = "proto3"; + +package google.rpc; + +import "google/protobuf/any.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/rpc/status;status"; +option java_multiple_files = true; +option java_outer_classname = "StatusProto"; +option java_package = "com.google.rpc"; +option objc_class_prefix = "RPC"; + +// The `Status` type defines a logical error model that is suitable for +// different programming environments, including REST APIs and RPC APIs. It is +// used by [gRPC](https://github.com/grpc). Each `Status` message contains +// three pieces of data: error code, error message, and error details. +// +// You can find out more about this error model and how to work with it in the +// [API Design Guide](https://cloud.google.com/apis/design/errors). +message Status { + // The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]. + int32 code = 1; + + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client. + string message = 2; + + // A list of messages that carry the error details. There is a common set of + // message types for APIs to use. + repeated google.protobuf.Any details = 3; +} \ No newline at end of file diff --git a/node/src/main/resources/db/migration/V1__create_tables.sql b/node/src/main/resources/db/migration/V1__create_tables.sql index ed6c7eb99b..88cf2d05ea 100644 --- a/node/src/main/resources/db/migration/V1__create_tables.sql +++ b/node/src/main/resources/db/migration/V1__create_tables.sql @@ -28,9 +28,6 @@ CREATE DOMAIN public.blockhash_type AS BYTEA CONSTRAINT blockhash_type_check CHE CREATE DOMAIN public.content_hash AS BYTEA CONSTRAINT content_hash_check CHECK (length(VALUE) = 32); -CREATE DOMAIN public.credential_hash AS BYTEA CONSTRAINT credential_hash_check CHECK (length(VALUE) = 32); - - CREATE DOMAIN public.did AS text COLLATE "default" CONSTRAINT did_check CHECK (VALUE ~ '^did:[a-z0-9]+:[a-zA-Z0-9._-]*(:[a-zA-Z0-9._-]*)*$'::text); @@ -40,9 +37,6 @@ CREATE DOMAIN public.id_type AS text COLLATE "default" CONSTRAINT id_type_check CREATE TYPE public.key_usage AS ENUM ('MASTER_KEY', 'ISSUING_KEY', 'KEY_AGREEMENT_KEY', 'AUTHENTICATION_KEY', 'REVOCATION_KEY', 'CAPABILITY_INVOCATION_KEY', 'CAPABILITY_DELEGATION_KEY'); -CREATE DOMAIN public.merkle_root AS BYTEA CONSTRAINT merkle_root_check CHECK (length(VALUE) = 32); - - CREATE DOMAIN public.non_negative_int_type AS integer CONSTRAINT non_negative_int_type_check CHECK (VALUE >= 0); @@ -215,52 +209,6 @@ CREATE TABLE public.atala_operations ); -CREATE TABLE public.credential_batches -( - batch_id public.id_type NOT NULL, - last_operation public.operation_hash NOT NULL, - issuer_did_suffix public.id_type NOT NULL, - merkle_root public.merkle_root NOT NULL, - issued_on timestamptz NOT NULL, - issued_on_absn int4 NOT NULL, - issued_on_osn int4 NOT NULL, - revoked_on timestamptz NULL, - revoked_on_absn int4 NULL, - revoked_on_osn int4 NULL, - issued_on_transaction_id public.transaction_id NOT NULL, - revoked_on_transaction_id public.transaction_id NULL, - ledger varchar(32) NOT NULL, - CONSTRAINT credential_batches_pk PRIMARY KEY (batch_id), - CONSTRAINT revoke_on_check CHECK ((((revoked_on IS NULL) - AND (revoked_on_absn IS NULL) - AND (revoked_on_osn IS NULL)) - OR ((revoked_on IS NOT NULL) - AND (revoked_on_absn IS NOT NULL) - AND (revoked_on_osn IS NOT NULL)))), - 
CONSTRAINT credential_batches_issuer_did_suffix_fk - FOREIGN KEY (issuer_did_suffix) REFERENCES public.did_data (did_suffix) -); - - -CREATE INDEX credential_batches_issuer_did_suffix_index ON public.credential_batches USING btree (issuer_did_suffix); - - -CREATE TABLE public.revoked_credentials -( - batch_id public.id_type NOT NULL, - credential_id public.credential_hash NOT NULL, - revoked_on timestamptz NOT NULL, - revoked_on_absn int4 NOT NULL, - revoked_on_osn int4 NOT NULL, - transaction_id public.transaction_id NOT NULL, - ledger varchar(32) NOT NULL, - CONSTRAINT revoked_credentials_pk PRIMARY KEY (batch_id, - credential_id), - CONSTRAINT revoked_credentials_batch_id_fk - FOREIGN KEY (batch_id) REFERENCES public.credential_batches (batch_id) -); - - CREATE TABLE public.services ( service_id public.id_type NOT NULL, diff --git a/node/src/main/scala/io/iohk/atala/prism/node/NodeApp.scala b/node/src/main/scala/io/iohk/atala/prism/node/NodeApp.scala index 573f6cb244..1b39bf1577 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/NodeApp.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/NodeApp.scala @@ -125,13 +125,10 @@ class NodeApp(executionContext: ExecutionContext) { self => ), submissionService ) - credentialBatchesRepository <- - CredentialBatchesRepository.resource(liftedTransactor, logs) metricsCountersRepository <- MetricsCountersRepository.resource(liftedTransactor, logs) nodeService <- NodeService.resource( didDataRepository, objectManagementService, - credentialBatchesRepository, logs ) nodeStatisticsService <- StatisticsService.resource(atalaOperationsRepository, metricsCountersRepository, logs) diff --git a/node/src/main/scala/io/iohk/atala/prism/node/NodeExplorerGrpcServiceImpl.scala b/node/src/main/scala/io/iohk/atala/prism/node/NodeExplorerGrpcServiceImpl.scala index 4880ab21dd..73b097f9ca 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/NodeExplorerGrpcServiceImpl.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/NodeExplorerGrpcServiceImpl.scala @@ -19,9 +19,7 @@ import io.iohk.atala.prism.protos.node_api import io.iohk.atala.prism.protos.node_api.GetScheduledOperationsRequest.OperationType.{ AnyOperationType, CreateDidOperationOperationType, - IssueCredentialBatchOperationType, ProtocolVersionUpdateOperationType, - RevokeCredentialsOperationType, UpdateDidOperationOperationType } import io.iohk.atala.prism.protos.node_api._ @@ -102,10 +100,6 @@ class NodeExplorerGrpcServiceImpl( o.operation.isDefined && o.operation.get.operation.isCreateDid case UpdateDidOperationOperationType => o.operation.isDefined && o.operation.get.operation.isUpdateDid - case IssueCredentialBatchOperationType => - o.operation.isDefined && o.operation.get.operation.isIssueCredentialBatch - case RevokeCredentialsOperationType => - o.operation.isDefined && o.operation.get.operation.isRevokeCredentials case ProtocolVersionUpdateOperationType => o.operation.isDefined && o.operation.get.operation.isProtocolVersionUpdate case _ => false diff --git a/node/src/main/scala/io/iohk/atala/prism/node/NodeGrpcServiceImpl.scala b/node/src/main/scala/io/iohk/atala/prism/node/NodeGrpcServiceImpl.scala index 9e86cb8f86..262b11b9fb 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/NodeGrpcServiceImpl.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/NodeGrpcServiceImpl.scala @@ -9,7 +9,6 @@ import io.iohk.atala.prism.node.logging.TraceId.IOWithTraceIdContext import io.iohk.atala.prism.node.metrics.RequestMeasureUtil import 
io.iohk.atala.prism.node.metrics.RequestMeasureUtil.measureRequestFuture import io.iohk.atala.prism.node.errors.NodeError -import io.iohk.atala.prism.node.grpc.ProtoCodecs import io.iohk.atala.prism.node.models.AtalaObjectTransactionSubmissionStatus.InLedger import io.iohk.atala.prism.node.models.{ AtalaObjectTransactionSubmissionStatus, @@ -56,48 +55,6 @@ class NodeGrpcServiceImpl( } - override def getBatchState( - request: GetBatchStateRequest - ): Future[GetBatchStateResponse] = { - val methodName = "getBatchState" - - measureRequestFuture(serviceName, methodName) { - trace { traceId => - val query = for { - batchState <- nodeService.getBatchState(request.batchId) - } yield batchState.fold( - countAndThrowNodeError(methodName, _), - toGetBatchResponse - ) - query.run(traceId).unsafeToFuture() - } - } - } - - override def getCredentialRevocationTime( - request: GetCredentialRevocationTimeRequest - ): Future[GetCredentialRevocationTimeResponse] = { - val methodName = "getCredentialRevocationTime" - - measureRequestFuture(serviceName, methodName) { - trace { traceId => - for { - revocationTimeEither <- - nodeService - .getCredentialRevocationData(request.batchId, request.credentialHash) - .run(traceId) - .unsafeToFuture() - } yield revocationTimeEither match { - case Left(error) => countAndThrowNodeError(methodName, error) - case Right(ledgerData) => - GetCredentialRevocationTimeResponse( - revocationLedgerData = ledgerData.maybeLedgerData.map(ProtoCodecs.toLedgerData) - ).withLastSyncedBlockTimestamp(ledgerData.lastSyncedTimestamp.toProtoTimestamp) - } - } - } - } - override def scheduleOperations( request: node_api.ScheduleOperationsRequest ): Future[node_api.ScheduleOperationsResponse] = { @@ -281,23 +238,6 @@ object NodeGrpcServiceImpl { def countAndThrowNodeError(methodName: String, error: NodeError): Nothing = RequestMeasureUtil.countAndThrowNodeError(serviceName)(methodName, error.toStatus) - private def toGetBatchResponse( - in: BatchData - ) = { - val response = in.maybeBatchState.fold(GetBatchStateResponse()) { state => - val revocationLedgerData = state.revokedOn.map(ProtoCodecs.toLedgerData) - val responseBase = GetBatchStateResponse() - .withIssuerDid(state.issuerDIDSuffix.getValue) - .withMerkleRoot(ByteString.copyFrom(state.merkleRoot.getHash.getValue)) - .withIssuanceHash(ByteString.copyFrom(state.lastOperation.getValue)) - .withPublicationLedgerData(ProtoCodecs.toLedgerData(state.issuedOn)) - revocationLedgerData.fold(responseBase)( - responseBase.withRevocationLedgerData - ) - } - response.withLastSyncedBlockTimestamp(in.lastSyncedTimestamp.toProtoTimestamp) - } - private def countAndThrowGetDidDocumentError[I]( methodName: String, didRequestStr: String, diff --git a/node/src/main/scala/io/iohk/atala/prism/node/UnderlyingLedger.scala b/node/src/main/scala/io/iohk/atala/prism/node/UnderlyingLedger.scala index 6e34051d3a..879b898528 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/UnderlyingLedger.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/UnderlyingLedger.scala @@ -5,14 +5,14 @@ import derevo.tagless.applyK import io.iohk.atala.prism.node.models._ import io.iohk.atala.prism.node.cardano.models.CardanoWalletError import io.iohk.atala.prism.node.models.Balance -import io.iohk.atala.prism.protos.node_internal +import io.iohk.atala.prism.protos.node_models @derive(applyK) trait UnderlyingLedger[F[_]] { def getType: Ledger def publish( - obj: node_internal.AtalaObject + obj: node_models.AtalaObject ): F[Either[CardanoWalletError, PublicationInfo]] def 
getTransactionDetails( diff --git a/node/src/main/scala/io/iohk/atala/prism/node/cardano/models/AtalaObjectMetadata.scala b/node/src/main/scala/io/iohk/atala/prism/node/cardano/models/AtalaObjectMetadata.scala index 2dedd4ec0a..09cf177b4a 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/cardano/models/AtalaObjectMetadata.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/cardano/models/AtalaObjectMetadata.scala @@ -1,7 +1,7 @@ package io.iohk.atala.prism.node.cardano.models import io.circe.{ACursor, Json} -import io.iohk.atala.prism.protos.node_internal +import io.iohk.atala.prism.protos.node_models import io.iohk.atala.prism.node.utils.BytesOps import scala.util.Try @@ -29,7 +29,7 @@ object AtalaObjectMetadata { def fromTransactionMetadata( metadata: TransactionMetadata - ): Option[node_internal.AtalaObject] = { + ): Option[node_models.AtalaObject] = { val prismMetadata = metadata.json.hcursor .downField(METADATA_PRISM_INDEX.toString) @@ -44,7 +44,7 @@ object AtalaObjectMetadata { private def fromTransactionMetadataV1( prismMetadata: ACursor - ): Option[node_internal.AtalaObject] = { + ): Option[node_models.AtalaObject] = { val bytes = prismMetadata .downField(CONTENT_KEY) .focus @@ -56,7 +56,7 @@ object AtalaObjectMetadata { // Either the content does not exist, is not the right type, or is truly empty None } else { - node_internal.AtalaObject.validate(bytes).toOption + node_models.AtalaObject.validate(bytes).toOption } } @@ -68,7 +68,7 @@ object AtalaObjectMetadata { } def toTransactionMetadata( - atalaObject: node_internal.AtalaObject + atalaObject: node_models.AtalaObject ): TransactionMetadata = { TransactionMetadata( Json.obj( @@ -101,7 +101,7 @@ object AtalaObjectMetadata { ) } - def estimateTxMetadataSize(atalaObject: node_internal.AtalaObject): Int = { + def estimateTxMetadataSize(atalaObject: node_models.AtalaObject): Int = { toTransactionMetadata(atalaObject).json.noSpaces.length } } diff --git a/node/src/main/scala/io/iohk/atala/prism/node/errors/PrismError.scala b/node/src/main/scala/io/iohk/atala/prism/node/errors/PrismError.scala index f8d6e29693..37d027671d 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/errors/PrismError.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/errors/PrismError.scala @@ -2,7 +2,7 @@ package io.iohk.atala.prism.node.errors import com.google.rpc.status.{Status => StatusProto} import io.grpc.Status -import io.iohk.atala.prism.protos.credential_models.{AtalaErrorMessage, AtalaMessage} +import io.iohk.atala.prism.protos.common_models.{AtalaErrorMessage, AtalaMessage} trait PrismError { def toStatus: Status diff --git a/node/src/main/scala/io/iohk/atala/prism/node/interop/implicits.scala b/node/src/main/scala/io/iohk/atala/prism/node/interop/implicits.scala index 80731a0a2b..70e57648b6 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/interop/implicits.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/interop/implicits.scala @@ -2,7 +2,6 @@ package io.iohk.atala.prism.node.interop import cats.data.NonEmptyList import doobie.{Get, Meta, Read, Write} -import io.iohk.atala.prism.credentials.CredentialBatchId import io.iohk.atala.prism.crypto.{MerkleRoot, Sha256Digest} import doobie.implicits.legacy.instant._ import io.iohk.atala.prism.protos.models.TimestampInfo @@ -37,13 +36,6 @@ object implicits { implicit val ledgerRead: Read[Ledger] = Read[String].map { Ledger.withNameInsensitive } - implicit val credentialBatchIdRead: Read[CredentialBatchId] = - Read[String].map { CredentialBatchId.fromString } - 
implicit val credentialBatchIdGet: Get[CredentialBatchId] = - Get[String].map { CredentialBatchId.fromString } - implicit val credentialBatchIdWrite: Write[CredentialBatchId] = - Write[String].contramap(_.getId) - implicit val Sha256DigestWrite: Write[Sha256Digest] = Write[Array[Byte]].contramap(_.getValue) implicit val Sha256DigestRead: Read[Sha256Digest] = diff --git a/node/src/main/scala/io/iohk/atala/prism/node/logging/GeneralLoggableInstances.scala b/node/src/main/scala/io/iohk/atala/prism/node/logging/GeneralLoggableInstances.scala index 90bf70f666..8640fa5b56 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/logging/GeneralLoggableInstances.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/logging/GeneralLoggableInstances.scala @@ -1,7 +1,6 @@ package io.iohk.atala.prism.node.logging import io.grpc.Status -import io.iohk.atala.prism.credentials.CredentialBatchId import io.iohk.atala.prism.identity.{PrismDid => DID} import io.iohk.atala.prism.node.models.DidSuffix import io.iohk.atala.prism.crypto.keys.ECPublicKey @@ -45,18 +44,6 @@ object GeneralLoggableInstances { override def logShow(a: DidSuffix): String = s"{DIDSuffix=${a.value}}" } - implicit val credentialBatchIdLoggable: DictLoggable[CredentialBatchId] = - new DictLoggable[CredentialBatchId] { - override def fields[I, V, R, S](a: CredentialBatchId, i: I)(implicit - r: LogRenderer[I, V, R, S] - ): R = { - r.addString("CredentialBatchId", a.getId, i) - } - - override def logShow(a: CredentialBatchId): String = - s"{CredentialBatchId=$a}" - } - implicit val ecPublicKeyLoggable: DictLoggable[ECPublicKey] = new DictLoggable[ECPublicKey] { override def fields[I, V, R, S](a: ECPublicKey, i: I)(implicit diff --git a/node/src/main/scala/io/iohk/atala/prism/node/metrics/OperationsCounters.scala b/node/src/main/scala/io/iohk/atala/prism/node/metrics/OperationsCounters.scala index 91879d699d..c97d59b98f 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/metrics/OperationsCounters.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/metrics/OperationsCounters.scala @@ -28,9 +28,7 @@ object OperationsCounters { // Values for operations tags private val EMPTY_OPERATION_TAG_VALUE = "empty" - private val REVOKE_CREDENTIALS_TAG_VALUE = "revoke-credentials" private val CREATE_DID_TAG_VALUE = "create-did" - private val ISSUE_CREDENTIAL_BATCH_TAG_VALUE = "issue-credential-batch" private val PROTOCOL_VERSION_UPDATE_OPERATION_VALUE = "protocol-version-update" private val UPDATE_DID_OPERATION_TAG_VALUE = "did-update" private val DEACTIVATE_DID_TAG_VALUE = "deactivate-did" @@ -172,13 +170,9 @@ object OperationsCounters { private def atalaOperationToTagString: PartialFunction[AtalaOperation.Operation, String] = { case AtalaOperation.Operation.Empty => EMPTY_OPERATION_TAG_VALUE - case AtalaOperation.Operation.RevokeCredentials(_) => - REVOKE_CREDENTIALS_TAG_VALUE case AtalaOperation.Operation.CreateDid(_) => CREATE_DID_TAG_VALUE case AtalaOperation.Operation.DeactivateDid(_) => DEACTIVATE_DID_TAG_VALUE case AtalaOperation.Operation.ProtocolVersionUpdate(_) => PROTOCOL_VERSION_UPDATE_OPERATION_VALUE - case AtalaOperation.Operation.IssueCredentialBatch(_) => - ISSUE_CREDENTIAL_BATCH_TAG_VALUE // Just in case, must be impossible case AtalaOperation.Operation.UpdateDid(_) => UPDATE_DID_OPERATION_TAG_VALUE } diff --git a/node/src/main/scala/io/iohk/atala/prism/node/metrics/StatisticsCounters.scala b/node/src/main/scala/io/iohk/atala/prism/node/metrics/StatisticsCounters.scala index e66ab5ca41..4de4e5e1c4 100644 --- 
a/node/src/main/scala/io/iohk/atala/prism/node/metrics/StatisticsCounters.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/metrics/StatisticsCounters.scala @@ -12,8 +12,6 @@ object StatisticsCounters { case object NumberOfPendingOperations extends MetricCounter case object NumberOfPublishedDids extends MetricCounter - case object NumberOfIssuedCredentialBatches extends MetricCounter - case object NumberOfCredentialsRevoked extends MetricCounter case object NumberOfAppliedTransactions extends MetricCounter case object NumberOfRejectedTransactions extends MetricCounter } diff --git a/node/src/main/scala/io/iohk/atala/prism/node/models/AtalaObjectId.scala b/node/src/main/scala/io/iohk/atala/prism/node/models/AtalaObjectId.scala index a651bf12a1..6e664ab37b 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/models/AtalaObjectId.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/models/AtalaObjectId.scala @@ -1,7 +1,7 @@ package io.iohk.atala.prism.node.models import io.iohk.atala.prism.crypto.{Sha256, Sha256Digest} -import io.iohk.atala.prism.protos.node_internal +import io.iohk.atala.prism.protos.node_models import io.iohk.atala.prism.node.utils.BytesOps import tofu.logging.{DictLoggable, LogRenderer} @@ -32,7 +32,7 @@ object AtalaObjectId { new AtalaObjectId(digestUnsafe.toVector) } - def of(atalaObject: node_internal.AtalaObject): AtalaObjectId = { + def of(atalaObject: node_models.AtalaObject): AtalaObjectId = { of(atalaObject.toByteArray) } diff --git a/node/src/main/scala/io/iohk/atala/prism/node/models/AtalaObjectInfo.scala b/node/src/main/scala/io/iohk/atala/prism/node/models/AtalaObjectInfo.scala index 665f154d64..4ffdef5b97 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/models/AtalaObjectInfo.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/models/AtalaObjectInfo.scala @@ -4,13 +4,13 @@ import io.iohk.atala.prism.node.models.TransactionInfo import io.iohk.atala.prism.node.cardano.TX_METADATA_MAX_SIZE import io.iohk.atala.prism.node.cardano.models.AtalaObjectMetadata import io.iohk.atala.prism.node.operations.{Operation, parseOperationsFromByteContent} -import io.iohk.atala.prism.protos.node_internal +import io.iohk.atala.prism.protos.node_models case class AtalaObjectInfo( objectId: AtalaObjectId, byteContent: Array[ Byte - ], // Serialization of a io.iohk.atala.prism.protos.node_internal.AtalaObject + ], // Serialization of a io.iohk.atala.prism.protos.node_models.AtalaObject operations: List[Operation], // List of parsed operations status: AtalaObjectStatus, // Status of an object may be processed (e.g. DIDs were recognized and stored in DB), merged (e.g. 
merged with another object) or pending transaction: Option[ @@ -31,10 +31,10 @@ case class AtalaObjectInfo( transaction ) - lazy val getAndValidateAtalaObject: Option[node_internal.AtalaObject] = - node_internal.AtalaObject.validate(byteContent).toOption + lazy val getAndValidateAtalaObject: Option[node_models.AtalaObject] = + node_models.AtalaObject.validate(byteContent).toOption - lazy val getAtalaBlock: Option[node_internal.AtalaBlock] = { + lazy val getAtalaBlock: Option[node_models.AtalaBlock] = { for { atalaObject <- getAndValidateAtalaObject atalaBlock <- atalaObject.blockContent @@ -49,8 +49,8 @@ case class AtalaObjectInfo( thatBlock <- that.getAtalaBlock } yield { val mergedBlock = - node_internal.AtalaBlock(thisBlock.operations ++ thatBlock.operations) - val obj = node_internal + node_models.AtalaBlock(thisBlock.operations ++ thatBlock.operations) + val obj = node_models .AtalaObject() .withBlockContent(mergedBlock) AtalaObjectInfo( diff --git a/node/src/main/scala/io/iohk/atala/prism/node/models/package.scala b/node/src/main/scala/io/iohk/atala/prism/node/models/package.scala index f712af042f..049257f913 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/models/package.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/models/package.scala @@ -3,15 +3,13 @@ package io.iohk.atala.prism.node import derevo.derive import enumeratum.EnumEntry.UpperSnakecase import enumeratum._ -import io.iohk.atala.prism.credentials.CredentialBatchId -import io.iohk.atala.prism.crypto.{MerkleRoot, Sha256Digest} +import io.iohk.atala.prism.crypto.Sha256Digest import io.iohk.atala.prism.crypto.keys.ECPublicKey import io.iohk.atala.prism.protos.models.TimestampInfo import io.iohk.atala.prism.protos.node_models import tofu.logging.derivation.loggable import java.time.Instant -import scala.util.matching.Regex package object models { sealed trait KeyUsage extends EnumEntry with UpperSnakecase { @@ -66,23 +64,6 @@ package object models { lastOperation: Sha256Digest ) - class CredentialId private (val id: String) extends AnyVal - - object CredentialId { - def apply(id: String): CredentialId = { - require( - CREDENTIAL_ID_RE.pattern.matcher(id).matches(), - s"invalid credential id: $id" - ) - - new CredentialId(id) - } - - def apply(digest: Sha256Digest): CredentialId = apply(digest.getHexValue) - - val CREDENTIAL_ID_RE: Regex = "^[0-9a-f]{64}$".r - } - @derive(loggable) case class AtalaOperationInfo( operationId: AtalaOperationId, @@ -138,15 +119,6 @@ package object models { object nodeState { - case class CredentialBatchState( - batchId: CredentialBatchId, - issuerDIDSuffix: DidSuffix, - merkleRoot: MerkleRoot, - issuedOn: LedgerData, - revokedOn: Option[LedgerData] = None, - lastOperation: Sha256Digest - ) - case class DIDPublicKeyState( didSuffix: DidSuffix, keyId: String, diff --git a/node/src/main/scala/io/iohk/atala/prism/node/operations/IssueCredentialBatchOperation.scala b/node/src/main/scala/io/iohk/atala/prism/node/operations/IssueCredentialBatchOperation.scala deleted file mode 100644 index a1e70e4ede..0000000000 --- a/node/src/main/scala/io/iohk/atala/prism/node/operations/IssueCredentialBatchOperation.scala +++ /dev/null @@ -1,130 +0,0 @@ -package io.iohk.atala.prism.node.operations - -import cats.data.EitherT -import cats.syntax.either._ -import doobie.free.connection.ConnectionIO -import doobie.implicits._ -import doobie.postgres.sqlstate -import io.iohk.atala.prism.credentials.CredentialBatchId -import io.iohk.atala.prism.crypto.{MerkleRoot, Sha256, Sha256Digest} -import 
io.iohk.atala.prism.node.models.DidSuffix -import io.iohk.atala.prism.node.models.nodeState -import io.iohk.atala.prism.node.models.nodeState.{DIDPublicKeyState, LedgerData} -import io.iohk.atala.prism.node.operations.path.{Path, ValueAtPath} -import io.iohk.atala.prism.node.repositories.daos.CredentialBatchesDAO.CreateCredentialBatchData -import io.iohk.atala.prism.node.repositories.daos.{CredentialBatchesDAO, PublicKeysDAO} -import io.iohk.atala.prism.protos.node_models - -import scala.util.Try - -case class IssueCredentialBatchOperation( - credentialBatchId: CredentialBatchId, - issuerDIDSuffix: DidSuffix, - merkleRoot: MerkleRoot, - digest: Sha256Digest, - ledgerData: nodeState.LedgerData -) extends Operation { - override val metricCounterName: String = IssueCredentialBatchOperation.metricCounterName - - override def getCorrectnessData( - keyId: String - ): EitherT[ConnectionIO, StateError, CorrectnessData] = { - for { - keyState <- EitherT[ConnectionIO, StateError, DIDPublicKeyState] { - PublicKeysDAO - .find(issuerDIDSuffix, keyId) - .map(_.toRight(StateError.UnknownKey(issuerDIDSuffix, keyId))) - } - _ <- EitherT.fromEither[ConnectionIO] { - Either.cond( - keyState.revokedOn.isEmpty, - (), - StateError.KeyAlreadyRevoked() - ) - } - data <- EitherT.fromEither[ConnectionIO] { - Either.cond( - keyState.keyUsage.canIssue, - CorrectnessData(keyState.key, None), - StateError.InvalidKeyUsed( - s"The key type expected is Issuing key. Type used: ${keyState.keyUsage}" - ): StateError - ) - } - } yield data - } - - override def applyStateImpl(_config: ApplyOperationConfig): EitherT[ConnectionIO, StateError, Unit] = - for { - _ <- EitherT { - CredentialBatchesDAO - .insert( - CreateCredentialBatchData( - credentialBatchId, - digest, - issuerDIDSuffix, - merkleRoot, - ledgerData - ) - ) - .attemptSomeSqlState { - case sqlstate.class23.UNIQUE_VIOLATION => - StateError.EntityExists( - "credential", - credentialBatchId.getId - ): StateError - case sqlstate.class23.FOREIGN_KEY_VIOLATION => - // that shouldn't happen, as key verification requires the issuer in the DB, - // but putting it here just in case - StateError.EntityMissing("issuerDID", issuerDIDSuffix.getValue) - } - } - } yield () -} - -object IssueCredentialBatchOperation extends SimpleOperationCompanion[IssueCredentialBatchOperation] { - val metricCounterName: String = "number_of_issued_credential_batches" - - override def parse( - operation: node_models.AtalaOperation, - ledgerData: LedgerData - ): Either[ValidationError, IssueCredentialBatchOperation] = { - val operationDigest = Sha256.compute(operation.toByteArray) - val issueCredentialBatchOperation = - ValueAtPath(operation, Path.root) - .child(_.getIssueCredentialBatch, "issueCredentialBatch") - - for { - credentialBatchData <- issueCredentialBatchOperation.childGet( - _.credentialBatchData, - "credentialBatchData" - ) - batchId <- credentialBatchData.parse { _ => - Option( - CredentialBatchId - .fromString( - Sha256.compute(credentialBatchData.value.toByteArray).getHexValue - ) - ).fold("Credential batchId".asLeft[CredentialBatchId])(Right(_)) - } - issuerDIDSuffix <- credentialBatchData - .child(_.issuerDid, "issuerDID") - .parse { issuerDID => - DidSuffix.fromString(issuerDID).toEither.left.map(_.getMessage) - } - merkleRoot <- credentialBatchData - .child(_.merkleRoot, "merkleRoot") - .parse { merkleRoot => - Try( - new MerkleRoot(Sha256Digest.fromBytes(merkleRoot.toByteArray)) - ).toEither.left.map(_.getMessage) - } - } yield IssueCredentialBatchOperation( - batchId, -
issuerDIDSuffix, - merkleRoot, - operationDigest, - ledgerData - ) - } -} diff --git a/node/src/main/scala/io/iohk/atala/prism/node/operations/ParsingUtils.scala b/node/src/main/scala/io/iohk/atala/prism/node/operations/ParsingUtils.scala index f560b25d5e..465626b617 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/operations/ParsingUtils.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/operations/ParsingUtils.scala @@ -1,6 +1,5 @@ package io.iohk.atala.prism.node.operations -import java.time.LocalDate import cats.implicits._ import com.google.protobuf.ByteString import io.iohk.atala.prism.crypto.EC.{INSTANCE => EC} @@ -10,7 +9,7 @@ import io.iohk.atala.prism.node.models.DidSuffix import io.iohk.atala.prism.node.models.{DIDPublicKey, DIDService, KeyUsage, ProtocolConstants} import io.iohk.atala.prism.node.operations.ValidationError.{InvalidValue, MissingValue} import io.iohk.atala.prism.node.operations.path.ValueAtPath -import io.iohk.atala.prism.protos.{common_models, node_models} +import io.iohk.atala.prism.protos.node_models import io.iohk.atala.prism.node.utils.UriUtils import io.circe.parser.{parse => parseJson} import io.circe.Json @@ -21,28 +20,6 @@ object ParsingUtils { private type EitherValidationError[B] = Either[ValidationError, B] - def parseDate( - date: ValueAtPath[common_models.Date] - ): Either[ValidationError, LocalDate] = { - for { - year <- date.child(_.year, "year").parse { year => - Either - .cond(year > 0, year, "Year needs to be specified as positive value") - } - month <- date.child(_.month, "month").parse { month => - Either.cond( - month >= 1 && month <= 12, - month, - "Month has to be specified and between 1 and 12" - ) - } - parsedDate <- date.child(_.day, "day").parse { day => - Try(LocalDate.of(year, month, day)).toEither.left - .map(_ => "Day has to be specified and a proper day in the month") - } - } yield parsedDate - } - def parseKeyData( keyData: ValueAtPath[node_models.PublicKey] ): Either[ValidationError, ECPublicKey] = { diff --git a/node/src/main/scala/io/iohk/atala/prism/node/operations/RevokeCredentialsOperation.scala b/node/src/main/scala/io/iohk/atala/prism/node/operations/RevokeCredentialsOperation.scala deleted file mode 100644 index 95960634d3..0000000000 --- a/node/src/main/scala/io/iohk/atala/prism/node/operations/RevokeCredentialsOperation.scala +++ /dev/null @@ -1,153 +0,0 @@ -package io.iohk.atala.prism.node.operations - -import cats.data.EitherT -import cats.free.Free -import cats.implicits.catsSyntaxEitherId -import cats.syntax.functor._ -import doobie.free.connection.ConnectionIO -import doobie.implicits._ -import io.iohk.atala.prism.credentials.CredentialBatchId -import io.iohk.atala.prism.crypto.{Sha256, Sha256Digest} -import io.iohk.atala.prism.node.models.DidSuffix -import io.iohk.atala.prism.node.models.nodeState -import io.iohk.atala.prism.node.models.nodeState.{DIDPublicKeyState, LedgerData} -import io.iohk.atala.prism.node.operations.path.{Path, ValueAtPath} -import io.iohk.atala.prism.node.repositories.daos.{CredentialBatchesDAO, PublicKeysDAO} -import io.iohk.atala.prism.protos.node_models - -case class RevokeCredentialsOperation( - credentialBatchId: CredentialBatchId, - credentialsToRevoke: List[Sha256Digest], - previousOperation: Sha256Digest, - digest: Sha256Digest, - ledgerData: nodeState.LedgerData -) extends Operation { - override val metricCounterName: String = RevokeCredentialsOperation.metricCounterName - - override def linkedPreviousOperation: Option[Sha256Digest] = Some( - previousOperation - ) - 
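// getCorrectnessData (below) resolves the issuer DID suffix and the digest of the batch's last operation
// from the database, then checks that the signing key has revocation usage and is not itself revoked.
// A minimal sketch of how a caller consumes the result, assuming a hypothetical verifySignature helper
// (not part of this file):
//   val CorrectnessData(revocationKey, expectedPrevOp) = correctnessData
//   require(verifySignature(revocationKey, signedOperation)) // signature must match the resolved key
//   require(expectedPrevOp.contains(previousOperation)) // hash chain must link to the batch's last operation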
- override def getCorrectnessData( - keyId: String - ): EitherT[ConnectionIO, StateError, CorrectnessData] = { - for { - issuerPrevOp <- EitherT[ - ConnectionIO, - StateError, - (DidSuffix, Sha256Digest) - ] { - CredentialBatchesDAO - .findBatch(credentialBatchId) - .map( - _.map(cred => (cred.issuerDIDSuffix, cred.lastOperation)) - .toRight( - StateError - .EntityMissing("credential batch", credentialBatchId.getId) - ) - ) - } - (issuer, prevOp) = issuerPrevOp - keyState <- EitherT[ConnectionIO, StateError, DIDPublicKeyState] { - PublicKeysDAO - .find(issuer, keyId) - .map(_.toRight(StateError.UnknownKey(issuer, keyId))) - }.subflatMap { didKey => - Either.cond( - didKey.keyUsage.canRevoke, - didKey, - StateError.InvalidKeyUsed( - s"The key type expected is Revocation key. Type used: ${didKey.keyUsage}" - ): StateError - ) - } - _ <- EitherT.fromEither[ConnectionIO] { - Either.cond( - keyState.revokedOn.isEmpty, - (), - StateError.KeyAlreadyRevoked(): StateError - ) - } - } yield CorrectnessData(keyState.key, Some(prevOp)) - } - - override def applyStateImpl(_config: ApplyOperationConfig): EitherT[ConnectionIO, StateError, Unit] = { - def weShouldRevokeTheFullBatch: Boolean = credentialsToRevoke.isEmpty - - def revokeFullBatch() = { - CredentialBatchesDAO - .revokeEntireBatch(credentialBatchId, ledgerData) - .map { wasUpdated => - if (wasUpdated) ().asRight[StateError] - else StateError.BatchAlreadyRevoked(credentialBatchId.getId).asLeft - } - } - - def revokeSpecificCredentials() = { - CredentialBatchesDAO.findBatch(credentialBatchId).flatMap { state => - val isBatchAlreadyRevoked = state.fold(false)(_.revokedOn.nonEmpty) - if (isBatchAlreadyRevoked) { - Free.pure( - (StateError.BatchAlreadyRevoked( - credentialBatchId.getId - ): StateError).asLeft[Unit] - ) - } else { - CredentialBatchesDAO - .revokeCredentials( - credentialBatchId, - credentialsToRevoke, - ledgerData - ) - .as(().asRight[StateError]) - } - } - } - - EitherT[ConnectionIO, StateError, Unit] { - if (weShouldRevokeTheFullBatch) revokeFullBatch() - else revokeSpecificCredentials() - } - } -} - -object RevokeCredentialsOperation extends SimpleOperationCompanion[RevokeCredentialsOperation] { - val metricCounterName: String = "number_of_revoked_credentials" - - override def parse( - operation: node_models.AtalaOperation, - ledgerData: LedgerData - ): Either[ValidationError, RevokeCredentialsOperation] = { - - val operationDigest = Sha256.compute(operation.toByteArray) - val revokeOperation = ValueAtPath(operation, Path.root) - .child(_.getRevokeCredentials, "revokeCredentials") - - for { - credentialBatchId <- revokeOperation - .child(_.credentialBatchId, "credentialBatchId") - .parse { credentialBatchId => - Option( - CredentialBatchId - .fromString(credentialBatchId) - ).fold( - s"credential batch id has invalid format $credentialBatchId" - .asLeft[CredentialBatchId] - )(_.asRight) - } - credentialsToRevoke <- - ParsingUtils.parseHashList( - revokeOperation.child(_.credentialsToRevoke, "credentialsToRevoke") - ) - previousOperation <- ParsingUtils.parseHash( - revokeOperation.child(_.previousOperationHash, "previousOperationHash") - ) - } yield RevokeCredentialsOperation( - credentialBatchId, - credentialsToRevoke, - previousOperation, - operationDigest, - ledgerData - ) - } -} diff --git a/node/src/main/scala/io/iohk/atala/prism/node/operations/package.scala b/node/src/main/scala/io/iohk/atala/prism/node/operations/package.scala index 243f1178f7..3463664e94 100644 --- 
a/node/src/main/scala/io/iohk/atala/prism/node/operations/package.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/operations/package.scala @@ -16,7 +16,7 @@ import io.iohk.atala.prism.node.operations.ValidationError.InvalidValue import io.iohk.atala.prism.node.operations.path._ import io.iohk.atala.prism.node.operations.protocolVersion.SupportedOperations import io.iohk.atala.prism.node.repositories.daos.{MetricsCountersDAO, ProtocolVersionsDAO} -import io.iohk.atala.prism.protos.{node_internal, node_models} +import io.iohk.atala.prism.protos.node_models import io.iohk.atala.prism.protos.node_models.SignedAtalaOperation package object operations { @@ -320,10 +320,6 @@ package object operations { CreateDIDOperation.parse(signedOperation, ledgerData) case _: node_models.AtalaOperation.Operation.UpdateDid => UpdateDIDOperation.parse(signedOperation, ledgerData) - case _: node_models.AtalaOperation.Operation.IssueCredentialBatch => - IssueCredentialBatchOperation.parse(signedOperation, ledgerData) - case _: node_models.AtalaOperation.Operation.RevokeCredentials => - RevokeCredentialsOperation.parse(signedOperation, ledgerData) case _: node_models.AtalaOperation.Operation.ProtocolVersionUpdate => ProtocolVersionUpdateOperation.parse(signedOperation, ledgerData) case _: node_models.AtalaOperation.Operation.DeactivateDid => @@ -336,13 +332,21 @@ package object operations { "Empty operation" ) ) + case op => // we need to discard unrecognized operations + Left( + InvalidValue( + Path.root, + op.getClass.getSimpleName, + s"Unsupported ${op.getClass.getSimpleName}" + ) + ) } } def parseOperationsFromByteContent( byteContent: Array[Byte] ): List[Operation] = - node_internal.AtalaObject + node_models.AtalaObject .validate(byteContent) .toOption .fold(List[Operation]()) { obj => diff --git a/node/src/main/scala/io/iohk/atala/prism/node/operations/protocolVersion.scala b/node/src/main/scala/io/iohk/atala/prism/node/operations/protocolVersion.scala index 916129aa0a..777dc85e8c 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/operations/protocolVersion.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/operations/protocolVersion.scala @@ -27,8 +27,8 @@ package object protocolVersion { (protocolV, operation) match { case ( ProtocolVersion1_0, - _: CreateDIDOperation | _: UpdateDIDOperation | _: IssueCredentialBatchOperation | - _: RevokeCredentialsOperation | _: ProtocolVersionUpdateOperation | _: DeactivateDIDOperation + _: CreateDIDOperation | _: UpdateDIDOperation | _: ProtocolVersionUpdateOperation | + _: DeactivateDIDOperation ) => true case _ => false diff --git a/node/src/main/scala/io/iohk/atala/prism/node/repositories/CredentialBatchesRepository.scala b/node/src/main/scala/io/iohk/atala/prism/node/repositories/CredentialBatchesRepository.scala deleted file mode 100644 index 3d830d2593..0000000000 --- a/node/src/main/scala/io/iohk/atala/prism/node/repositories/CredentialBatchesRepository.scala +++ /dev/null @@ -1,94 +0,0 @@ -package io.iohk.atala.prism.node.repositories - -import cats.{Applicative, Comonad, Functor} -import cats.data.EitherT -import cats.effect.Resource -import cats.syntax.comonad._ -import cats.syntax.functor._ -import derevo.derive -import derevo.tagless.applyK -import doobie.implicits._ -import doobie.util.transactor.Transactor -import io.iohk.atala.prism.credentials.CredentialBatchId -import io.iohk.atala.prism.crypto.Sha256Digest -import io.iohk.atala.prism.node.errors.NodeError -import io.iohk.atala.prism.node.models.nodeState.{CredentialBatchState, 
LedgerData} -import io.iohk.atala.prism.node.repositories.daos.CredentialBatchesDAO -import io.iohk.atala.prism.node.metrics.TimeMeasureMetric -import io.iohk.atala.prism.node.repositories.logs.CredentialBatchesRepositoryLogs -import io.iohk.atala.prism.node.repositories.metrics.CredentialBatchesRepositoryMetrics -import io.iohk.atala.prism.node.utils.syntax.DBConnectionOps -import tofu.higherKind.Mid -import tofu.logging.{Logs, ServiceLogging} -import tofu.syntax.monoid.TofuSemigroupOps -import cats.effect.MonadCancelThrow - -@derive(applyK) -trait CredentialBatchesRepository[F[_]] { - def getBatchState( - batchId: CredentialBatchId - ): F[Either[NodeError, Option[CredentialBatchState]]] - def getCredentialRevocationTime( - batchId: CredentialBatchId, - credentialHash: Sha256Digest - ): F[Either[NodeError, Option[LedgerData]]] -} - -object CredentialBatchesRepository { - def apply[F[_]: MonadCancelThrow: TimeMeasureMetric, R[_]: Functor]( - transactor: Transactor[F], - logs: Logs[R, F] - ): R[CredentialBatchesRepository[F]] = - for { - serviceLogs <- logs.service[CredentialBatchesRepository[F]] - } yield { - implicit val implicitLogs: ServiceLogging[F, CredentialBatchesRepository[F]] = serviceLogs - val metrics: CredentialBatchesRepository[Mid[F, *]] = - new CredentialBatchesRepositoryMetrics[F]() - val logs: CredentialBatchesRepository[Mid[F, *]] = - new CredentialBatchesRepositoryLogs[F] - val mid = metrics |+| logs - mid attach new CredentialBatchesRepositoryImpl[F](transactor) - } - - def resource[F[_]: MonadCancelThrow: TimeMeasureMetric, R[ - _ - ]: Applicative: Functor]( - transactor: Transactor[F], - logs: Logs[R, F] - ): Resource[R, CredentialBatchesRepository[F]] = - Resource.eval(CredentialBatchesRepository(transactor, logs)) - - def unsafe[F[_]: MonadCancelThrow: TimeMeasureMetric, R[_]: Comonad]( - transactor: Transactor[F], - logs: Logs[R, F] - ): CredentialBatchesRepository[F] = - CredentialBatchesRepository(transactor, logs).extract -} - -private final class CredentialBatchesRepositoryImpl[F[_]: MonadCancelThrow]( - xa: Transactor[F] -) extends CredentialBatchesRepository[F] { - - def getBatchState( - batchId: CredentialBatchId - ): F[Either[NodeError, Option[CredentialBatchState]]] = - EitherT - .right[NodeError](CredentialBatchesDAO.findBatch(batchId)) - .value - .logSQLErrorsV2("getting batch state") - .transact(xa) - - def getCredentialRevocationTime( - batchId: CredentialBatchId, - credentialHash: Sha256Digest - ): F[Either[NodeError, Option[LedgerData]]] = - EitherT - .right[NodeError]( - CredentialBatchesDAO - .findRevokedCredentialLedgerData(batchId, credentialHash) - ) - .value - .logSQLErrorsV2("getting credential revocation time") - .transact(xa) -} diff --git a/node/src/main/scala/io/iohk/atala/prism/node/repositories/daos/BaseDAO.scala b/node/src/main/scala/io/iohk/atala/prism/node/repositories/daos/BaseDAO.scala index 7973007798..d40a55f333 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/repositories/daos/BaseDAO.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/repositories/daos/BaseDAO.scala @@ -3,7 +3,6 @@ package io.iohk.atala.prism.node.repositories.daos import doobie.util.invariant.InvalidEnum import doobie.{Get, Meta, Put} import io.circe.Json -import io.iohk.atala.prism.credentials.CredentialBatchId import io.iohk.atala.prism.crypto.EC.{INSTANCE => EC} import io.iohk.atala.prism.crypto.keys.ECPublicKey import io.iohk.atala.prism.crypto.{Sha256Digest => SHA256Digest} @@ -63,14 +62,6 @@ trait BaseDAO { ): Meta[T] = { 
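// Bidirectional doobie mapping: read the UUID column into the wrapper type T via builder.apply, and write T back to a UUID via its uuid field.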
Meta[UUID].timap(builder.apply)(_.uuid) } - - implicit val credentialBatchId: Meta[CredentialBatchId] = - Meta[String].timap(x => - Option( - CredentialBatchId - .fromString(x) - ).getOrElse(throw new RuntimeException(s"Invalid batch id: $x")) - )(_.getId) } object BaseDAO extends BaseDAO diff --git a/node/src/main/scala/io/iohk/atala/prism/node/repositories/daos/CredentialBatchesDAO.scala b/node/src/main/scala/io/iohk/atala/prism/node/repositories/daos/CredentialBatchesDAO.scala deleted file mode 100644 index 45e3e15ddb..0000000000 --- a/node/src/main/scala/io/iohk/atala/prism/node/repositories/daos/CredentialBatchesDAO.scala +++ /dev/null @@ -1,125 +0,0 @@ -package io.iohk.atala.prism.node.repositories.daos - -import java.time.Instant -import cats.syntax.functor._ -import doobie.Update -import doobie.free.connection.ConnectionIO -import doobie.implicits._ -import doobie.implicits.legacy.instant._ -import io.iohk.atala.prism.credentials.CredentialBatchId -import io.iohk.atala.prism.crypto.MerkleRoot -import io.iohk.atala.prism.crypto.Sha256Digest -import io.iohk.atala.prism.node.models._ -import io.iohk.atala.prism.node.models.nodeState.{CredentialBatchState, LedgerData} -import io.iohk.atala.prism.node.repositories.daos._ -import io.iohk.atala.prism.node.interop.implicits._ -import io.iohk.atala.prism.node.utils.syntax._ - -object CredentialBatchesDAO { - case class CreateCredentialBatchData( - batchId: CredentialBatchId, - lastOperation: Sha256Digest, - issuerDIDSuffix: DidSuffix, - merkleRoot: MerkleRoot, - ledgerData: LedgerData - ) - - def insert( - data: CreateCredentialBatchData - ): ConnectionIO[Unit] = { - val issuedOn = data.ledgerData.timestampInfo - sql""" - |INSERT INTO credential_batches (batch_id, last_operation, issuer_did_suffix, merkle_root, issued_on, issued_on_absn, issued_on_osn, ledger, issued_on_transaction_id) - |VALUES (${data.batchId}, ${data.lastOperation}, ${data.issuerDIDSuffix}, ${data.merkleRoot}, ${Instant - .ofEpochMilli(issuedOn.getAtalaBlockTimestamp)}, - | ${issuedOn.getAtalaBlockSequenceNumber}, ${issuedOn.getOperationSequenceNumber}, ${data.ledgerData.ledger}, ${data.ledgerData.transactionId}) - """.stripMargin.update.run.void - } - - def findBatch( - credentialBatchId: CredentialBatchId - ): ConnectionIO[Option[CredentialBatchState]] = { - sql""" - |SELECT batch_id, issuer_did_suffix, merkle_root, issued_on_transaction_id, ledger, - | issued_on, issued_on_absn, issued_on_osn, revoked_on_transaction_id, ledger, - | revoked_on, revoked_on_absn, revoked_on_osn, last_operation - |FROM credential_batches - |WHERE batch_id = ${credentialBatchId.getId} - """.stripMargin.query[CredentialBatchState].option - } - - def revokeEntireBatch( - credentialBatchId: CredentialBatchId, - ledgerData: LedgerData - ): ConnectionIO[Boolean] = { - val revocationTimestamp = ledgerData.timestampInfo - sql""" - |UPDATE credential_batches - |SET revoked_on = ${revocationTimestamp.getAtalaBlockTimestamp.toInstant}, - | revoked_on_absn = ${revocationTimestamp.getAtalaBlockSequenceNumber}, - | revoked_on_osn = ${revocationTimestamp.getOperationSequenceNumber}, - | revoked_on_transaction_id = ${ledgerData.transactionId} - |WHERE batch_id = ${credentialBatchId.getId} AND - | revoked_on IS NULL - """.stripMargin.update.run.map(_ > 0) - } - - def revokeCredentials( - credentialBatchId: CredentialBatchId, - credentials: List[Sha256Digest], - ledgerData: LedgerData - ): ConnectionIO[Unit] = { - val revocationTimestamp = ledgerData.timestampInfo - val sql = - """INSERT INTO 
revoked_credentials (batch_id, credential_id, revoked_on, revoked_on_absn, revoked_on_osn, ledger, transaction_id) - |VALUES (?, ?, ?, ?, ?, ?, ?) - |ON CONFLICT (batch_id, credential_id) DO NOTHING - |""".stripMargin - Update[ - ( - CredentialBatchId, - Sha256Digest, - Instant, - Int, - Int, - Ledger, - TransactionId - ) - ](sql) - .updateMany( - credentials.map(credentialHash => - ( - credentialBatchId, - credentialHash, - revocationTimestamp.getAtalaBlockTimestamp.toInstant, - revocationTimestamp.getAtalaBlockSequenceNumber, - revocationTimestamp.getOperationSequenceNumber, - ledgerData.ledger, - ledgerData.transactionId - ) - ) - ) - .void - } - - def findRevokedCredentialLedgerData( - batchId: CredentialBatchId, - credentialHash: Sha256Digest - ): ConnectionIO[Option[LedgerData]] = { - sql"""SELECT transaction_id, ledger, revoked_on, revoked_on_absn, revoked_on_osn - |FROM revoked_credentials - |WHERE batch_id = $batchId AND - | credential_id = $credentialHash::CREDENTIAL_HASH - |""".stripMargin.query[LedgerData].option - } - - // only for testing - def findRevokedCredentials( - batchId: CredentialBatchId - ): ConnectionIO[List[(Sha256Digest, LedgerData)]] = { - sql"""SELECT credential_id, transaction_id, ledger, revoked_on, revoked_on_absn, revoked_on_osn - |FROM revoked_credentials - |WHERE batch_id = $batchId - |""".stripMargin.query[(Sha256Digest, LedgerData)].to[List] - } -} diff --git a/node/src/main/scala/io/iohk/atala/prism/node/repositories/daos/package.scala b/node/src/main/scala/io/iohk/atala/prism/node/repositories/daos/package.scala index 2384ea413d..1b39c8cc26 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/repositories/daos/package.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/repositories/daos/package.scala @@ -4,13 +4,11 @@ import cats.data.NonEmptyList import doobie._ import doobie.postgres.implicits._ import doobie.util.invariant.InvalidEnum -import io.iohk.atala.prism.credentials.CredentialBatchId import io.iohk.atala.prism.crypto.EC.{INSTANCE => EC} import io.iohk.atala.prism.crypto.ECConfig.{INSTANCE => ECConfig} import io.iohk.atala.prism.crypto.keys.ECPublicKey -import io.iohk.atala.prism.crypto.{MerkleRoot, Sha256Digest} import io.iohk.atala.prism.node.models._ -import io.iohk.atala.prism.node.models.nodeState.{CredentialBatchState, DIDPublicKeyState, DIDServiceState, LedgerData} +import io.iohk.atala.prism.node.models.nodeState.{DIDPublicKeyState, DIDServiceState, LedgerData} import io.iohk.atala.prism.protos.models.TimestampInfo import io.iohk.atala.prism.node.utils.syntax._ @@ -62,13 +60,6 @@ package object daos extends BaseDAO { implicit val IdTypePut: Put[IdType] = Put[String].contramap(_.getValue) implicit val IdTypeGet: Get[IdType] = Get[String].map(IdType.apply) - implicit val credentialIdPut: Put[CredentialId] = Put[String].contramap(_.id) - implicit val credentialIdGet: Get[CredentialId] = - Get[String].map(CredentialId(_)) - - implicit val credentialBatchIdMeta: Meta[CredentialBatchId] = - Meta[String].timap(CredentialBatchId.fromString)(_.getId) - implicit val didPublicKeyWrite: Write[DIDPublicKeyState] = { Write[ ( @@ -344,76 +335,6 @@ package object daos extends BaseDAO { ) } - implicit val CredentialBatchStateRead: Read[CredentialBatchState] = { - Read[ - ( - String, - String, - Array[Byte], - Array[Byte], - String, - Instant, - Int, - Int, - Option[Array[Byte]], - Option[String], - Option[Instant], - Option[Int], - Option[Int], - Array[Byte] - ) - ].map { - case ( - batchId, - suffix, - root, - issTxId, - issLedger, - 
issABT, - issABSN, - issOSN, - revTxIdOp, - revLedgerOp, - revABTOp, - revABSNOp, - revOSNOp, - sha - ) => - val issuedOn = LedgerData( - TransactionId.from(issTxId).get, - Ledger.withNameInsensitive(issLedger), - new TimestampInfo(issABT.toEpochMilli, issABSN, issOSN) - ) - val revokedOn = { - (revTxIdOp, revLedgerOp, revABTOp, revABSNOp, revOSNOp) match { - case ( - Some(rTrId), - Some(rLedger), - Some(rAbt), - Some(rAbsn), - Some(rOsn) - ) => - Some( - LedgerData( - TransactionId.from(rTrId).get, - Ledger.withNameInsensitive(rLedger), - new TimestampInfo(rAbt.toEpochMilli, rAbsn, rOsn) - ) - ) - case _ => None - } - } - CredentialBatchState( - CredentialBatchId.fromString(batchId), - DidSuffix(suffix), - new MerkleRoot(Sha256Digest.fromBytes(root)), - issuedOn, - revokedOn, - Sha256Digest.fromBytes(sha) - ) - } - } - implicit val protocolVersionRead: Read[ProtocolVersion] = Read[(Int, Int)] .map { case (major, minor) => diff --git a/node/src/main/scala/io/iohk/atala/prism/node/repositories/logs/CredentialBatchesRepositoryLogs.scala b/node/src/main/scala/io/iohk/atala/prism/node/repositories/logs/CredentialBatchesRepositoryLogs.scala deleted file mode 100644 index 514056e202..0000000000 --- a/node/src/main/scala/io/iohk/atala/prism/node/repositories/logs/CredentialBatchesRepositoryLogs.scala +++ /dev/null @@ -1,55 +0,0 @@ -package io.iohk.atala.prism.node.repositories.logs - -import cats.syntax.apply._ -import cats.syntax.applicativeError._ -import cats.syntax.flatMap._ -import io.iohk.atala.prism.credentials.CredentialBatchId -import io.iohk.atala.prism.crypto.Sha256Digest -import io.iohk.atala.prism.node.errors -import io.iohk.atala.prism.node.models.nodeState -import io.iohk.atala.prism.node.repositories.CredentialBatchesRepository -import io.iohk.atala.prism.node.logging.GeneralLoggableInstances._ -import tofu.higherKind.Mid -import tofu.logging.ServiceLogging -import tofu.syntax.logging._ -import cats.MonadThrow - -private[repositories] final class CredentialBatchesRepositoryLogs[F[ - _ -]: MonadThrow: ServiceLogging[*[ - _ -], CredentialBatchesRepository[F]]] - extends CredentialBatchesRepository[Mid[F, *]] { - override def getBatchState( - batchId: CredentialBatchId - ): Mid[F, Either[errors.NodeError, Option[nodeState.CredentialBatchState]]] = - in => - info"getting batch state $batchId" *> in - .flatTap( - _.fold( - err => error"Encountered an error while getting batch state $batchId: $err", - res => info"getting batch state $batchId - successfully done, state found - ${res.isDefined}" - ) - ) - .onError(errorCause"Encountered an error while getting batch state $batchId" (_)) - - override def getCredentialRevocationTime( - batchId: CredentialBatchId, - credentialHash: Sha256Digest - ): Mid[F, Either[errors.NodeError, Option[nodeState.LedgerData]]] = - in => - info"getting credential revocation time for $batchId" *> in - .flatTap( - _.fold( - err => error"Encountered an error while getting credential revocation time: $err", - res => - info"getting credential revocation time for $batchId - successfully done ${res - .map(_.transactionId)}" - ) - ) - .onError( - errorCause"Encountered an error while getting credential revocation time for $batchId" ( - _ - ) - ) -} diff --git a/node/src/main/scala/io/iohk/atala/prism/node/repositories/metrics/CredentialBatchesRepositoryMetrics.scala b/node/src/main/scala/io/iohk/atala/prism/node/repositories/metrics/CredentialBatchesRepositoryMetrics.scala deleted file mode 100644 index cdf3f6f3e2..0000000000 --- 
a/node/src/main/scala/io/iohk/atala/prism/node/repositories/metrics/CredentialBatchesRepositoryMetrics.scala +++ /dev/null @@ -1,33 +0,0 @@ -package io.iohk.atala.prism.node.repositories.metrics - -import io.iohk.atala.prism.credentials.CredentialBatchId -import io.iohk.atala.prism.crypto.Sha256Digest -import io.iohk.atala.prism.node.metrics.TimeMeasureUtil.MeasureOps -import io.iohk.atala.prism.node.metrics.{TimeMeasureMetric, TimeMeasureUtil} -import io.iohk.atala.prism.node.errors.NodeError -import io.iohk.atala.prism.node.models.nodeState.{CredentialBatchState, LedgerData} -import io.iohk.atala.prism.node.repositories.CredentialBatchesRepository -import tofu.higherKind.Mid -import cats.effect.MonadCancelThrow - -private[repositories] final class CredentialBatchesRepositoryMetrics[F[ - _ -]: TimeMeasureMetric: MonadCancelThrow] - extends CredentialBatchesRepository[Mid[F, *]] { - - private val repoName = "CredentialBatchesRepository" - private lazy val getBatchStateTimer = - TimeMeasureUtil.createDBQueryTimer(repoName, "getBatchState") - private lazy val getCredentialRevocationTimeTimer = - TimeMeasureUtil.createDBQueryTimer(repoName, "getCredentialRevocationTime") - - override def getBatchState( - batchId: CredentialBatchId - ): Mid[F, Either[NodeError, Option[CredentialBatchState]]] = - _.measureOperationTime(getBatchStateTimer) - override def getCredentialRevocationTime( - batchId: CredentialBatchId, - credentialHash: Sha256Digest - ): Mid[F, Either[NodeError, Option[LedgerData]]] = - _.measureOperationTime(getCredentialRevocationTimeTimer) -} diff --git a/node/src/main/scala/io/iohk/atala/prism/node/services/BlockProcessingService.scala b/node/src/main/scala/io/iohk/atala/prism/node/services/BlockProcessingService.scala index 2e594f8dde..1443f5c560 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/services/BlockProcessingService.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/services/BlockProcessingService.scala @@ -16,7 +16,7 @@ import io.iohk.atala.prism.node.models.nodeState.LedgerData import io.iohk.atala.prism.node.operations._ import io.iohk.atala.prism.node.repositories.daos.AtalaOperationsDAO import io.iohk.atala.prism.protos.node_models.SignedAtalaOperation -import io.iohk.atala.prism.protos.{node_internal, node_models} +import io.iohk.atala.prism.protos.node_models import org.slf4j.LoggerFactory import scala.util.chaining._ import scala.util.control.NonFatal @@ -27,7 +27,7 @@ trait BlockProcessingService { // Iterates over transactions in the Cardano block, retrieves operations from transaction metadata, // applies every operation to the Node state (e.g. 
update DID Document stored in the database) def processBlock( - block: node_internal.AtalaBlock, + block: node_models.AtalaBlock, transactionId: TransactionId, ledger: Ledger, blockTimestamp: Instant, @@ -41,7 +41,7 @@ class BlockProcessingServiceImpl(applyOperationConfig: ApplyOperationConfig) ext // ConnectionIO[Boolean] is a temporary type used to be able to unit test this; // it will eventually be replaced with ConnectionIO[Unit] override def processBlock( - block: node_internal.AtalaBlock, + block: node_models.AtalaBlock, transactionId: TransactionId, ledger: Ledger, blockTimestamp: Instant, diff --git a/node/src/main/scala/io/iohk/atala/prism/node/services/CardanoLedgerService.scala b/node/src/main/scala/io/iohk/atala/prism/node/services/CardanoLedgerService.scala index dc0f916962..032f323549 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/services/CardanoLedgerService.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/services/CardanoLedgerService.scala @@ -19,7 +19,7 @@ import io.iohk.atala.prism.node.services.CardanoLedgerService.{CardanoBlockHandl import io.iohk.atala.prism.node.services.logs.UnderlyingLedgerLogs import io.iohk.atala.prism.node.services.models.{AtalaObjectNotification, AtalaObjectNotificationHandler} import io.iohk.atala.prism.node.{PublicationInfo, UnderlyingLedger} -import io.iohk.atala.prism.protos.node_internal +import io.iohk.atala.prism.protos.node_models import tofu.higherKind.Mid import tofu.lift.Lift import tofu.logging.{Logs, ServiceLogging} @@ -86,7 +86,7 @@ class CardanoLedgerService[F[_]] private[services] ( /** Publishes AtalaObject containing a list of operations inside. */ override def publish( - obj: node_internal.AtalaObject + obj: node_models.AtalaObject ): F[Either[CardanoWalletError, PublicationInfo]] = { val metadata = AtalaObjectMetadata.toTransactionMetadata(obj) diff --git a/node/src/main/scala/io/iohk/atala/prism/node/services/InMemoryLedgerService.scala b/node/src/main/scala/io/iohk/atala/prism/node/services/InMemoryLedgerService.scala index 4e9b02c3f6..75280167f6 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/services/InMemoryLedgerService.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/services/InMemoryLedgerService.scala @@ -10,7 +10,7 @@ import io.iohk.atala.prism.node.models.Balance import io.iohk.atala.prism.node.services.logs.UnderlyingLedgerLogs import io.iohk.atala.prism.node.services.models.{AtalaObjectNotification, AtalaObjectNotificationHandler} import io.iohk.atala.prism.node.{PublicationInfo, UnderlyingLedger} -import io.iohk.atala.prism.protos.node_internal +import io.iohk.atala.prism.protos.node_models import tofu.higherKind.Mid import tofu.logging.{Logs, ServiceLogging} @@ -25,7 +25,7 @@ private final class InMemoryLedgerService[F[_]: MonadThrow]( override def getType: Ledger = Ledger.InMemory override def publish( - obj: node_internal.AtalaObject + obj: node_models.AtalaObject ): F[Either[CardanoWalletError, PublicationInfo]] = { val publcationInfoF = for { objectBytes <- obj.toByteArray.pure[F] diff --git a/node/src/main/scala/io/iohk/atala/prism/node/services/NodeService.scala b/node/src/main/scala/io/iohk/atala/prism/node/services/NodeService.scala index 90e972b0e3..5804008b5d 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/services/NodeService.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/services/NodeService.scala @@ -6,19 +6,19 @@ import cats.{Applicative, Comonad, Functor, MonadThrow} import com.google.protobuf.ByteString import derevo.derive import
derevo.tagless.applyK -import io.iohk.atala.prism.credentials.CredentialBatchId import io.iohk.atala.prism.crypto.Sha256Digest import io.iohk.atala.prism.identity.{CanonicalPrismDid, PrismDid} import io.iohk.atala.prism.node.models.AtalaOperationId import io.iohk.atala.prism.node.errors.NodeError import io.iohk.atala.prism.node.grpc.ProtoCodecs -import io.iohk.atala.prism.node.models.nodeState.{CredentialBatchState, DIDDataState, LedgerData} +import io.iohk.atala.prism.node.models.nodeState.DIDDataState import io.iohk.atala.prism.node.models.{AtalaOperationInfo, ProtocolVersion} -import io.iohk.atala.prism.node.repositories.{CredentialBatchesRepository, DIDDataRepository} +import io.iohk.atala.prism.node.repositories.DIDDataRepository import io.iohk.atala.prism.node.services.logs.NodeServiceLogging import io.iohk.atala.prism.node.services.models.{getOperationOutput, validateScheduleOperationsRequest} import io.iohk.atala.prism.protos.node_models -import io.iohk.atala.prism.protos.node_models.{DIDData, OperationOutput, SignedAtalaOperation} +import io.iohk.atala.prism.protos.node_models.{DIDData, SignedAtalaOperation} +import io.iohk.atala.prism.protos.node_api.OperationOutput import tofu.higherKind.Mid import tofu.logging.derivation.loggable import tofu.logging.{Logs, ServiceLogging} @@ -38,26 +38,6 @@ trait NodeService[F[_]] { */ def getDidDocumentByDid(didStr: String): F[Either[GettingDidError, DidDocument]] - /** Get information about the credentials batch identified by `batchId`. See `BatchData` for details. - * - * @param batchId - * identifier of the credentials batch. - */ - def getBatchState(batchId: String): F[Either[NodeError, BatchData]] - - /** Retrieves information on credential revocation. - * - * @param batchId - * batch containing the credential. - * @param credentialHash - * hash that represents the credential inside the batch. - * @return - */ - def getCredentialRevocationData( - batchId: String, - credentialHash: ByteString - ): F[Either[NodeError, CredentialRevocationTime]] - - /** Schedules a list of operations for further publication to the underlying ledger.
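* Each entry in the result corresponds to one submitted operation (its AtalaOperationId on success, a NodeError otherwise); the publication itself happens asynchronously.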
* * @param ops @@ -89,8 +69,7 @@ trait NodeService[F[_]] { private final class NodeServiceImpl[F[_]: MonadThrow]( didDataRepository: DIDDataRepository[F], - objectManagement: ObjectManagementService[F], - credentialBatchesRepository: CredentialBatchesRepository[F] + objectManagement: ObjectManagementService[F] ) extends NodeService[F] { override def getDidDocumentByDid(didStr: String): F[Either[GettingDidError, DidDocument]] = Try(PrismDid.canonicalFromString(didStr)).fold( @@ -114,56 +93,6 @@ private final class NodeServiceImpl[F[_]: MonadThrow]( private def toDidDataProto(in: Option[DIDDataState], canon: CanonicalPrismDid): Option[(DIDData, Sha256Digest)] = in.map(didDataState => (ProtoCodecs.toDIDDataProto(canon.getSuffix, didDataState), didDataState.lastOperation)) - override def getBatchState(batchIdStr: String): F[Either[NodeError, BatchData]] = { - // NOTE: CredentialBatchId.fromString returns null and doesn't throw an error when the string isn't successfully parsed - Option(CredentialBatchId.fromString(batchIdStr)) - .fold( - Applicative[F].pure((NodeError.InvalidArgument(s"Invalid batch id: $batchIdStr"): NodeError).asLeft[BatchData]) - )(batchId => getBatchState(batchId)) - } - - private def getBatchState(batchId: CredentialBatchId): F[Either[NodeError, BatchData]] = - for { - lastSyncedTimestamp <- objectManagement.getLastSyncedTimestamp - maybeBatchStateE <- credentialBatchesRepository.getBatchState(batchId) - batchData = maybeBatchStateE.map(BatchData(_, lastSyncedTimestamp)) - } yield batchData - - override def getCredentialRevocationData( - batchIdStr: String, - credentialHashBS: ByteString - ): F[Either[NodeError, CredentialRevocationTime]] = { - // NOTE: CredentialBatchId.fromString returns null and doesn't throw an error when the string isn't successfully parsed - Option(CredentialBatchId.fromString(batchIdStr)) - .fold( - Applicative[F].pure( - (NodeError.InvalidArgument(s"Invalid batch id: $batchIdStr"): NodeError).asLeft[CredentialRevocationTime] - ) - )(batchId => - Try(Sha256Digest.fromBytes(credentialHashBS.toByteArray)).fold( - _ => - Applicative[F].pure( - NodeError - .InvalidArgument( - s"The given byte array does not correspond to a SHA256 hash.
It must have exactly 32 bytes: ${credentialHashBS.toByteArray.map("%02X" format _).mkString}" - ) - .asLeft - ), - credentialHash => getCredentialRevocationData(batchId, credentialHash) - ) - ) - } - - private def getCredentialRevocationData( - batchId: CredentialBatchId, - credentialHash: Sha256Digest - ): F[Either[NodeError, CredentialRevocationTime]] = - for { - lastSyncedTimestamp <- objectManagement.getLastSyncedTimestamp - maybeTime <- credentialBatchesRepository.getCredentialRevocationTime(batchId, credentialHash) - credentialRevocationTime = maybeTime.map(CredentialRevocationTime(_, lastSyncedTimestamp)) - } yield credentialRevocationTime - override def parseOperations(ops: Seq[SignedAtalaOperation]): F[Either[NodeError, List[OperationOutput]]] = Applicative[F].pure { for { @@ -210,7 +139,6 @@ object NodeService { def make[I[_]: Functor, F[_]: MonadThrow]( didDataRepository: DIDDataRepository[F], objectManagement: ObjectManagementService[F], - credentialBatchesRepository: CredentialBatchesRepository[F], logs: Logs[I, F] ): I[NodeService[F]] = { for { @@ -221,8 +149,7 @@ object NodeService { val mid: NodeService[Mid[F, *]] = logs mid attach new NodeServiceImpl[F]( didDataRepository, - objectManagement, - credentialBatchesRepository + objectManagement ) } } @@ -230,31 +157,24 @@ object NodeService { def resource[I[_]: Comonad, F[_]: MonadThrow]( didDataRepository: DIDDataRepository[F], objectManagement: ObjectManagementService[F], - credentialBatchesRepository: CredentialBatchesRepository[F], logs: Logs[I, F] ): Resource[I, NodeService[F]] = Resource.eval( - make(didDataRepository, objectManagement, credentialBatchesRepository, logs) + make(didDataRepository, objectManagement, logs) ) def unsafe[I[_]: Comonad, F[_]: MonadThrow]( didDataRepository: DIDDataRepository[F], objectManagement: ObjectManagementService[F], - credentialBatchesRepository: CredentialBatchesRepository[F], logs: Logs[I, F] ): NodeService[F] = make( didDataRepository, objectManagement, - credentialBatchesRepository, logs ).extract } -final case class BatchData(maybeBatchState: Option[CredentialBatchState], lastSyncedTimestamp: Instant) - -final case class CredentialRevocationTime(maybeLedgerData: Option[LedgerData], lastSyncedTimestamp: Instant) - final case class DidDocument( maybeData: Option[DIDData], maybeOperation: Option[Sha256Digest], diff --git a/node/src/main/scala/io/iohk/atala/prism/node/services/ObjectManagementService.scala b/node/src/main/scala/io/iohk/atala/prism/node/services/ObjectManagementService.scala index ade1bee0c2..d56faed1d7 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/services/ObjectManagementService.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/services/ObjectManagementService.scala @@ -32,7 +32,7 @@ import io.iohk.atala.prism.node.services.ObjectManagementService.SaveObjectError import io.iohk.atala.prism.node.services.logs.ObjectManagementServiceLogs import io.iohk.atala.prism.node.services.models.AtalaObjectNotification import io.iohk.atala.prism.protos.node_models.SignedAtalaOperation -import io.iohk.atala.prism.protos.{node_internal, node_models} +import io.iohk.atala.prism.protos.node_models import io.iohk.atala.prism.node.utils.syntax.DBConnectionOps import tofu.higherKind.Mid import tofu.logging.derivation.loggable @@ -263,7 +263,7 @@ private final class ObjectManagementServiceImpl[F[_]: MonadCancelThrow]( // Deserialize object protobufObject <- Either - .fromTry(node_internal.AtalaObject.validate(obj.byteContent)) + 
.fromTry(node_models.AtalaObject.validate(obj.byteContent)) .leftMap(err => SaveObjectError(err.getMessage)) block = protobufObject.blockContent.get // Retrieve transaction info (transaction identifier, name of the ledger) @@ -317,9 +317,9 @@ object ObjectManagementService { def createAtalaObject( ops: List[SignedAtalaOperation] - ): node_internal.AtalaObject = { - val block = node_internal.AtalaBlock(ops) - node_internal + ): node_models.AtalaObject = { + val block = node_models.AtalaBlock(ops) + node_models .AtalaObject() .withBlockContent(block) } diff --git a/node/src/main/scala/io/iohk/atala/prism/node/services/StatisticsService.scala b/node/src/main/scala/io/iohk/atala/prism/node/services/StatisticsService.scala index f500d04674..6c5ee1310b 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/services/StatisticsService.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/services/StatisticsService.scala @@ -10,18 +10,12 @@ import io.iohk.atala.prism.node.errors import io.iohk.atala.prism.node.metrics.StatisticsCounters import io.iohk.atala.prism.node.metrics.StatisticsCounters.MetricCounter.{ NumberOfAppliedTransactions, - NumberOfCredentialsRevoked, - NumberOfIssuedCredentialBatches, NumberOfPendingOperations, NumberOfPublishedDids, NumberOfRejectedTransactions } import io.iohk.atala.prism.node.models.AtalaOperationStatus -import io.iohk.atala.prism.node.operations.{ - CreateDIDOperation, - IssueCredentialBatchOperation, - RevokeCredentialsOperation -} +import io.iohk.atala.prism.node.operations.CreateDIDOperation import io.iohk.atala.prism.node.repositories.{AtalaOperationsRepository, MetricsCountersRepository} import io.iohk.atala.prism.node.services.logs.StatisticsServiceLogs import tofu.higherKind.Mid @@ -53,10 +47,6 @@ private final class StatisticsServiceImpl[F[_]: Applicative]( getAtalaOperationsCountByStatus(AtalaOperationStatus.RECEIVED) case NumberOfPublishedDids => metricsCountersRepository.getCounter(CreateDIDOperation.metricCounterName).map(Right(_)) - case NumberOfIssuedCredentialBatches => - metricsCountersRepository.getCounter(IssueCredentialBatchOperation.metricCounterName).map(Right(_)) - case NumberOfCredentialsRevoked => - metricsCountersRepository.getCounter(RevokeCredentialsOperation.metricCounterName).map(Right(_)) case NumberOfAppliedTransactions => getAtalaOperationsCountByStatus(AtalaOperationStatus.APPLIED) case NumberOfRejectedTransactions => diff --git a/node/src/main/scala/io/iohk/atala/prism/node/services/SubmissionService.scala b/node/src/main/scala/io/iohk/atala/prism/node/services/SubmissionService.scala index 1205865a5f..ca5b441942 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/services/SubmissionService.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/services/SubmissionService.scala @@ -26,7 +26,7 @@ import io.iohk.atala.prism.node.repositories.{AtalaObjectsTransactionsRepository import io.iohk.atala.prism.node.services.SubmissionService.Config import io.iohk.atala.prism.node.services.logs.SubmissionServiceLogs import io.iohk.atala.prism.node.services.models.RefreshTransactionStatusesResult -import io.iohk.atala.prism.protos.node_internal +import io.iohk.atala.prism.protos.node_models import tofu.higherKind.Mid import tofu.logging.{Logs, ServiceLogging} @@ -191,7 +191,7 @@ private class SubmissionServiceImpl[F[_]: Monad]( private def publishObjectsAndRecordTransaction( atalaObjectsWithParsedContent: List[ - (AtalaObjectInfo, node_internal.AtalaObject) + (AtalaObjectInfo, node_models.AtalaObject) ] ): 
F[List[TransactionInfo]] = { def justKeep( @@ -318,7 +318,7 @@ private class SubmissionServiceImpl[F[_]: Monad]( private def parseObjectContent( atalaObjectInfo: AtalaObjectInfo - ): node_internal.AtalaObject = + ): node_models.AtalaObject = atalaObjectInfo.getAndValidateAtalaObject.getOrElse { throw new RuntimeException( s"Can't extract AtalaObject content for objectId=${atalaObjectInfo.objectId}" @@ -327,7 +327,7 @@ private class SubmissionServiceImpl[F[_]: Monad]( private def publishAndRecordTransaction( atalaObjectInfo: AtalaObjectInfo, - atalaObject: node_internal.AtalaObject + atalaObject: node_models.AtalaObject ): F[Either[NodeError, TransactionInfo]] = { val publicationEitherT = for { // Publish object to the blockchain diff --git a/node/src/main/scala/io/iohk/atala/prism/node/services/logs/NodeServiceLogging.scala b/node/src/main/scala/io/iohk/atala/prism/node/services/logs/NodeServiceLogging.scala index 322a254aad..77c1f4fe92 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/services/logs/NodeServiceLogging.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/services/logs/NodeServiceLogging.scala @@ -11,7 +11,8 @@ import io.iohk.atala.prism.node.errors import io.iohk.atala.prism.node.errors.NodeError import io.iohk.atala.prism.node.models.ProtocolVersion import io.iohk.atala.prism.node.services._ -import io.iohk.atala.prism.protos.node_models.{OperationOutput, SignedAtalaOperation} +import io.iohk.atala.prism.protos.node_models.SignedAtalaOperation +import io.iohk.atala.prism.protos.node_api.OperationOutput import tofu.higherKind.Mid import tofu.logging.ServiceLogging import tofu.syntax.logging._ @@ -31,31 +32,6 @@ class NodeServiceLogging[F[_]: ServiceLogging[*[_], NodeService[F]]: MonadThrow] .onError(errorCause"encountered an error while $description" (_)) } - override def getBatchState(batchId: String): Mid[F, Either[errors.NodeError, BatchData]] = in => - info"getting batch state $batchId" *> in - .flatTap( - _.fold( - err => error"encountered an error while getting batch $batchId state: $err", - _ => info"getting batch $batchId state - successfully done" - ) - ) - .onError(errorCause"encountered an error while getting batch state" (_)) - - override def getCredentialRevocationData( - batchIdStr: String, - credentialHashBS: ByteString - ): Mid[F, Either[errors.NodeError, CredentialRevocationTime]] = { in => - val credentialHashHex = credentialHashBS.toByteArray.map("%02X" format _).mkString - info"getting credential revocation data [batchId=$batchIdStr, credentialHash=$credentialHashHex]" *> in - .flatTap( - _.fold( - err => error"encountered an error while getting credential revocation data for $credentialHashHex: $err", - _ => info"getting credential revocation data for $credentialHashHex - successfully done" - ) - ) - .onError(errorCause"encountered an error while getting credential revocation data " (_)) - } - override def scheduleAtalaOperations( ops: SignedAtalaOperation* ): Mid[F, List[Either[errors.NodeError, AtalaOperationId]]] = in => diff --git a/node/src/main/scala/io/iohk/atala/prism/node/services/logs/UnderlyingLedgerLogs.scala b/node/src/main/scala/io/iohk/atala/prism/node/services/logs/UnderlyingLedgerLogs.scala index 67267019ea..7c2c18f84d 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/services/logs/UnderlyingLedgerLogs.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/services/logs/UnderlyingLedgerLogs.scala @@ -6,7 +6,7 @@ import cats.syntax.flatMap._ import io.iohk.atala.prism.node.models.{AtalaOperationId, Ledger, 
TransactionDetails, TransactionId} import io.iohk.atala.prism.node.cardano.models.CardanoWalletError import io.iohk.atala.prism.node.{PublicationInfo, UnderlyingLedger} -import io.iohk.atala.prism.protos.node_internal.AtalaObject +import io.iohk.atala.prism.protos.node_models.AtalaObject import tofu.higherKind.Mid import tofu.logging.ServiceLogging import tofu.syntax.logging._ diff --git a/node/src/main/scala/io/iohk/atala/prism/node/services/models/package.scala b/node/src/main/scala/io/iohk/atala/prism/node/services/models/package.scala index c43621e5bd..06bc64ee52 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/services/models/package.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/services/models/package.scala @@ -7,20 +7,20 @@ import io.iohk.atala.prism.node.errors.NodeError import io.iohk.atala.prism.node.operations.{ CreateDIDOperation, DeactivateDIDOperation, - IssueCredentialBatchOperation, ProtocolVersionUpdateOperation, - RevokeCredentialsOperation, UpdateDIDOperation, ValidationError, parseOperationWithMockedLedger } -import io.iohk.atala.prism.protos.{node_internal, node_models} -import io.iohk.atala.prism.protos.node_models.{OperationOutput, SignedAtalaOperation} +import io.iohk.atala.prism.protos.node_models +import io.iohk.atala.prism.protos.node_api +import io.iohk.atala.prism.protos.node_models.SignedAtalaOperation +import io.iohk.atala.prism.protos.node_api.OperationOutput import tofu.logging.derivation.loggable package object models { case class AtalaObjectNotification( - atalaObject: node_internal.AtalaObject, + atalaObject: node_models.AtalaObject, transaction: TransactionInfo ) @@ -41,37 +41,25 @@ package object models { case CreateDIDOperation(id, _, _, _, _, _) => OperationOutput( OperationOutput.Result.CreateDidOutput( - node_models.CreateDIDOutput(id.getValue) + node_api.CreateDIDOutput(id.getValue) ) ) case UpdateDIDOperation(_, _, _, _, _) => OperationOutput( OperationOutput.Result.UpdateDidOutput( - node_models.UpdateDIDOutput() - ) - ) - case IssueCredentialBatchOperation(credentialBatchId, _, _, _, _) => - OperationOutput( - OperationOutput.Result.BatchOutput( - node_models.IssueCredentialBatchOutput(credentialBatchId.getId) - ) - ) - case RevokeCredentialsOperation(_, _, _, _, _) => - OperationOutput( - OperationOutput.Result.RevokeCredentialsOutput( - node_models.RevokeCredentialsOutput() + node_api.UpdateDIDOutput() ) ) case ProtocolVersionUpdateOperation(_, _, _, _, _, _) => OperationOutput( OperationOutput.Result.ProtocolVersionUpdateOutput( - node_models.ProtocolVersionUpdateOutput() + node_api.ProtocolVersionUpdateOutput() ) ) case DeactivateDIDOperation(_, _, _, _) => OperationOutput( OperationOutput.Result.DeactivateDidOutput( - node_models.DeactivateDIDOutput() + node_api.DeactivateDIDOutput() ) ) case other => diff --git a/node/src/main/scala/io/iohk/atala/prism/node/utils/GrpcUtils.scala b/node/src/main/scala/io/iohk/atala/prism/node/utils/GrpcUtils.scala index c6a157fa8a..497418419a 100644 --- a/node/src/main/scala/io/iohk/atala/prism/node/utils/GrpcUtils.scala +++ b/node/src/main/scala/io/iohk/atala/prism/node/utils/GrpcUtils.scala @@ -11,7 +11,7 @@ import java.io.File import scala.concurrent.blocking import io.grpc.stub.AbstractStub import io.iohk.atala.prism.protos.node_api.ScheduleOperationsResponse -import io.iohk.atala.prism.protos.node_models.OperationOutput +import io.iohk.atala.prism.protos.node_api.OperationOutput object GrpcUtils { diff --git a/node/src/test/scala/io/iohk/atala/prism/node/DataPreparation.scala 
b/node/src/test/scala/io/iohk/atala/prism/node/DataPreparation.scala index ce34e740bf..ab230efc5e 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/DataPreparation.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/DataPreparation.scala @@ -6,8 +6,7 @@ import cats.implicits._ import com.google.protobuf.ByteString import doobie.implicits._ import doobie.util.transactor.Transactor -import io.iohk.atala.prism.credentials.CredentialBatchId -import io.iohk.atala.prism.crypto.{MerkleRoot, Sha256, Sha256Digest} +import io.iohk.atala.prism.crypto.Sha256 import io.iohk.atala.prism.node.logging.TraceId import io.iohk.atala.prism.node.logging.TraceId.IOWithTraceIdContext import io.iohk.atala.prism.node.cardano.{LAST_SYNCED_BLOCK_NO, LAST_SYNCED_BLOCK_TIMESTAMP} @@ -18,13 +17,11 @@ import io.iohk.atala.prism.node.models._ import io.iohk.atala.prism.node.operations.ApplyOperationConfig import io.iohk.atala.prism.node.operations.CreateDIDOperationSpec.{issuingEcKeyData, masterEcKeyData} import io.iohk.atala.prism.node.repositories.daos.AtalaObjectsDAO.AtalaObjectCreateData -import io.iohk.atala.prism.node.repositories.daos.CredentialBatchesDAO.CreateCredentialBatchData import io.iohk.atala.prism.node.repositories.daos.{ AtalaObjectTransactionSubmissionsDAO, AtalaObjectsDAO, AtalaOperationsDAO, ContextDAO, - CredentialBatchesDAO, DIDDataDAO, KeyValuesDAO, PublicKeysDAO, @@ -33,7 +30,7 @@ import io.iohk.atala.prism.node.repositories.daos.{ import io.iohk.atala.prism.node.services.{BlockProcessingServiceSpec, ObjectManagementService, SubmissionService} import io.iohk.atala.prism.protos.models.TimestampInfo import io.iohk.atala.prism.protos.node_models.SignedAtalaOperation -import io.iohk.atala.prism.protos.{node_api, node_internal, node_models} +import io.iohk.atala.prism.protos.{node_api, node_models} import org.scalatest.OptionValues._ import java.time.Instant @@ -207,77 +204,26 @@ object DataPreparation { .unsafeRunSync() } - // *************************************** - // Credential batches (slayer 0.3) - // *************************************** - - def createBatch( - batchId: CredentialBatchId, - lastOperation: Sha256Digest, - issuerDIDSuffix: DidSuffix, - merkleRoot: MerkleRoot, - issuedOn: LedgerData - )(implicit database: Transactor[IO]): Unit = { - CredentialBatchesDAO - .insert( - CreateCredentialBatchData( - batchId = batchId, - lastOperation = lastOperation, - issuerDIDSuffix = issuerDIDSuffix, - merkleRoot = merkleRoot, - ledgerData = issuedOn - ) - ) - .transact(database) - .unsafeRunSync() - } - - def revokeCredentialBatch( - batchId: CredentialBatchId, - revocationLedgerData: LedgerData - )(implicit database: Transactor[IO]): Unit = { - CredentialBatchesDAO - .revokeEntireBatch(batchId, revocationLedgerData) - .transact(database) - .unsafeRunSync() - () - } - - def revokeCredentials( - batchId: CredentialBatchId, - credentialHashes: List[Sha256Digest], - revocationLedgerData: LedgerData - )(implicit database: Transactor[IO]): Unit = { - CredentialBatchesDAO - .revokeCredentials( - batchId, - credentialHashes, - revocationLedgerData - ) - .transact(database) - .unsafeRunSync() - } - // *************************************** // Other useful methods // *************************************** def createBlock( signedOperation: node_models.SignedAtalaOperation = BlockProcessingServiceSpec.signedCreateDidOperation - ): node_internal.AtalaBlock = { - node_internal.AtalaBlock(operations = Seq(signedOperation)) + ): node_models.AtalaBlock = { + node_models.AtalaBlock(operations = 
Seq(signedOperation)) } def createBlock( signedOperations: List[node_models.SignedAtalaOperation] - ): node_internal.AtalaBlock = { - node_internal.AtalaBlock(operations = signedOperations) + ): node_models.AtalaBlock = { + node_models.AtalaBlock(operations = signedOperations) } def createAtalaObject( - block: node_internal.AtalaBlock = createBlock() - ): node_internal.AtalaObject = - node_internal + block: node_models.AtalaBlock = createBlock() + ): node_models.AtalaObject = + node_models .AtalaObject() .withBlockContent(block) def setAtalaObjectTransactionSubmissionStatus( @@ -316,8 +262,8 @@ object DataPreparation { atalaOperations: List[SignedAtalaOperation], status: AtalaOperationStatus )(implicit xa: Transactor[IO]): (AtalaObjectId, List[AtalaOperationId]) = { - val block = node_internal.AtalaBlock(atalaOperations) - val obj = node_internal + val block = node_models.AtalaBlock(atalaOperations) + val obj = node_models .AtalaObject() .withBlockContent(block) val objBytes = obj.toByteArray diff --git a/node/src/test/scala/io/iohk/atala/prism/node/NodeExplorerServiceSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/NodeExplorerServiceSpec.scala index b2274523a5..f46cf0e029 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/NodeExplorerServiceSpec.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/NodeExplorerServiceSpec.scala @@ -7,7 +7,6 @@ import com.google.protobuf.ByteString import io.grpc.inprocess.{InProcessChannelBuilder, InProcessServerBuilder} import io.grpc.stub.MetadataUtils import io.grpc.{ManagedChannel, Server, StatusRuntimeException} -import io.iohk.atala.prism.node.AtalaWithPostgresSpec import io.iohk.atala.prism.node.auth.WhitelistedAuthenticatorF import io.iohk.atala.prism.node.auth.grpc.GrpcAuthenticatorInterceptor import io.iohk.atala.prism.crypto.EC.{INSTANCE => EC} @@ -33,13 +32,12 @@ import io.iohk.atala.prism.node.services.{ import io.iohk.atala.prism.node.nonce.{ClientHelper, RequestAuthenticator} import io.iohk.atala.prism.protos.node_api.GetScheduledOperationsRequest.OperationType.{ AnyOperationType, - CreateDidOperationOperationType, - IssueCredentialBatchOperationType + CreateDidOperationOperationType } import io.iohk.atala.prism.protos.node_api.NodeExplorerServiceGrpc.NodeExplorerServiceBlockingClient import io.iohk.atala.prism.protos.node_api._ import io.iohk.atala.prism.protos.node_models.SignedAtalaOperation -import io.iohk.atala.prism.protos.{node_api, node_internal, node_models} +import io.iohk.atala.prism.protos.{node_api, node_models} import io.iohk.atala.prism.node.utils.IOUtils.ioComonad import org.mockito.scalatest.{MockitoSugar, ResetMocksAfterEachTest} import org.scalatest.BeforeAndAfterEach @@ -176,9 +174,9 @@ class NodeExplorerServiceSpec def sign(op: node_models.AtalaOperation): SignedAtalaOperation = BlockProcessingServiceSpec.signOperation(op, "master", CreateDIDOperationSpec.masterKeys.getPrivateKey) - def toAtalaObject(ops: List[node_models.AtalaOperation]): node_internal.AtalaObject = { - val block = node_internal.AtalaBlock(ops.map(sign)) - node_internal.AtalaObject( + def toAtalaObject(ops: List[node_models.AtalaOperation]): node_models.AtalaObject = { + val block = node_models.AtalaBlock(ops.map(sign)) + node_models.AtalaObject( blockContent = Some(block) ) } @@ -196,7 +194,6 @@ class NodeExplorerServiceSpec val ops2 = List[node_models.AtalaOperation]( UpdateDIDOperationSpec.exampleRemoveOperation, - IssueCredentialBatchOperationSpec.exampleOperation, CreateDIDOperationSpec.exampleOperationWithCompressedKeys ) @@ 
-205,14 +202,11 @@ class NodeExplorerServiceSpec CreateDIDOperationSpec.exampleOperation, UpdateDIDOperationSpec.exampleAddAndRemoveOperation, UpdateDIDOperationSpec.exampleRemoveOperation, - IssueCredentialBatchOperationSpec.exampleOperation, CreateDIDOperationSpec.exampleOperationWithCompressedKeys ) val opsCreation: List[node_models.AtalaOperation] = List(CreateDIDOperationSpec.exampleOperation, CreateDIDOperationSpec.exampleOperationWithCompressedKeys) - val opsIssuance: List[node_models.AtalaOperation] = List(IssueCredentialBatchOperationSpec.exampleOperation) - val obj1 = toAtalaObject(ops1) val obj2 = toAtalaObject(ops2) @@ -229,12 +223,9 @@ class NodeExplorerServiceSpec withNonce(service).getScheduledOperations(GetScheduledOperationsRequest(AnyOperationType)) val responseCreation = withNonce(service).getScheduledOperations(GetScheduledOperationsRequest(CreateDidOperationOperationType)) - val responseIssuance = - withNonce(service).getScheduledOperations(GetScheduledOperationsRequest(IssueCredentialBatchOperationType)) responseAny.scheduledOperations.map(_.operation.get) must be(allOps) responseCreation.scheduledOperations.map(_.operation.get) must be(opsCreation) - responseIssuance.scheduledOperations.map(_.operation.get) must be(opsIssuance) } } } diff --git a/node/src/test/scala/io/iohk/atala/prism/node/NodeServiceSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/NodeServiceSpec.scala index 20d4702003..773090166c 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/NodeServiceSpec.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/NodeServiceSpec.scala @@ -8,9 +8,7 @@ import com.google.protobuf.ByteString import doobie.implicits._ import io.grpc.inprocess.{InProcessChannelBuilder, InProcessServerBuilder} import io.grpc.{ManagedChannel, Server, Status, StatusRuntimeException} -import io.iohk.atala.prism.node.AtalaWithPostgresSpec -import io.iohk.atala.prism.credentials.CredentialBatchId -import io.iohk.atala.prism.crypto.{MerkleRoot, Sha256} +import io.iohk.atala.prism.crypto.Sha256 import io.iohk.atala.prism.identity.{PrismDid => DID} import io.iohk.atala.prism.node.logging.TraceId import io.iohk.atala.prism.node.logging.TraceId.IOWithTraceIdContext @@ -18,15 +16,15 @@ import io.iohk.atala.prism.node.models.{AtalaOperationId, DidSuffix, Ledger, Tra import io.iohk.atala.prism.node.errors.NodeError import io.iohk.atala.prism.node.grpc.ProtoCodecs import io.iohk.atala.prism.node.models._ -import io.iohk.atala.prism.node.models.nodeState.{CredentialBatchState, LedgerData} +import io.iohk.atala.prism.node.models.nodeState.LedgerData import io.iohk.atala.prism.node.operations._ import io.iohk.atala.prism.node.operations.path.{Path, ValueAtPath} import io.iohk.atala.prism.node.repositories.daos.{DIDDataDAO, PublicKeysDAO} -import io.iohk.atala.prism.node.repositories.{CredentialBatchesRepository, DIDDataRepository} +import io.iohk.atala.prism.node.repositories.DIDDataRepository import io.iohk.atala.prism.node.services.{BlockProcessingServiceSpec, NodeService, ObjectManagementService} import io.iohk.atala.prism.protos.models.TimestampInfo import io.iohk.atala.prism.protos.node_api._ -import io.iohk.atala.prism.protos.node_models.OperationOutput +import io.iohk.atala.prism.protos.node_api.OperationOutput import io.iohk.atala.prism.protos.{common_models, node_api, node_models} import io.iohk.atala.prism.node.utils.IOUtils._ import io.iohk.atala.prism.node.utils.syntax._ @@ -51,8 +49,6 @@ class NodeServiceSpec private val logs = Logs.withContext[IO, IOWithTraceIdContext] 
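(For context on the mechanical change running through the DataPreparation and metadata hunks in this part of the diff: the former node_internal messages are folded into node_models. A minimal sketch of the consolidated construction path, using only the ScalaPB calls visible in this diff; the helper name toObjectBytes is illustrative, not from the codebase:

    import io.iohk.atala.prism.protos.node_models

    // Wrap signed operations in an AtalaBlock, embed it in an AtalaObject,
    // then serialize -- the same sequence DataPreparation now performs
    // entirely within the node_models package.
    def toObjectBytes(ops: Seq[node_models.SignedAtalaOperation]): Array[Byte] = {
      val block = node_models.AtalaBlock(operations = ops)
      node_models.AtalaObject().withBlockContent(block).toByteArray
    }
)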
private val objectManagementService = mock[ObjectManagementService[IOWithTraceIdContext]] - private val credentialBatchesRepository = - mock[CredentialBatchesRepository[IOWithTraceIdContext]] def fake[T](a: T): ReaderT[IO, TraceId, T] = ReaderT.apply[IO, TraceId, T](_ => IO.pure(a)) @@ -74,7 +70,6 @@ class NodeServiceSpec NodeService.unsafe( didDataRepository, objectManagementService, - credentialBatchesRepository, logs ) ), @@ -315,95 +310,6 @@ class NodeServiceSpec } } - "NodeService.issueCredentialBatch" should { - "schedule IssueCredentialBatch operation" in { - val operation = BlockProcessingServiceSpec.signOperation( - IssueCredentialBatchOperationSpec.exampleOperation, - "master", - CreateDIDOperationSpec.masterKeys.getPrivateKey - ) - val operationId = AtalaOperationId.of(operation) - mockOperationId(operationId) - - val response = service - .scheduleOperations( - node_api.ScheduleOperationsRequest(List(operation)) - ) - .outputs - .head - - val expectedBatchId = Sha256 - .compute( - IssueCredentialBatchOperationSpec.exampleOperation.getIssueCredentialBatch.getCredentialBatchData.toByteArray - ) - .getHexValue - - response.getBatchOutput.batchId mustBe expectedBatchId - response.getOperationId mustEqual operationId.toProtoByteString - verify(objectManagementService).scheduleAtalaOperations(operation) - verifyNoMoreInteractions(objectManagementService) - } - - "return error when provided operation is invalid" in { - val operation = BlockProcessingServiceSpec.signOperation( - IssueCredentialBatchOperationSpec.exampleOperation - .update( - _.issueCredentialBatch.credentialBatchData.merkleRoot := ByteString - .copyFrom("abc".getBytes) - ), - "master", - CreateDIDOperationSpec.masterKeys.getPrivateKey - ) - - val error = intercept[StatusRuntimeException] { - service.scheduleOperations( - node_api.ScheduleOperationsRequest(List(operation)) - ) - } - error.getStatus.getCode mustEqual Status.Code.INVALID_ARGUMENT - } - } - - "NodeService.revokeCredentials" should { - "schedule RevokeCredentials operation" in { - val operation = BlockProcessingServiceSpec.signOperation( - RevokeCredentialsOperationSpec.revokeFullBatchOperation, - "master", - CreateDIDOperationSpec.masterKeys.getPrivateKey - ) - val operationId = AtalaOperationId.of(operation) - mockOperationId(operationId) - - val response = service - .scheduleOperations( - node_api.ScheduleOperationsRequest(List(operation)) - ) - .outputs - .head - - response.getOperationId mustEqual operationId.toProtoByteString - verify(objectManagementService).scheduleAtalaOperations(operation) - verifyNoMoreInteractions(objectManagementService) - } - - "return error when provided operation is invalid" in { - val operation = BlockProcessingServiceSpec.signOperation( - RevokeCredentialsOperationSpec.revokeFullBatchOperation.update( - _.revokeCredentials.credentialBatchId := "" - ), - "master", - CreateDIDOperationSpec.masterKeys.getPrivateKey - ) - - val error = intercept[StatusRuntimeException] { - service.scheduleOperations( - node_api.ScheduleOperationsRequest(List(operation)) - ) - } - error.getStatus.getCode mustEqual Status.Code.INVALID_ARGUMENT - } - } - "NodeService.getBuildInfo" should { "return proper build and protocol information" in { val currentNetworkProtocolMajorVersion = 2 @@ -537,271 +443,6 @@ class NodeServiceSpec } } - "NodeService.getBatchState" should { - "fail when batchId is not valid" in { - val invalidBatchId = "invalid@_?" 
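(The fake helper defined just above is what lets these specs stub ReaderT-returning service methods via mockito's doReturn. A self-contained sketch of the same pattern, with a hypothetical Ctx standing in for TraceId:

    import cats.data.ReaderT
    import cats.effect.IO

    final case class Ctx(id: String) // stand-in for TraceId

    // Lift a pure value into ReaderT[IO, Ctx, *] so a mock can return it
    // from a method whose real implementation is effectful.
    def fake[T](a: T): ReaderT[IO, Ctx, T] = ReaderT(_ => IO.pure(a))
)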
- val requestWithInvalidId = GetBatchStateRequest(batchId = invalidBatchId) - val expectedMessage = s"INVALID_ARGUMENT: Invalid batch id: $invalidBatchId" - - doReturn(fake[Instant](dummySyncTimestamp)) - .when(objectManagementService) - .getLastSyncedTimestamp - - val error = intercept[RuntimeException] { - service.getBatchState(requestWithInvalidId) - } - error.getMessage must be(expectedMessage) - } - - "return an error when the CredentialBatchesRepository fails" in { - val validBatchId = - CredentialBatchId.fromDigest(Sha256.compute("valid".getBytes())) - val requestWithValidId = - GetBatchStateRequest(batchId = validBatchId.getId) - - val errorMsg = "an unexpected error" - val repositoryError = - ReaderT.liftF( - IO.raiseError[Either[NodeError, Option[CredentialBatchState]]]( - new RuntimeException(errorMsg) - ) - ) - - doReturn(repositoryError) - .when(credentialBatchesRepository) - .getBatchState(validBatchId) - doReturn(fake[Instant](dummySyncTimestamp)) - .when(objectManagementService) - .getLastSyncedTimestamp - - val err = intercept[RuntimeException]( - service.getBatchState(requestWithValidId) - ) - err.getMessage.endsWith(errorMsg) must be(true) - } - - "return empty response when the CredentialBatchesRepository reports no results" in { - val validBatchId = - CredentialBatchId.fromDigest(Sha256.compute("valid".getBytes())) - val requestWithValidId = - GetBatchStateRequest(batchId = validBatchId.getId) - - val repositoryError = ReaderT.liftF( - IO.pure[Either[NodeError, Option[CredentialBatchState]]](Right(None)) - ) - - doReturn(repositoryError) - .when(credentialBatchesRepository) - .getBatchState(validBatchId) - doReturn(fake[Instant](dummySyncTimestamp)) - .when(objectManagementService) - .getLastSyncedTimestamp - - val response = service.getBatchState(requestWithValidId) - response.issuerDid must be("") - response.merkleRoot must be(empty) - response.publicationLedgerData must be(empty) - response.lastSyncedBlockTimestamp must be( - Some(dummySyncTimestamp.toProtoTimestamp) - ) - } - - "return batch state when CredentialBatchesRepository succeeds" in { - val validBatchId = - CredentialBatchId.fromDigest(Sha256.compute("valid".getBytes())) - val requestWithValidId = - GetBatchStateRequest(batchId = validBatchId.getId) - - val issuerDIDSuffix: DidSuffix = - DidSuffix(Sha256.compute("testDID".getBytes()).getHexValue) - val issuedOnLedgerData = dummyLedgerData - val merkleRoot = new MerkleRoot(Sha256.compute("content".getBytes())) - val credState = - CredentialBatchState( - merkleRoot = merkleRoot, - batchId = validBatchId, - issuerDIDSuffix = issuerDIDSuffix, - issuedOn = issuedOnLedgerData, - revokedOn = None, - lastOperation = Sha256.compute("lastOp".getBytes()) - ) - - val repositoryResponse = - ReaderT.liftF( - IO.pure[Either[NodeError, Option[CredentialBatchState]]]( - Right(Some(credState)) - ) - ) - - val ledgerDataProto = node_models - .LedgerData() - .withTransactionId(dummyLedgerData.transactionId.toString) - .withLedger(common_models.Ledger.IN_MEMORY) - .withTimestampInfo( - node_models - .TimestampInfo() - .withBlockTimestamp( - Instant - .ofEpochMilli( - issuedOnLedgerData.timestampInfo.getAtalaBlockTimestamp - ) - .toProtoTimestamp - ) - .withBlockSequenceNumber( - issuedOnLedgerData.timestampInfo.getAtalaBlockSequenceNumber - ) - .withOperationSequenceNumber( - issuedOnLedgerData.timestampInfo.getOperationSequenceNumber - ) - ) - - doReturn( - fake[Instant](dummySyncTimestamp) - ).when(objectManagementService).getLastSyncedTimestamp - 
doReturn(repositoryResponse) - .when(credentialBatchesRepository) - .getBatchState(validBatchId) - - val response = service.getBatchState(requestWithValidId) - response.issuerDid must be(issuerDIDSuffix.getValue) - response.merkleRoot.toByteArray.toVector must be( - merkleRoot.getHash.getValue - ) - response.publicationLedgerData must be(Some(ledgerDataProto)) - response.revocationLedgerData must be(empty) - response.lastSyncedBlockTimestamp must be( - Some(dummySyncTimestamp.toProtoTimestamp) - ) - } - } - - "NodeService.getCredentialRevocationTime" should { - "fail when batchId is not valid" in { - val invalidBatchId = "invalid@_?" - val validCredentialHash = Sha256.compute("random".getBytes()) - val requestWithInvalidId = - GetCredentialRevocationTimeRequest( - batchId = invalidBatchId, - credentialHash = ByteString.copyFrom(validCredentialHash.getValue) - ) - val expectedMessage = s"INVALID_ARGUMENT: Invalid batch id: $invalidBatchId" - - doReturn(fake[Instant](dummySyncTimestamp)) - .when(objectManagementService) - .getLastSyncedTimestamp - val error = intercept[RuntimeException] { - service.getCredentialRevocationTime(requestWithInvalidId) - } - error.getMessage must be(expectedMessage) - } - - "fail when credentialHash is not valid" in { - val validBatchId = - CredentialBatchId.fromDigest(Sha256.compute("random".getBytes())) - val requestWithInvalidCredentialHash = - GetCredentialRevocationTimeRequest( - batchId = validBatchId.getId, - credentialHash = ByteString.copyFrom(Array[Byte](0x4a)) - ) - - val expectedMessage = - "INVALID_ARGUMENT: The given byte array does not correspond to a SHA256 hash. It must have exactly 32 bytes: 4A" - - doReturn( - fake[Instant](dummySyncTimestamp) - ).when(objectManagementService).getLastSyncedTimestamp - - val error = intercept[RuntimeException] { - service.getCredentialRevocationTime(requestWithInvalidCredentialHash) - } - - error.getMessage must be(expectedMessage) - } - - "return empty timestamp when CredentialBatchesRepository succeeds returning None" in { - val validBatchId = - CredentialBatchId.fromDigest(Sha256.compute("valid".getBytes())) - val validCredentialHash = Sha256.compute("random".getBytes()) - val validRequest = GetCredentialRevocationTimeRequest( - batchId = validBatchId.getId, - credentialHash = ByteString.copyFrom(validCredentialHash.getValue) - ) - - val repositoryResponse = ReaderT.liftF( - IO.pure[Either[NodeError, Option[LedgerData]]](Right(None)) - ) - - doReturn( - fake[Instant](dummySyncTimestamp) - ).when(objectManagementService).getLastSyncedTimestamp - - doReturn(repositoryResponse) - .when(credentialBatchesRepository) - .getCredentialRevocationTime(validBatchId, validCredentialHash) - - val response = service.getCredentialRevocationTime(validRequest) - response.revocationLedgerData must be(empty) - response.lastSyncedBlockTimestamp must be( - Some(dummySyncTimestamp.toProtoTimestamp) - ) - } - - "return correct timestamp when CredentialBatchesRepository succeeds returning a time" in { - val validBatchId = - CredentialBatchId.fromDigest(Sha256.compute("valid".getBytes())) - val validCredentialHash = Sha256.compute("random".getBytes()) - val validRequest = GetCredentialRevocationTimeRequest( - batchId = validBatchId.getId, - credentialHash = ByteString.copyFrom(validCredentialHash.getValue) - ) - val revocationDate = new TimestampInfo(Instant.now().toEpochMilli, 1, 1) - val revocationLedgerData = LedgerData( - TransactionId - .from(Array.fill[Byte](TransactionId.config.size.toBytes.toInt)(1)) - .value, - 
Ledger.InMemory, - revocationDate - ) - - val repositoryResponse = - ReaderT.liftF( - IO.pure[Either[NodeError, Option[LedgerData]]]( - Right(Some(revocationLedgerData)) - ) - ) - - val timestampInfoProto = node_models - .TimestampInfo() - .withBlockTimestamp( - Instant - .ofEpochMilli(revocationDate.getAtalaBlockTimestamp) - .toProtoTimestamp - ) - .withBlockSequenceNumber(revocationDate.getAtalaBlockSequenceNumber) - .withOperationSequenceNumber(revocationDate.getOperationSequenceNumber) - - val revocationLedgerDataProto = node_models - .LedgerData() - .withTransactionId(revocationLedgerData.transactionId.toString) - .withLedger(common_models.Ledger.IN_MEMORY) - .withTimestampInfo(timestampInfoProto) - - doReturn( - fake[Instant](dummySyncTimestamp) - ).when(objectManagementService).getLastSyncedTimestamp - - doReturn(repositoryResponse) - .when(credentialBatchesRepository) - .getCredentialRevocationTime(validBatchId, validCredentialHash) - - val response = service.getCredentialRevocationTime(validRequest) - response.revocationLedgerData must be(Some(revocationLedgerDataProto)) - response.lastSyncedBlockTimestamp must be( - Some(dummySyncTimestamp.toProtoTimestamp) - ) - } - } - "NodeService.scheduleOperations" should { "fail when called with an empty sequence of operations" in { val error = intercept[StatusRuntimeException] { @@ -825,10 +466,9 @@ class NodeServiceSpec ) val invalidOperation = BlockProcessingServiceSpec.signOperation( - IssueCredentialBatchOperationSpec.exampleOperation + CreateDIDOperationSpec.exampleOperation .update( - _.issueCredentialBatch.credentialBatchData.merkleRoot := ByteString - .copyFrom("abc".getBytes) + _.createDid.didData.context := Seq("abc") ), "master", CreateDIDOperationSpec.masterKeys.getPrivateKey @@ -845,63 +485,6 @@ class NodeServiceSpec verifyNoMoreInteractions(objectManagementService) } - "properly return the result of a CreateDID operation and an IssueCredentialBatch operation" in { - val createDIDOperation = BlockProcessingServiceSpec.signOperation( - CreateDIDOperationSpec.exampleOperation, - "master", - CreateDIDOperationSpec.masterKeys.getPrivateKey - ) - val createDIDOperationId = AtalaOperationId.of(createDIDOperation) - - val issuanceOperation = BlockProcessingServiceSpec.signOperation( - IssueCredentialBatchOperationSpec.exampleOperation, - "master", - CreateDIDOperationSpec.masterKeys.getPrivateKey - ) - val issuanceOperationId = AtalaOperationId.of(issuanceOperation) - - doReturn( - fake[List[Either[NodeError, AtalaOperationId]]]( - List(Right(createDIDOperationId), Right(issuanceOperationId)) - ) - ).when(objectManagementService) - .scheduleAtalaOperations(*) - - val response = service.scheduleOperations( - node_api - .ScheduleOperationsRequest() - .withSignedOperations(Seq(createDIDOperation, issuanceOperation)) - ) - - val expectedBatchId = - Sha256 - .compute( - IssueCredentialBatchOperationSpec.exampleOperation.getIssueCredentialBatch.getCredentialBatchData.toByteArray - ) - .getHexValue - - val expectedDIDSuffix = - Sha256 - .compute(CreateDIDOperationSpec.exampleOperation.toByteArray) - .getHexValue - - response.outputs.size mustBe (2) - - response.outputs.head.getCreateDidOutput.didSuffix mustBe expectedDIDSuffix - response.outputs.head.operationMaybe.operationId.value mustEqual createDIDOperationId.toProtoByteString - response.outputs.head.operationMaybe.error mustBe None - - response.outputs.last.getBatchOutput.batchId mustBe expectedBatchId - response.outputs.last.operationMaybe.operationId.value mustBe 
issuanceOperationId.toProtoByteString - response.outputs.last.operationMaybe.error mustBe None - - verify(objectManagementService).scheduleAtalaOperations( - createDIDOperation, - issuanceOperation - ) - verifyNoMoreInteractions(objectManagementService) - } - "properly return the result of a CreateDID operation and a DID Update operation" in { val createDIDOperation = BlockProcessingServiceSpec.signOperation( CreateDIDOperationSpec.exampleOperation, @@ -941,7 +524,7 @@ class NodeServiceSpec response.outputs.head.operationMaybe.error mustBe None response.outputs.last.result mustBe OperationOutput.Result - .UpdateDidOutput(node_models.UpdateDIDOutput()) + .UpdateDidOutput(node_api.UpdateDIDOutput()) response.outputs.last.operationMaybe.operationId.value mustEqual updateOperationId.toProtoByteString response.outputs.last.operationMaybe.error mustBe None @@ -951,37 +534,5 @@ class NodeServiceSpec ) verifyNoMoreInteractions(objectManagementService) } - - "properly return the result of a RevokeCredentials operation" in { - val revokeOperation = BlockProcessingServiceSpec.signOperation( - RevokeCredentialsOperationSpec.revokeFullBatchOperation, - "master", - CreateDIDOperationSpec.masterKeys.getPrivateKey - ) - val revokeOperationId = AtalaOperationId.of(revokeOperation) - - doReturn( - fake[List[Either[NodeError, AtalaOperationId]]]( - List(Right(revokeOperationId)) - ) - ) - .when(objectManagementService) - .scheduleAtalaOperations(*) - - val response = service.scheduleOperations( - node_api - .ScheduleOperationsRequest() - .withSignedOperations(Seq(revokeOperation)) - ) - - response.outputs.size mustBe (1) - response.outputs.head.getRevokeCredentialsOutput mustBe node_models - .RevokeCredentialsOutput() - response.outputs.head.operationMaybe.operationId.value mustEqual revokeOperationId.toProtoByteString - response.outputs.head.operationMaybe.error mustBe None - - verify(objectManagementService).scheduleAtalaOperations(revokeOperation) - verifyNoMoreInteractions(objectManagementService) - } } } diff --git a/node/src/test/scala/io/iohk/atala/prism/node/cardano/models/AtalaObjectMetadataSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/cardano/models/AtalaObjectMetadataSpec.scala index 96ce328b2f..3e1b212260 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/cardano/models/AtalaObjectMetadataSpec.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/cardano/models/AtalaObjectMetadataSpec.scala @@ -4,7 +4,7 @@ import com.google.protobuf.ByteString import io.circe.{Json, parser} import io.iohk.atala.prism.identity.PrismDid.{getDEFAULT_MASTER_KEY_ID => masterKeyId} import io.iohk.atala.prism.node.cardano.models.AtalaObjectMetadata.METADATA_PRISM_INDEX -import io.iohk.atala.prism.protos.{node_internal, node_models} +import io.iohk.atala.prism.protos.node_models import org.scalatest.OptionValues._ import org.scalatest.matchers.must.Matchers._ import org.scalatest.wordspec.AnyWordSpec @@ -42,10 +42,10 @@ class AtalaObjectMetadataSpec extends AnyWordSpec { ) ) - private val atalaObjectWithVersion = node_internal + private val atalaObjectWithVersion = node_models .AtalaObject() .withBlockContent( - node_internal.AtalaBlock( + node_models.AtalaBlock( operations = atalaOperations, // Unknown field corresponding to a deprecated field "version", equal to "1" unknownFields = UnknownFieldSet.empty.withField( @@ -59,10 +59,10 @@ class AtalaObjectMetadataSpec extends AnyWordSpec { ) ) ) - private val atalaObjectWithoutVersion = node_internal + private val atalaObjectWithoutVersion = 
node_models .AtalaObject() .withBlockContent( - node_internal.AtalaBlock( + node_models.AtalaBlock( operations = atalaOperations ) ) diff --git a/node/src/test/scala/io/iohk/atala/prism/node/metrics/OperationsCounterSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/metrics/OperationsCounterSpec.scala index 25226f52c3..c3ff8525df 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/metrics/OperationsCounterSpec.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/metrics/OperationsCounterSpec.scala @@ -10,8 +10,6 @@ import org.scalatest.matchers.must.Matchers import io.iohk.atala.prism.node.operations.{ CreateDIDOperationSpec, UpdateDIDOperationSpec, - IssueCredentialBatchOperationSpec, - RevokeCredentialsOperationSpec, ProtocolVersionUpdateOperationSpec, DeactivateDIDOperationSpec } @@ -31,8 +29,6 @@ class OperationsCounterSpec extends AnyWordSpec with Matchers { // Includes all type of update actions val updateDidOperation = sign(UpdateDIDOperationSpec.exampleAllActionsOperation) - val issueCredentialBatchOperation = sign(IssueCredentialBatchOperationSpec.exampleOperation) - val revokeCredentialsOperation = sign(RevokeCredentialsOperationSpec.revokeFullBatchOperation) val protocolVersionUpdateOperation = sign( ProtocolVersionUpdateOperationSpec.protocolUpdateOperation( ProtocolVersionUpdateOperationSpec.protocolVersionInfo1 @@ -43,8 +39,6 @@ class OperationsCounterSpec extends AnyWordSpec with Matchers { val operations = List( createDidOperation, updateDidOperation, - issueCredentialBatchOperation, - revokeCredentialsOperation, protocolVersionUpdateOperation, deactivateDIDOperation ) diff --git a/node/src/test/scala/io/iohk/atala/prism/node/models/AtalaObjectInfoSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/models/AtalaObjectInfoSpec.scala index 2589b400d6..aa1305632f 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/models/AtalaObjectInfoSpec.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/models/AtalaObjectInfoSpec.scala @@ -4,7 +4,7 @@ import io.iohk.atala.prism.node.AtalaWithPostgresSpec import io.iohk.atala.prism.node.operations.CreateDIDOperationSpec import io.iohk.atala.prism.node.services.BlockProcessingServiceSpec import io.iohk.atala.prism.protos.node_models.SignedAtalaOperation -import io.iohk.atala.prism.protos.node_internal +import io.iohk.atala.prism.protos.node_models import org.scalatest.OptionValues._ @@ -24,8 +24,8 @@ class AtalaObjectInfoSpec extends AtalaWithPostgresSpec { ops: Seq[SignedAtalaOperation], status: AtalaObjectStatus = AtalaObjectStatus.Pending ) = { - val block = node_internal.AtalaBlock(ops) - val blockContent = node_internal + val block = node_models.AtalaBlock(ops) + val blockContent = node_models .AtalaObject() .withBlockContent(block) diff --git a/node/src/test/scala/io/iohk/atala/prism/node/nonce/ClientHelperSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/nonce/ClientHelperSpec.scala index b677ea54ef..ef9c14d05e 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/nonce/ClientHelperSpec.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/nonce/ClientHelperSpec.scala @@ -3,7 +3,7 @@ package io.iohk.atala.prism.node.nonce import io.iohk.atala.prism.node.DIDUtil import io.iohk.atala.prism.node.auth.utils.DIDUtils import io.iohk.atala.prism.crypto.EC.{INSTANCE => EC} -import io.iohk.atala.prism.protos.connector_api +import io.iohk.atala.prism.protos.common_models import org.scalatest.OptionValues._ import org.scalatest.concurrent.ScalaFutures._ import 
org.scalatest.matchers.must.Matchers._ @@ -21,7 +21,7 @@ class ClientHelperSpec extends AnyWordSpec { did, keyPair.getPrivateKey ) - val request = connector_api + val request = common_models .ConnectionsStatusRequest() .withConnectionTokens("a b c".split(" ").toList) val header = requestSigner(request) diff --git a/node/src/test/scala/io/iohk/atala/prism/node/operations/IssueCredentialBatchOperationSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/operations/IssueCredentialBatchOperationSpec.scala deleted file mode 100644 index 92d5318acc..0000000000 --- a/node/src/test/scala/io/iohk/atala/prism/node/operations/IssueCredentialBatchOperationSpec.scala +++ /dev/null @@ -1,327 +0,0 @@ -package io.iohk.atala.prism.node.operations - -import cats.effect.unsafe.implicits.global -import com.google.protobuf.ByteString -import doobie.implicits._ -import io.iohk.atala.prism.crypto.{MerkleRoot, Sha256} -import io.iohk.atala.prism.node.{AtalaWithPostgresSpec, DataPreparation} -import io.iohk.atala.prism.node.DataPreparation.{dummyApplyOperationConfig, dummyLedgerData} -import io.iohk.atala.prism.node.models.{DIDData, DIDPublicKey, KeyUsage} -import io.iohk.atala.prism.node.repositories.daos.CredentialBatchesDAO -import io.iohk.atala.prism.protos.node_models -import org.scalatest.EitherValues._ -import org.scalatest.Inside.inside -import org.scalatest.OptionValues.convertOptionToValuable - -object IssueCredentialBatchOperationSpec { - val masterKeys = CreateDIDOperationSpec.masterKeys - val issuingKeys = CreateDIDOperationSpec.issuingKeys - - lazy val issuerDidKeys = List( - DIDPublicKey( - issuerDIDSuffix, - "master", - KeyUsage.MasterKey, - masterKeys.getPublicKey - ), - DIDPublicKey( - issuerDIDSuffix, - "issuing", - KeyUsage.IssuingKey, - issuingKeys.getPublicKey - ) - ) - - lazy val issuerCreateDIDOperation = - CreateDIDOperation - .parse(CreateDIDOperationSpec.exampleOperation, dummyLedgerData) - .toOption - .value - lazy val issuerDIDSuffix = issuerCreateDIDOperation.id - val content = "" - val mockMerkleRoot = new MerkleRoot(Sha256.compute(content.getBytes)) - - val exampleOperation = node_models.AtalaOperation( - operation = node_models.AtalaOperation.Operation.IssueCredentialBatch( - value = node_models.IssueCredentialBatchOperation( - credentialBatchData = Some( - node_models.CredentialBatchData( - issuerDid = issuerDIDSuffix.getValue, - merkleRoot = ByteString.copyFrom(mockMerkleRoot.getHash.getValue) - ) - ) - ) - ) - ) -} - -class IssueCredentialBatchOperationSpec extends AtalaWithPostgresSpec { - - import IssueCredentialBatchOperationSpec._ - - "IssueCredentialBatchOperation.parse" should { - "parse valid IssueCredentialBatchOperation AtalaOperation" in { - IssueCredentialBatchOperation.parse( - exampleOperation, - dummyLedgerData - ) mustBe a[Right[_, _]] - } - - "return error when issuerDID is not provided / empty" in { - val invalidOperation = exampleOperation - .update(_.issueCredentialBatch.credentialBatchData.issuerDid := "") - - inside( - IssueCredentialBatchOperation.parse(invalidOperation, dummyLedgerData) - ) { case Left(ValidationError.InvalidValue(path, value, _)) => - path.path mustBe Vector( - "issueCredentialBatch", - "credentialBatchData", - "issuerDID" - ) - value mustBe "" - } - } - - "return error when issuerDID doesn't have valid form" in { - val invalidOperation = exampleOperation - .update( - _.issueCredentialBatch.credentialBatchData.issuerDid := "my best friend" - ) - - inside( - IssueCredentialBatchOperation.parse(invalidOperation, dummyLedgerData) - ) { 
case Left(ValidationError.InvalidValue(path, value, _)) => - path.path mustBe Vector( - "issueCredentialBatch", - "credentialBatchData", - "issuerDID" - ) - value mustBe "my best friend" - } - } - - "return error when merkle root is not provided / empty" in { - val invalidOperation = exampleOperation - .update( - _.issueCredentialBatch.credentialBatchData.merkleRoot := ByteString.EMPTY - ) - - inside( - IssueCredentialBatchOperation.parse(invalidOperation, dummyLedgerData) - ) { case Left(ValidationError.InvalidValue(path, value, _)) => - path.path mustBe Vector( - "issueCredentialBatch", - "credentialBatchData", - "merkleRoot" - ) - value mustBe "0x0" - } - } - - "return error when hash has invalid length" in { - val invalidHash = ByteString.copyFrom("abc", "UTF8") - val invalidOperation = exampleOperation - .update( - _.issueCredentialBatch.credentialBatchData.merkleRoot := invalidHash - ) - - inside( - IssueCredentialBatchOperation.parse(invalidOperation, dummyLedgerData) - ) { case Left(ValidationError.InvalidValue(path, value, _)) => - path.path mustBe Vector( - "issueCredentialBatch", - "credentialBatchData", - "merkleRoot" - ) - value mustBe "0x616263" - } - } - } - - "IssueCredentialBatchOperation.getCorrectnessData" should { - "provide the key reference be used for signing" in { - DataPreparation - .createDID( - DIDData( - issuerDIDSuffix, - issuerDidKeys, - Nil, - Nil, - issuerCreateDIDOperation.digest - ), - dummyLedgerData - ) - val parsedOperation = IssueCredentialBatchOperation - .parse(exampleOperation, dummyLedgerData) - .toOption - .value - - val CorrectnessData(key, previousOperation) = parsedOperation - .getCorrectnessData("issuing") - .transact(database) - .value - .unsafeRunSync() - .toOption - .value - - key mustBe issuingKeys.getPublicKey - previousOperation mustBe None - } - "return state error when there are used different key than issuing key" in { - DataPreparation - .createDID( - DIDData( - issuerDIDSuffix, - issuerDidKeys, - Nil, - Nil, - issuerCreateDIDOperation.digest - ), - dummyLedgerData - ) - val parsedOperation = IssueCredentialBatchOperation - .parse(exampleOperation, dummyLedgerData) - .toOption - .value - - val result = parsedOperation - .getCorrectnessData("master") - .transact(database) - .value - .unsafeRunSync() - - result mustBe Left( - StateError.InvalidKeyUsed( - "The key type expected is Issuing key. 
Type used: MasterKey" - ) - ) - } - "return state error when unknown keyId is used" in { - DataPreparation - .createDID( - DIDData( - issuerDIDSuffix, - issuerDidKeys, - Nil, - Nil, - issuerCreateDIDOperation.digest - ), - dummyLedgerData - ) - val parsedOperation = IssueCredentialBatchOperation - .parse(exampleOperation, dummyLedgerData) - .toOption - .value - - val result = parsedOperation - .getCorrectnessData("issuing3") - .transact(database) - .value - .unsafeRunSync() - - result mustBe Left(StateError.UnknownKey(issuerDIDSuffix, "issuing3")) - } - } - - "IssueCredentialBatchOperation.applyState" should { - "create the credential batch information in the database" in { - DataPreparation - .createDID( - DIDData( - issuerDIDSuffix, - issuerDidKeys, - Nil, - Nil, - issuerCreateDIDOperation.digest - ), - dummyLedgerData - ) - val parsedOperation = IssueCredentialBatchOperation - .parse(exampleOperation, dummyLedgerData) - .toOption - .value - - val result = parsedOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeToFuture() - .futureValue - result mustBe a[Right[_, _]] - - val insertedBatch = - CredentialBatchesDAO - .findBatch(parsedOperation.credentialBatchId) - .transact(database) - .unsafeToFuture() - .futureValue - .value - - insertedBatch.batchId mustBe parsedOperation.credentialBatchId - insertedBatch.issuerDIDSuffix mustBe parsedOperation.issuerDIDSuffix - insertedBatch.merkleRoot mustBe parsedOperation.merkleRoot - insertedBatch.issuedOn mustBe dummyLedgerData - insertedBatch.lastOperation mustBe parsedOperation.digest - insertedBatch.revokedOn mustBe empty - } - - "return error when issuer is missing in the DB" in { - val parsedOperation = IssueCredentialBatchOperation - .parse(exampleOperation, dummyLedgerData) - .toOption - .value - - val result = parsedOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeToFuture() - .futureValue - .left - .value - - result mustBe a[StateError.EntityMissing] - } - - "return error when the credential already exists in the db" in { - DataPreparation - .createDID( - DIDData( - issuerDIDSuffix, - issuerDidKeys, - Nil, - Nil, - issuerCreateDIDOperation.digest - ), - dummyLedgerData - ) - - val parsedOperation = IssueCredentialBatchOperation - .parse(exampleOperation, dummyLedgerData) - .toOption - .value - - // first insertion - val resultAttempt1 = parsedOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeToFuture() - .futureValue - - resultAttempt1 mustBe a[Right[_, _]] - - val resultAttempt2 = parsedOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeToFuture() - .futureValue - .left - .value - - resultAttempt2 mustBe a[StateError.EntityExists] - } - } -} diff --git a/node/src/test/scala/io/iohk/atala/prism/node/operations/RevokeCredentialsOperationSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/operations/RevokeCredentialsOperationSpec.scala deleted file mode 100644 index 089505df30..0000000000 --- a/node/src/test/scala/io/iohk/atala/prism/node/operations/RevokeCredentialsOperationSpec.scala +++ /dev/null @@ -1,498 +0,0 @@ -package io.iohk.atala.prism.node.operations - -import cats.effect.unsafe.implicits.global -import com.google.protobuf.ByteString -import doobie.implicits._ -import io.iohk.atala.prism.credentials.CredentialBatchId -import io.iohk.atala.prism.crypto.{Sha256, Sha256Digest} -import io.iohk.atala.prism.protos.models.TimestampInfo -import 
io.iohk.atala.prism.node.models.{Ledger, TransactionId} -import io.iohk.atala.prism.node.AtalaWithPostgresSpec -import io.iohk.atala.prism.node.DataPreparation.{dummyApplyOperationConfig, dummyLedgerData} -import io.iohk.atala.prism.node.models.nodeState.LedgerData -import io.iohk.atala.prism.node.repositories.daos.CredentialBatchesDAO -import io.iohk.atala.prism.protos.node_models -import org.scalatest.EitherValues._ -import org.scalatest.Inside._ -import org.scalatest.OptionValues._ - -import java.time.Instant - -object RevokeCredentialsOperationSpec { - private val revokingKeys = CreateDIDOperationSpec.revokingKeys - - lazy val issuerCreateDIDOperation: CreateDIDOperation = - CreateDIDOperation - .parse(CreateDIDOperationSpec.exampleOperation, dummyLedgerData) - .toOption - .value - - lazy val credentialIssueBatchOperation: IssueCredentialBatchOperation = - IssueCredentialBatchOperation - .parse( - IssueCredentialBatchOperationSpec.exampleOperation, - dummyLedgerData - ) - .toOption - .value - - lazy val credentialBatchId: CredentialBatchId = - credentialIssueBatchOperation.credentialBatchId - - val revocationDate: TimestampInfo = - new TimestampInfo(Instant.ofEpochMilli(0).toEpochMilli, 0, 1) - val revocationLedgerData: LedgerData = - LedgerData( - TransactionId - .from(Array.fill[Byte](TransactionId.config.size.toBytes.toInt)(0)) - .value, - Ledger.InMemory, - revocationDate - ) - - val revokeFullBatchOperation: node_models.AtalaOperation = - node_models.AtalaOperation( - operation = node_models.AtalaOperation.Operation.RevokeCredentials( - value = node_models.RevokeCredentialsOperation( - previousOperationHash = ByteString.copyFrom(credentialIssueBatchOperation.digest.getValue), - credentialBatchId = credentialBatchId.getId, - credentialsToRevoke = Seq() - ) - ) - ) - - val credentialHashToRevoke: Sha256Digest = Sha256.compute("cred 1".getBytes) - - val revokeSpecificCredentialsOperation: node_models.AtalaOperation = - node_models.AtalaOperation( - operation = node_models.AtalaOperation.Operation.RevokeCredentials( - value = node_models.RevokeCredentialsOperation( - previousOperationHash = ByteString.copyFrom(credentialIssueBatchOperation.digest.getValue), - credentialBatchId = credentialBatchId.getId, - credentialsToRevoke = Seq(ByteString.copyFrom(credentialHashToRevoke.getValue)) - ) - ) - ) -} - -class RevokeCredentialsOperationSpec extends AtalaWithPostgresSpec { - - import RevokeCredentialsOperationSpec._ - - "RevokeCredentialsOperation.parse" should { - "parse valid RevokeCredentials AtalaOperation to revoke a full batch" in { - RevokeCredentialsOperation.parse( - revokeFullBatchOperation, - dummyLedgerData - ) mustBe a[Right[_, _]] - } - - "parse valid RevokeCredentials AtalaOperation to revoke specific credentials within a batch" in { - RevokeCredentialsOperation.parse( - revokeSpecificCredentialsOperation, - dummyLedgerData - ) mustBe a[Right[_, _]] - } - - "return error when no previous operation is provided" in { - val invalidOperation = revokeFullBatchOperation - .update(_.revokeCredentials.previousOperationHash := ByteString.EMPTY) - - inside( - RevokeCredentialsOperation.parse(invalidOperation, dummyLedgerData) - ) { case Left(ValidationError.InvalidValue(path, value, _)) => - path.path mustBe Vector("revokeCredentials", "previousOperationHash") - value mustBe "0x0" - } - } - - "return error when previous operation hash has invalid length" in { - val bs = ByteString.copyFromUtf8("abc") - val invalidOperation = revokeFullBatchOperation - 
.update(_.revokeCredentials.previousOperationHash := bs) - - inside( - RevokeCredentialsOperation.parse(invalidOperation, dummyLedgerData) - ) { case Left(ValidationError.InvalidValue(path, value, _)) => - path.path mustBe Vector("revokeCredentials", "previousOperationHash") - value mustBe "0x616263" - } - } - - "return error if no credential batch id is provided" in { - val invalidOperation = revokeFullBatchOperation - .update(_.revokeCredentials.credentialBatchId := "") - - inside( - RevokeCredentialsOperation.parse(invalidOperation, dummyLedgerData) - ) { case Left(ValidationError.InvalidValue(path, value, _)) => - path.path mustBe Vector("revokeCredentials", "credentialBatchId") - value mustBe "" - } - } - - "return error if credential batch id has invalid format" in { - val cid = "my last credential" - val invalidOperation = revokeFullBatchOperation - .update(_.revokeCredentials.credentialBatchId := cid) - - inside( - RevokeCredentialsOperation.parse(invalidOperation, dummyLedgerData) - ) { case Left(ValidationError.InvalidValue(path, value, _)) => - path.path mustBe Vector("revokeCredentials", "credentialBatchId") - value mustBe cid - } - } - - "return error if a credential hash to revoke has invalid format" in { - val invalidSeq = Seq(ByteString.copyFrom("my last credential".getBytes())) - val invalidOperation = revokeFullBatchOperation - .update(_.revokeCredentials.credentialsToRevoke := invalidSeq) - - inside( - RevokeCredentialsOperation.parse(invalidOperation, dummyLedgerData) - ) { case Left(ValidationError.InvalidValue(path, value, _)) => - path.path mustBe Vector("revokeCredentials", "credentialsToRevoke") - value mustBe invalidSeq.toString - } - } - } - - "RevokeCredentialsOperation.getCorrectnessData" should { - "provide the data required for correctness verification" in { - issuerCreateDIDOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - credentialIssueBatchOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - - val parsedOperation = RevokeCredentialsOperation - .parse(revokeFullBatchOperation, dummyLedgerData) - .toOption - .value - - val corrDataE = parsedOperation - .getCorrectnessData("revoking") - .transact(database) - .value - .unsafeRunSync() - - val CorrectnessData(key, previousOperation) = corrDataE.toOption.value - - key mustBe revokingKeys.getPublicKey - previousOperation mustBe Some(credentialIssueBatchOperation.digest) - } - "return state error when there are used different key than revocation key" in { - issuerCreateDIDOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - credentialIssueBatchOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - - val parsedOperation = RevokeCredentialsOperation - .parse(revokeFullBatchOperation, dummyLedgerData) - .toOption - .value - - val result = parsedOperation - .getCorrectnessData("master") - .transact(database) - .value - .unsafeRunSync() - - result mustBe Left( - StateError.InvalidKeyUsed( - "The key type expected is Revocation key. 
Type used: MasterKey" - ) - ) - } - } - - "RevokeCredentialsOperation.applyState" should { - "mark credential batch as revoked in the database" in { - issuerCreateDIDOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - credentialIssueBatchOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - - val parsedOperation = - RevokeCredentialsOperation - .parse(revokeFullBatchOperation, revocationLedgerData) - .toOption - .value - - parsedOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - .toOption - .value - - val credentialBatch = - CredentialBatchesDAO - .findBatch(parsedOperation.credentialBatchId) - .transact(database) - .unsafeRunSync() - .value - - credentialBatch.revokedOn mustBe Some(revocationLedgerData) - } - - "fail when attempting to revoke an already revoked credential batch" in { - issuerCreateDIDOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - credentialIssueBatchOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - - val parsedOperation = - RevokeCredentialsOperation - .parse(revokeFullBatchOperation, revocationLedgerData) - .toOption - .value - - parsedOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - .toOption - .value - - val credentialBatch = - CredentialBatchesDAO - .findBatch(parsedOperation.credentialBatchId) - .transact(database) - .unsafeRunSync() - .value - - credentialBatch.revokedOn mustBe Some(revocationLedgerData) - - val error = - parsedOperation.applyState(dummyApplyOperationConfig).transact(database).value.unsafeRunSync() - - error.left.value mustBe a[StateError.BatchAlreadyRevoked] - } - - "mark specific credentials as revoked in the database" in { - issuerCreateDIDOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - credentialIssueBatchOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - - val parsedOperation = - RevokeCredentialsOperation - .parse(revokeSpecificCredentialsOperation, revocationLedgerData) - .toOption - .value - - parsedOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - .toOption - .value - - val credentialsRevoked = - CredentialBatchesDAO - .findRevokedCredentials(parsedOperation.credentialBatchId) - .transact(database) - .unsafeRunSync() - - credentialsRevoked.size mustBe 1 - val (revokedCredHash, revokedAt) = credentialsRevoked.headOption.value - revokedCredHash mustBe credentialHashToRevoke - revokedAt mustBe revocationLedgerData - - // the batch itself should not be revoked - val credentialBatch = - CredentialBatchesDAO - .findBatch(parsedOperation.credentialBatchId) - .transact(database) - .unsafeRunSync() - .value - - credentialBatch.revokedOn mustBe empty - } - - "fail to revoke specific credentials when the batch was already revoked" in { - issuerCreateDIDOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - credentialIssueBatchOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - - val parsedRevokeBatchOperation = - RevokeCredentialsOperation - .parse(revokeFullBatchOperation, revocationLedgerData) - .toOption - .value - - parsedRevokeBatchOperation - .applyState(dummyApplyOperationConfig) - 
.value - .transact(database) - .unsafeRunSync() - .toOption - .value - - val credentialBatch = - CredentialBatchesDAO - .findBatch(parsedRevokeBatchOperation.credentialBatchId) - .transact(database) - .unsafeRunSync() - .value - - credentialBatch.revokedOn mustBe Some(revocationLedgerData) - - val parsedOperation = - RevokeCredentialsOperation - .parse(revokeSpecificCredentialsOperation, dummyLedgerData) - .toOption - .value - - // sanity check - parsedOperation.credentialBatchId mustBe parsedRevokeBatchOperation.credentialBatchId - - val error = parsedOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - .left - .value - - error mustBe a[StateError.BatchAlreadyRevoked] - - val credentialsRevoked = - CredentialBatchesDAO - .findRevokedCredentials(parsedOperation.credentialBatchId) - .transact(database) - .unsafeRunSync() - - credentialsRevoked mustBe empty - - // the batch itself should remain revoked with the same time - val credentialBatchAfter = - CredentialBatchesDAO - .findBatch(parsedRevokeBatchOperation.credentialBatchId) - .transact(database) - .unsafeRunSync() - .value - - credentialBatchAfter.revokedOn mustBe Some(revocationLedgerData) - } - - "do not update revocation time for specific credentials that were already revoked" in { - issuerCreateDIDOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - credentialIssueBatchOperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - - val parsedFirstOperation = - RevokeCredentialsOperation - .parse(revokeSpecificCredentialsOperation, revocationLedgerData) - .toOption - .value - - parsedFirstOperation - .applyState(dummyApplyOperationConfig) - .value - .transact(database) - .unsafeRunSync() - .toOption - .value - - val credentialsRevoked = - CredentialBatchesDAO - .findRevokedCredentials(parsedFirstOperation.credentialBatchId) - .transact(database) - .unsafeRunSync() - - credentialsRevoked.size mustBe 1 - val (revokedCredHash, revokedAt) = credentialsRevoked.headOption.value - revokedCredHash mustBe credentialHashToRevoke - revokedAt mustBe revocationLedgerData - - val parsedOSecondperation = - RevokeCredentialsOperation - .parse(revokeSpecificCredentialsOperation, dummyLedgerData) - .toOption - .value - - // sanity check - parsedOSecondperation.credentialBatchId mustBe parsedFirstOperation.credentialBatchId - - parsedOSecondperation - .applyState(dummyApplyOperationConfig) - .transact(database) - .value - .unsafeRunSync() - .toOption - .value - - val credentialsRevokedAfter = - CredentialBatchesDAO - .findRevokedCredentials(parsedOSecondperation.credentialBatchId) - .transact(database) - .unsafeRunSync() - - credentialsRevokedAfter.size mustBe 1 - val (revokedCredHashAfter, revokedAtAfter) = - credentialsRevokedAfter.headOption.value - revokedCredHashAfter mustBe credentialHashToRevoke - // the time didn't change - revokedAtAfter mustBe revocationLedgerData - - // the batch itself should not be revoked - val credentialBatchAfter = - CredentialBatchesDAO - .findBatch(parsedFirstOperation.credentialBatchId) - .transact(database) - .unsafeRunSync() - .value - - credentialBatchAfter.revokedOn mustBe empty - } - } -} diff --git a/node/src/test/scala/io/iohk/atala/prism/node/poc/CredVerification.scala b/node/src/test/scala/io/iohk/atala/prism/node/poc/CredVerification.scala deleted file mode 100644 index 32ff6fd285..0000000000 --- 
a/node/src/test/scala/io/iohk/atala/prism/node/poc/CredVerification.scala +++ /dev/null @@ -1,139 +0,0 @@ -package io.iohk.atala.prism.node.poc - -import cats.data.{Validated, ValidatedNel} -import cats.implicits.{catsSyntaxTuple6Semigroupal, catsSyntaxValidatedId} -import io.iohk.atala.prism.credentials.PrismCredential -import io.iohk.atala.prism.crypto.{MerkleInclusionProof, MerkleRoot} -import io.iohk.atala.prism.api.CredentialBatches -import io.iohk.atala.prism.node.models.KeyData -import io.iohk.atala.prism.protos.models.TimestampInfo - -object CredVerification { - - sealed trait VerificationError - object VerificationError { - case class CredentialWasRevoked(revokedOn: TimestampInfo) extends VerificationError - case class BatchWasRevoked(revokedOn: TimestampInfo) extends VerificationError - case object InvalidMerkleProof extends VerificationError - case class KeyWasNotValid( - keyAddedOn: TimestampInfo, - credentialIssuedOn: TimestampInfo - ) extends VerificationError - case class KeyWasRevoked( - credentialIssuedOn: TimestampInfo, - keyRevokedOn: TimestampInfo - ) extends VerificationError - case object InvalidSignature extends VerificationError - } - - case class BatchData( - batchIssuanceDate: TimestampInfo, - revocationDate: Option[TimestampInfo] - ) - - import VerificationError._ - - private val valid = ().validNel[VerificationError] - - /** This method receives data retrieved from the node and the credential to verify and returns true if and only if the - * credential is valid. - * - * We have some assumptions to call this method: - * 1. The keyData is obtained from the PRISM node and corresponds to the key used to sign the credential 2. The - * batchData is obtained from the PRISM node and corresponds to the signedCredential parameter 3. The issuer DID - * is a trusted one 4. 
The credentialRevocationTime is obtained from the PRISM node and corresponds to the - * signedCredential parameter - * - * @param keyData - * the public key used to sign the credential and its addition and (optional) revocation timestamps - * @param batchData - * the credential information extracted from the node - * @param credentialRevocationTime - * the credential information extracted from the node - * @param merkleRoot - * merkle root that represents the batch - * @param inclusionProof - * merkle proof of inclusion that states that signedCredential is in the batch - * @param signedCredential - * the credential to verify - * @return - * a validation result - */ - def verify( - keyData: KeyData, - batchData: BatchData, - credentialRevocationTime: Option[TimestampInfo], - merkleRoot: MerkleRoot, - inclusionProof: MerkleInclusionProof, - signedCredential: PrismCredential - ): ValidatedNel[VerificationError, Unit] = { - - // Scala's type system is evil, so we need this type alias to currify things for the - // compiler (see https://stackoverflow.com/questions/49865936/scala-cats-validated-value-mapn-is-not-a-member-of-validatednel-tuple) - type ValidationResult[A] = ValidatedNel[VerificationError, A] - - // the credential batch is not revoked - val credentialBatchNotRevoked: ValidationResult[Unit] = - batchData.revocationDate.fold(valid) { revokedOn => - BatchWasRevoked(revokedOn = revokedOn).invalidNel - } - - // the key was added before the credential was issued - val keyAddedBeforeIssuance: ValidationResult[Unit] = - Validated.condNel( - keyData.addedOn occurredBefore batchData.batchIssuanceDate, - (), - KeyWasNotValid( - keyAddedOn = keyData.addedOn, - credentialIssuedOn = batchData.batchIssuanceDate - ) - ) - - // the key is not revoked or, the key was revoked after the credential was signed - val keyWasStillValid: ValidationResult[Unit] = { - keyData.revokedOn match { - case None => // the key was not revoked - valid - case Some(revokedOn) => - if (batchData.batchIssuanceDate occurredBefore revokedOn) valid - else - KeyWasRevoked( - credentialIssuedOn = batchData.batchIssuanceDate, - keyRevokedOn = revokedOn - ).invalidNel - } - } - - // the signature is valid - val signatureIsValid: ValidationResult[Unit] = - Validated.condNel( - signedCredential.isValidSignature(keyData.issuingKey), - (), - InvalidSignature - ) - - val individualCredentialNotRevoked: ValidationResult[Unit] = - credentialRevocationTime.fold(valid) { revokedOn => - CredentialWasRevoked(revokedOn).invalidNel - } - - val merkleProofIsValid: ValidationResult[Unit] = - Validated.condNel( - CredentialBatches - .verifyInclusion(signedCredential, merkleRoot, inclusionProof), - (), - InvalidMerkleProof - ) - - ( - credentialBatchNotRevoked, - keyAddedBeforeIssuance, - keyWasStillValid, - signatureIsValid, - individualCredentialNotRevoked, - merkleProofIsValid - ).mapN { (_: Unit, _: Unit, _: Unit, _: Unit, _: Unit, _: Unit) => - () - } - } -} diff --git a/node/src/test/scala/io/iohk/atala/prism/node/poc/EncodedSizes.scala b/node/src/test/scala/io/iohk/atala/prism/node/poc/EncodedSizes.scala deleted file mode 100644 index 928c707bb8..0000000000 --- a/node/src/test/scala/io/iohk/atala/prism/node/poc/EncodedSizes.scala +++ /dev/null @@ -1,79 +0,0 @@ -package io.iohk.atala.prism.node.poc - -import java.util.Base64 -import com.google.protobuf.ByteString -import io.iohk.atala.prism.crypto.EC.{INSTANCE => EC} -import io.iohk.atala.prism.crypto.keys.ECPublicKey -import io.iohk.atala.prism.crypto.ECConfig.{INSTANCE => ECConfig} 
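(The deleted CredVerification.verify above runs six independent checks and combines them with mapN so that every failure is reported at once rather than short-circuiting. A minimal standalone sketch of that cats ValidatedNel pattern; the error type and checks here are illustrative, not taken from the codebase:

    import cats.data.{Validated, ValidatedNel}
    import cats.implicits._

    sealed trait Err
    case object TooShort extends Err
    case object NoDigit extends Err

    // condNel yields a ValidatedNel, so the two checks accumulate
    // failures instead of stopping at the first one, exactly as the
    // six validations in verify did.
    def check(s: String): ValidatedNel[Err, String] =
      (
        Validated.condNel(s.length >= 8, (), TooShort: Err),
        Validated.condNel(s.exists(_.isDigit), (), NoDigit: Err)
      ).mapN((_, _) => s)
)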
-import io.iohk.atala.prism.crypto.Sha256 -import io.iohk.atala.prism.node.models.DidSuffix -import io.iohk.atala.prism.protos.node_models - -object EncodedSizes { - def main(args: Array[String]): Unit = { - val startTime = System.currentTimeMillis() - - val n = 100000 - println(s"Generating $n dids") - - val data = for { - _ <- 1 to n - masterPublicKey1 = EC.generateKeyPair().getPublicKey - masterPublicKey2 = EC.generateKeyPair().getPublicKey - masterPublicKey3 = EC.generateKeyPair().getPublicKey - did = createDID( - List(masterPublicKey1, masterPublicKey2, masterPublicKey3) - ) - } yield (did, did.length) - - val sortedData = data.sortBy(_._2) - println("printing 3 shortest DIDs") - println(sortedData.take(3).mkString("\n")) - println("printing 3 longest DIDs") - println(sortedData.drop(n - 3).mkString("\n")) - - val averageSize = data.foldLeft(0) { _ + _._2 } / n.toDouble - println(s"Average DID length $averageSize bytes") - val endTime = System.currentTimeMillis() - - println(s"Dataset generated in ${(endTime - startTime) / 1000.0} seconds") - - } - - def createDID(masterPublicKeys: List[ECPublicKey]): String = { - def keyElement(publicKey: ECPublicKey, index: Int): node_models.PublicKey = - node_models.PublicKey( - id = s"master$index", - usage = node_models.KeyUsage.MASTER_KEY, - keyData = node_models.PublicKey.KeyData.EcKeyData( - publicKeyToProto(publicKey) - ) - ) - - val createDidOp = node_models.CreateDIDOperation( - didData = Some( - node_models.CreateDIDOperation.DIDCreationData( - publicKeys = masterPublicKeys.zipWithIndex map { case (k, i) => - keyElement(k, i) - } - ) - ) - ) - - val atalaOp = node_models.AtalaOperation(operation = node_models.AtalaOperation.Operation.CreateDid(createDidOp)) - val operationBytes = atalaOp.toByteArray - val operationHash = Sha256.compute(operationBytes) - val didSuffix: DidSuffix = DidSuffix.fromDigest(operationHash) - val encodedOperation = Base64.getUrlEncoder.encodeToString(operationBytes) - s"did:prism:${didSuffix.getValue}:$encodedOperation" - } - - private def publicKeyToProto(key: ECPublicKey): node_models.ECKeyData = { - val point = key.getCurvePoint - node_models.ECKeyData( - curve = ECConfig.getCURVE_NAME, - x = ByteString.copyFrom(point.getX.bytes()), - y = ByteString.copyFrom(point.getY.bytes()) - ) - } -} diff --git a/node/src/test/scala/io/iohk/atala/prism/node/poc/GenericCredentialsSDK.scala b/node/src/test/scala/io/iohk/atala/prism/node/poc/GenericCredentialsSDK.scala deleted file mode 100644 index c564b53c0a..0000000000 --- a/node/src/test/scala/io/iohk/atala/prism/node/poc/GenericCredentialsSDK.scala +++ /dev/null @@ -1,45 +0,0 @@ -package io.iohk.atala.prism.node.poc - -import io.iohk.atala.prism.credentials.content.CredentialContent -import io.iohk.atala.prism.identity.{PrismDid => DID} -import io.iohk.atala.prism.node.models.DidSuffix -import kotlinx.serialization.json.JsonElementKt.JsonPrimitive -import kotlinx.serialization.json.JsonObject - -import scala.annotation.nowarn -import scala.jdk.CollectionConverters._ -// This SDK would allow to build generic credentials and manipulate them -// For this toy example, the credential model is a String that represents a JSON -// and we didn't add nice builders, we just take fixed values for illustration -// to build a degree credential -object GenericCredentialsSDK { - - private var issuerDIDUsed: DID = _ - private var keyIdUsed: String = "" - - def buildGenericCredential( - credentialType: String, - issuerDID: DID, - issuanceKeyId: String, - claims: String - ): 
CredentialContent = { - issuerDIDUsed = issuerDID - keyIdUsed = issuanceKeyId - val fields = Map( - "type" -> JsonPrimitive(credentialType), - "id" -> JsonPrimitive(s"did:prism:${issuerDID.getSuffix}"), - "keyId" -> JsonPrimitive(issuanceKeyId), - "credentialSubject" -> JsonPrimitive(claims) - ) - new CredentialContent(new JsonObject(fields.asJava)) - } - - @nowarn("cat=unused-params") - def getIssuerDID(credential: String): String = issuerDIDUsed.getValue - @nowarn("cat=unused-params") - def getIssuerDIDSufix(credential: String): DidSuffix = DidSuffix( - issuerDIDUsed.getSuffix - ) - @nowarn("cat=unused-params") - def getKeyId(credential: String): String = keyIdUsed -} diff --git a/node/src/test/scala/io/iohk/atala/prism/node/poc/Wallet.scala b/node/src/test/scala/io/iohk/atala/prism/node/poc/Wallet.scala deleted file mode 100644 index 72dcdfa94a..0000000000 --- a/node/src/test/scala/io/iohk/atala/prism/node/poc/Wallet.scala +++ /dev/null @@ -1,237 +0,0 @@ -package io.iohk.atala.prism.node.poc - -import cats.data.ValidatedNel -import com.google.protobuf.ByteString -import io.iohk.atala.prism.credentials._ -import io.iohk.atala.prism.credentials.json.JsonBasedCredential -import io.iohk.atala.prism.credentials.content.CredentialContent -import io.iohk.atala.prism.crypto.{MerkleInclusionProof, Sha256} -import io.iohk.atala.prism.crypto.EC.{INSTANCE => EC} -import io.iohk.atala.prism.crypto.keys.{ECPrivateKey, ECPublicKey} -import io.iohk.atala.prism.crypto.ECConfig.{INSTANCE => ECConfig} -import io.iohk.atala.prism.protos.{node_api, node_models} -import io.iohk.atala.prism.crypto.signature.ECSignature -import io.iohk.atala.prism.identity.PrismDid -import io.iohk.atala.prism.node.models.{DidSuffix, KeyData} -import io.iohk.atala.prism.node.grpc.ProtoCodecs -import io.iohk.atala.prism.node.poc.CredVerification.{BatchData, VerificationError} -import org.scalatest.OptionValues.convertOptionToValuable - -// We define some classes to illustrate what happens in the different components -case class Wallet(node: node_api.NodeServiceGrpc.NodeServiceBlockingStub) { - - private var dids: Map[DidSuffix, collection.mutable.Map[String, ECPrivateKey]] = Map() - - def generateDID(): (DidSuffix, node_models.AtalaOperation) = { - val masterKeyPair = EC.generateKeyPair() - val masterPrivateKey = masterKeyPair.getPrivateKey - val masterPublicKey = masterKeyPair.getPublicKey - val issuanceKeyPair = EC.generateKeyPair() - val issuancePrivateKey = issuanceKeyPair.getPrivateKey - val issuancePublicKey = issuanceKeyPair.getPublicKey - - // This could be encapsulated in the "NodeSDK". 
I added it here for simplicity - // Note that in our current design we cannot create a did that has two keys from start - val createDidOp = node_models.CreateDIDOperation( - didData = Some( - node_models.CreateDIDOperation.DIDCreationData( - publicKeys = Seq( - node_models.PublicKey( - id = PrismDid.getDEFAULT_MASTER_KEY_ID, - usage = node_models.KeyUsage.MASTER_KEY, - keyData = node_models.PublicKey.KeyData.EcKeyData( - publicKeyToProto(masterPublicKey) - ) - ), - node_models.PublicKey( - id = "issuance0", - usage = node_models.KeyUsage.ISSUING_KEY, - keyData = node_models.PublicKey.KeyData.EcKeyData( - publicKeyToProto(issuancePublicKey) - ) - ) - ) - ) - ) - ) - - val atalaOp = node_models.AtalaOperation(operation = node_models.AtalaOperation.Operation.CreateDid(createDidOp)) - val operationHash = Sha256.compute(atalaOp.toByteArray) - val didSuffix: DidSuffix = DidSuffix(operationHash.getHexValue) - - dids += (didSuffix -> collection.mutable.Map( - PrismDid.getDEFAULT_MASTER_KEY_ID -> masterPrivateKey, - "issuance0" -> issuancePrivateKey - )) - - (didSuffix, atalaOp) - } - - def addRevocationKeyToDid( - revocationKeyId: String, - previousOperationHash: ByteString, - didSuffix: DidSuffix - ): Unit = { - val revocationKeyPair = EC.generateKeyPair() - val publicKeyProto = node_models.PublicKey( - id = revocationKeyId, - usage = node_models.KeyUsage.REVOCATION_KEY, - keyData = node_models.PublicKey.KeyData.EcKeyData( - publicKeyToProto(revocationKeyPair.getPublicKey) - ) - ) - - val updateDIDOp = node_models.UpdateDIDOperation( - previousOperationHash = previousOperationHash, - id = didSuffix.getValue, - actions = Seq( - node_models.UpdateDIDAction( - node_models.UpdateDIDAction.Action.AddKey( - node_models.AddKeyAction( - Some(publicKeyProto) - ) - ) - ) - ) - ) - val updateDidOpSigned = signOperation( - node_models.AtalaOperation( - node_models.AtalaOperation.Operation.UpdateDid(updateDIDOp) - ), - PrismDid.getDEFAULT_MASTER_KEY_ID, - didSuffix - ) - node.scheduleOperations(node_api.ScheduleOperationsRequest(List(updateDidOpSigned))) - dids(didSuffix) += (revocationKeyId -> revocationKeyPair.getPrivateKey) - () - } - - def signOperation( - operation: node_models.AtalaOperation, - keyId: String, - didSuffix: DidSuffix - ): node_models.SignedAtalaOperation = { - // TODO: This logic should also live eventually in the crypto library - val key = dids(didSuffix)(keyId) - node_models.SignedAtalaOperation( - signedWith = keyId, - operation = Some(operation), - signature = ByteString.copyFrom(EC.signBytes(operation.toByteArray, key).getData) - ) - } - - def signCredential( - credentialContent: CredentialContent, - keyId: String, - didSuffix: DidSuffix - ): PrismCredential = { - val privateKey = dids(didSuffix)(keyId) - new JsonBasedCredential(credentialContent, null).sign(privateKey) - } - - def signKey( - publicKey: ECPublicKey, - keyId: String, - didSuffix: DidSuffix - ): ECSignature = { - val privateKey = dids(didSuffix)(keyId) - EC.signBytes(publicKey.getEncoded, privateKey) - } - - def verifySignedKey( - publicKey: ECPublicKey, - signature: ECSignature, - signingKey: ECPublicKey - ): Boolean = { - EC.verifyBytes(publicKey.getEncoded, signingKey, signature) - } - - def verifyCredential( - credential: PrismCredential, - merkleProof: MerkleInclusionProof - ): ValidatedNel[VerificationError, Unit] = { - // extract user DIDSuffix and keyId from credential - val issuerDID = Option(credential.getContent.getIssuerDid) - .getOrElse(throw new Exception("getIssuerDid is null")) - val issuanceKeyId = - 
Option(credential.getContent.getIssuanceKeyId) - .getOrElse(throw new Exception("getIssuanceKeyId is null")) - - // request credential state to the node - val merkleRoot = merkleProof.derivedRoot - val batchId = - CredentialBatchId.fromBatchData(issuerDID.getSuffix, merkleRoot) - - val batchStateProto = node.getBatchState( - node_api.GetBatchStateRequest( - batchId.getId - ) - ) - val batchIssuanceDate = - ProtoCodecs.fromTimestampInfoProto( - batchStateProto.getPublicationLedgerData.timestampInfo.value - ) - val batchRevocationDate = - batchStateProto.getRevocationLedgerData.timestampInfo.map( - ProtoCodecs.fromTimestampInfoProto - ) - val batchData = BatchData(batchIssuanceDate, batchRevocationDate) - - // resolve DID through the node - val didDocumentOption = node - .getDidDocument( - node_api.GetDidDocumentRequest( - did = issuerDID.getValue - ) - ) - .document - val didDocument = didDocumentOption.value - - // get verification key - val issuancekeyProtoOption = - didDocument.publicKeys.find(_.id == issuanceKeyId) - val issuancekeyData = issuancekeyProtoOption.value - val issuanceKey = - issuancekeyProtoOption.flatMap(ProtoCodecs.fromProtoKey).value - val issuanceKeyAddedOn = ProtoCodecs.fromTimestampInfoProto( - issuancekeyData.addedOn.value.timestampInfo.value - ) - val issuanceKeyRevokedOn = - issuancekeyData.revokedOn.flatMap( - _.timestampInfo.map(ProtoCodecs.fromTimestampInfoProto) - ) - - val keyData = KeyData(issuanceKey, issuanceKeyAddedOn, issuanceKeyRevokedOn) - - // request specific credential revocation status to the node - val credentialHash = credential.hash - val credentialRevocationTimeResponse = node.getCredentialRevocationTime( - node_api - .GetCredentialRevocationTimeRequest() - .withBatchId(batchId.getId) - .withCredentialHash(ByteString.copyFrom(credentialHash.getValue)) - ) - val credentialRevocationTime = - credentialRevocationTimeResponse.getRevocationLedgerData.timestampInfo - .map(ProtoCodecs.fromTimestampInfoProto) - - CredVerification - .verify( - keyData, - batchData, - credentialRevocationTime, - merkleRoot, - merkleProof, - credential - ) - } - - private def publicKeyToProto(key: ECPublicKey): node_models.ECKeyData = { - val point = key.getCurvePoint - node_models.ECKeyData( - curve = ECConfig.getCURVE_NAME, - x = ByteString.copyFrom(point.getX.bytes()), - y = ByteString.copyFrom(point.getY.bytes()) - ) - } -} diff --git a/node/src/test/scala/io/iohk/atala/prism/node/poc/batch/Connector.scala b/node/src/test/scala/io/iohk/atala/prism/node/poc/batch/Connector.scala deleted file mode 100644 index 5740d34f9d..0000000000 --- a/node/src/test/scala/io/iohk/atala/prism/node/poc/batch/Connector.scala +++ /dev/null @@ -1,32 +0,0 @@ -package io.iohk.atala.prism.node.poc.batch - -import io.iohk.atala.prism.crypto.MerkleInclusionProof -import io.iohk.atala.prism.protos.node_api -import io.iohk.atala.prism.protos.node_api.ScheduleOperationsRequest -import io.iohk.atala.prism.protos.node_models.{OperationOutput, SignedAtalaOperation} - -case class Connector(node: node_api.NodeServiceGrpc.NodeServiceBlockingStub) { - def registerDID( - signedAtalaOperation: SignedAtalaOperation - ): OperationOutput = { - node - .scheduleOperations( - ScheduleOperationsRequest(List(signedAtalaOperation)) - ) - .outputs - .head - } - - // a tiny simulation of sending the credential - private var credentialBatchChannel: List[(String, MerkleInclusionProof)] = Nil - - def sendCredentialAndProof( - message: List[(String, MerkleInclusionProof)] - ): Unit = { - credentialBatchChannel = message 
- } - - def receivedCredentialAndProof(): List[(String, MerkleInclusionProof)] = { - credentialBatchChannel - } -} diff --git a/node/src/test/scala/io/iohk/atala/prism/node/poc/batch/FlowPoC.scala b/node/src/test/scala/io/iohk/atala/prism/node/poc/batch/FlowPoC.scala deleted file mode 100644 index 8682ad6b1c..0000000000 --- a/node/src/test/scala/io/iohk/atala/prism/node/poc/batch/FlowPoC.scala +++ /dev/null @@ -1,297 +0,0 @@ -package io.iohk.atala.prism.node.poc.batch - -import cats.effect.IO -import cats.effect.unsafe.implicits.global -import cats.scalatest.ValidatedValues.convertValidatedToValidatable -import cats.syntax.functor._ -import com.google.protobuf.ByteString -import io.grpc.inprocess.{InProcessChannelBuilder, InProcessServerBuilder} -import io.grpc.{ManagedChannel, Server} -import io.iohk.atala.prism.node.AtalaWithPostgresSpec -import io.iohk.atala.prism.api.CredentialBatches -import io.iohk.atala.prism.credentials.CredentialBatchId -import io.iohk.atala.prism.credentials.json.JsonBasedCredential -import io.iohk.atala.prism.crypto.{Sha256, Sha256Digest} -import io.iohk.atala.prism.identity.PrismDid.{getDEFAULT_MASTER_KEY_ID => masterKeyId} -import io.iohk.atala.prism.identity.{PrismDid => DID} -import io.iohk.atala.prism.node.logging.TraceId.IOWithTraceIdContext -import io.iohk.atala.prism.node.models.DidSuffix -import io.iohk.atala.prism.node.operations.ApplyOperationConfig -import io.iohk.atala.prism.node.poc.CredVerification.VerificationError._ -import io.iohk.atala.prism.node.poc.{GenericCredentialsSDK, Wallet} -import io.iohk.atala.prism.node.repositories._ -import io.iohk.atala.prism.node.services.models.AtalaObjectNotification -import io.iohk.atala.prism.node.services._ -import io.iohk.atala.prism.node.{DataPreparation, NodeGrpcServiceImpl, UnderlyingLedger} -import io.iohk.atala.prism.protos.node_api -import io.iohk.atala.prism.node.utils.IOUtils._ -import io.iohk.atala.prism.node.utils.NodeClientUtils._ -import org.scalatest.BeforeAndAfterEach -import tofu.logging.Logs -import java.util.concurrent.TimeUnit -import scala.concurrent.duration.DurationInt -import scala.jdk.CollectionConverters._ - -class FlowPoC extends AtalaWithPostgresSpec with BeforeAndAfterEach { - - private val flowPocTestLogs = Logs.withContext[IO, IOWithTraceIdContext] - protected var serverName: String = _ - protected var serverHandle: Server = _ - protected var channelHandle: ManagedChannel = _ - protected var nodeServiceStub: node_api.NodeServiceGrpc.NodeServiceBlockingStub = _ - protected var didDataRepository: DIDDataRepository[IOWithTraceIdContext] = _ - protected var atalaOperationsRepository: AtalaOperationsRepository[IOWithTraceIdContext] = _ - protected var credentialBatchesRepository: CredentialBatchesRepository[IOWithTraceIdContext] = _ - protected var atalaReferenceLedger: UnderlyingLedger[IOWithTraceIdContext] = _ - protected var blockProcessingService: BlockProcessingServiceImpl = _ - protected var objectManagementService: ObjectManagementService[IOWithTraceIdContext] = _ - protected var submissionService: SubmissionService[IOWithTraceIdContext] = _ - protected var submissionSchedulingService: SubmissionSchedulingService = _ - protected var atalaObjectsTransactionsRepository: AtalaObjectsTransactionsRepository[IOWithTraceIdContext] = _ - protected var metricsCountersRepository: MetricsCountersRepository[IOWithTraceIdContext] = _ - protected var keyValuesRepository: KeyValuesRepository[IOWithTraceIdContext] = - _ - protected var protocolVersionsRepository: 
ProtocolVersionRepository[IOWithTraceIdContext] = _ - private val publicKeysLimit = 10 - private val servicesLimit = 10 - - override def beforeEach(): Unit = { - super.beforeEach() - - didDataRepository = DIDDataRepository.unsafe(dbLiftedToTraceIdIO, flowPocTestLogs) - credentialBatchesRepository = CredentialBatchesRepository.unsafe(dbLiftedToTraceIdIO, flowPocTestLogs) - protocolVersionsRepository = ProtocolVersionRepository.unsafe(dbLiftedToTraceIdIO, flowPocTestLogs) - - atalaReferenceLedger = InMemoryLedgerService.unsafe(onAtalaReference, flowPocTestLogs) - blockProcessingService = new BlockProcessingServiceImpl(ApplyOperationConfig(DidSuffix("0a1e3"))) - atalaOperationsRepository = AtalaOperationsRepository.unsafe(dbLiftedToTraceIdIO, flowPocTestLogs) - metricsCountersRepository = MetricsCountersRepository.unsafe(dbLiftedToTraceIdIO, flowPocTestLogs) - atalaObjectsTransactionsRepository = AtalaObjectsTransactionsRepository - .unsafe(dbLiftedToTraceIdIO, flowPocTestLogs) - submissionService = SubmissionService.unsafe( - atalaReferenceLedger, - atalaOperationsRepository, - atalaObjectsTransactionsRepository, - logs = flowPocTestLogs - ) - // this service needs to pull operations from the database and to send them to the ledger - submissionSchedulingService = SubmissionSchedulingService( - SubmissionSchedulingService.Config( - refreshAndSubmitPeriod = 1.second, - moveScheduledToPendingPeriod = 2.second - ), - submissionService - ) - keyValuesRepository = KeyValuesRepository.unsafe(dbLiftedToTraceIdIO, flowPocTestLogs) - objectManagementService = ObjectManagementService.unsafe( - atalaOperationsRepository, - atalaObjectsTransactionsRepository, - keyValuesRepository, - protocolVersionsRepository, - blockProcessingService, - publicKeysLimit, - servicesLimit, - dbLiftedToTraceIdIO, - flowPocTestLogs - ) - def onAtalaReference( - notification: AtalaObjectNotification - ): IOWithTraceIdContext[Unit] = objectManagementService - .saveObject(notification) - .void - - serverName = InProcessServerBuilder.generateName() - - serverHandle = InProcessServerBuilder - .forName(serverName) - .directExecutor() - .addService( - node_api.NodeServiceGrpc - .bindService( - new NodeGrpcServiceImpl( - NodeService.unsafe( - didDataRepository, - objectManagementService, - credentialBatchesRepository, - flowPocTestLogs - ) - ), - executionContext - ) - ) - .build() - .start() - - channelHandle = InProcessChannelBuilder.forName(serverName).directExecutor().build() - - nodeServiceStub = node_api.NodeServiceGrpc.blockingStub(channelHandle) - } - - override def afterEach(): Unit = { - channelHandle.shutdown() - channelHandle.awaitTermination(10, TimeUnit.SECONDS) - serverHandle.shutdown() - serverHandle.awaitTermination() - super.afterEach() - } - - "The batch issuance/verification flow" should { - "work" in { - - // the idea of the flow to implement - // 1. issuer generates a DID with the wallet - // 2- she uses the connector to publish it - // 3. she grabs credential data from the management console - // 4- she builds 4 generic credentials - // 5. she signs them with the wallet - // 6. she issues the credentials as two batches (with 2 credentials per batch) - // through the management console - // 7. she encodes the credentials and sends them through the connector along with - // the corresponding proofs of inclusion - // ... later ... - // 8. a verifier receives the credentials through the connector - // 9. gives the signed credentials to the wallet to verify them and it succeeds - // ... later ... - // 10. 
the issuer decides to revoke the first batch - // 11. the issuer decides to revoke the first credential from the second batch - // ... later ... - // 12. the verifier calls the wallet again to verify the credentials - // and the verification fails for all but the second credential of the second batch - - val wallet = Wallet(nodeServiceStub) - val console = ManagementConsole(nodeServiceStub) - val connector = Connector(nodeServiceStub) - - // 1. issuer generates a DID with the wallet - val (didSuffix, createDIDOp) = wallet.generateDID() - - // 2- she uses the connector to publish it - val signedCreateDIDOp = - wallet.signOperation(createDIDOp, masterKeyId, didSuffix) - val registerDIDOperationId = connector - .registerDID(signedAtalaOperation = signedCreateDIDOp) - .getOperationId - DataPreparation.waitConfirmation( - nodeServiceStub, - registerDIDOperationId - ) - - // 3. she grabs credential data from the management console - val consoleCredentials = console.getCredentials(4) - - // 4. she builds 4 generic credentials - val issuanceKeyId = "issuance0" - - val issuerDID = DID.buildCanonical(Sha256Digest.fromHex(didSuffix.value)) - val credentialsToSign = consoleCredentials.map { credential => - GenericCredentialsSDK.buildGenericCredential( - "university-degree", - issuerDID, - issuanceKeyId, - credential.credentialData - ) - } - - // 5. she signs them with the wallet - val signedCredentials = credentialsToSign.map { credentialToSign => - wallet.signCredential(credentialToSign, issuanceKeyId, didSuffix) - } - - // 6. she issues the credentials as two batches (with 2 credentials per batch) - // through the management console - val batch1 = CredentialBatches.batch(signedCredentials.take(2).asJava) - val (root1, proofs1) = (batch1.getRoot, batch1.getProofs.asScala.toList) - val batch2 = CredentialBatches.batch(signedCredentials.drop(2).asJava) - val (root2, proofs2) = (batch2.getRoot, batch2.getProofs.asScala.toList) - - val issueBatch1Op = issueBatchOperation(issuerDID, root1) - val issueBatch2Op = issueBatchOperation(issuerDID, root2) - - val signedIssueBatch1Op = - wallet.signOperation(issueBatch1Op, issuanceKeyId, didSuffix) - val signedIssueBatch2Op = - wallet.signOperation(issueBatch2Op, issuanceKeyId, didSuffix) - val issueCredentialBatchOperationId1 = - console.issueCredentialBatch(signedIssueBatch1Op).getOperationId - val issueCredentialBatchOperationId2 = - console.issueCredentialBatch(signedIssueBatch2Op).getOperationId - DataPreparation.waitConfirmation( - nodeServiceStub, - issueCredentialBatchOperationId1, - issueCredentialBatchOperationId2 - ) - - // 7. she encodes the credentials and sends them through the connector along with - // the corresponding proofs of inclusion - val credentialsToSend = - signedCredentials.zip(proofs1 ++ proofs2).map { case (c, p) => - (c.getCanonicalForm, p) - } - connector.sendCredentialAndProof(credentialsToSend) - - // ... later ... - // 8. a verifier receives the credentials through the connector - val List((c1, p1), (c2, p2), (c3, p3), (c4, p4)) = - connector.receivedCredentialAndProof().map { case (c, p) => - (JsonBasedCredential.fromString(c), p) - } - - // 9. gives the signed credentials to the wallet to verify them and it succeeds - wallet.verifyCredential(c1, p1).isValid mustBe true - wallet.verifyCredential(c2, p2).isValid mustBe true - wallet.verifyCredential(c3, p3).isValid mustBe true - wallet.verifyCredential(c4, p4).isValid mustBe true - - // ... later ... - // 10. 
the issuer decides to revoke the first batch - val revocationKeyId = "revocation0" - wallet.addRevocationKeyToDid( - revocationKeyId = revocationKeyId, - previousOperationHash = ByteString.copyFrom(Sha256.compute(createDIDOp.toByteArray).getValue), - didSuffix = didSuffix - ) - - val issueBatch1OpHash = Sha256.compute(issueBatch1Op.toByteArray) - val batchId1 = CredentialBatchId.fromBatchData(issuerDID.getSuffix, root1) - val revokeBatch1Op = - revokeCredentialsOperation(issueBatch1OpHash, batchId1) - val signedRevokeBatch1Op = - wallet.signOperation(revokeBatch1Op, revocationKeyId, didSuffix) - val revokeCredentialBatchOperationId = - console.revokeCredentialBatch(signedRevokeBatch1Op).getOperationId - - // 11. the issuer decides to revoke the first credential from the second batch - val issueBatch2OpHash = Sha256.compute(issueBatch2Op.toByteArray) - val batchId2 = CredentialBatchId.fromBatchData(issuerDID.getSuffix, root2) - val revokeC3Op = - revokeCredentialsOperation(issueBatch2OpHash, batchId2, Seq(c3.hash)) - val signedRevokeC3Op = - wallet.signOperation(revokeC3Op, revocationKeyId, didSuffix) - val revokeSpecificCredentialsOperationId = - console.revokeSpecificCredentials(signedRevokeC3Op).getOperationId - - DataPreparation.waitConfirmation( - nodeServiceStub, - revokeCredentialBatchOperationId, - revokeSpecificCredentialsOperationId - ) - - // ... later ... - // 12. the verifier calls the wallet again to verify the credentials - // and the verification fails for all but the second credential of the second batch - val e1 = wallet.verifyCredential(c1, p1).invalid.e - e1.size mustBe 1 - e1.head mustBe a[BatchWasRevoked] - - val e2 = wallet.verifyCredential(c2, p2).invalid.e - e2.size mustBe 1 - e2.head mustBe a[BatchWasRevoked] - - val e3 = wallet.verifyCredential(c3, p3).invalid.e - e3.size mustBe 1 - e3.head mustBe a[CredentialWasRevoked] - - wallet.verifyCredential(c4, p4).isValid mustBe true - } - } -} diff --git a/node/src/test/scala/io/iohk/atala/prism/node/poc/batch/ManagementConsole.scala b/node/src/test/scala/io/iohk/atala/prism/node/poc/batch/ManagementConsole.scala deleted file mode 100644 index 1fcbbf0ce3..0000000000 --- a/node/src/test/scala/io/iohk/atala/prism/node/poc/batch/ManagementConsole.scala +++ /dev/null @@ -1,71 +0,0 @@ -package io.iohk.atala.prism.node.poc.batch - -import java.time.LocalDate -import java.util.UUID -import io.iohk.atala.prism.protos.console_models.CManagerGenericCredential -import io.iohk.atala.prism.protos.node_api.ScheduleOperationsRequest -import io.iohk.atala.prism.protos.{node_api, node_models} -import io.iohk.atala.prism.protos.node_models.SignedAtalaOperation - -case class ManagementConsole( - node: node_api.NodeServiceGrpc.NodeServiceBlockingStub -) { - // example credentials we have from the backend - def getCredentials( - amountOfCredentials: Int - ): List[CManagerGenericCredential] = - (1 to amountOfCredentials).toList.map { index => - CManagerGenericCredential( - credentialId = UUID.randomUUID().toString, - issuerId = UUID.randomUUID().toString, - contactId = UUID.randomUUID().toString, - credentialData = s"""{ - | "title" : "Bs in Computer Science", - | "enrollmentDate" : "${LocalDate.now()}", - | "graduationDate" : "${LocalDate.now()}", - | "subjectName" : "Asymptomatic Joe $index" - |}""".stripMargin, - issuerName = "National University of Rosario" - ) - } - - // this is a toy API to simulate what the console does - def issueCredentialBatch( - issueCredentialBatchOperation: SignedAtalaOperation - ): node_models.OperationOutput = 
{ - // First some storage stuff to mark a credential as stored - // It then posts the operation to the node - node - .scheduleOperations( - ScheduleOperationsRequest(List(issueCredentialBatchOperation)) - ) - .outputs - .head - } - - def revokeCredentialBatch( - revokeCredentialBatchOperation: SignedAtalaOperation - ): node_models.OperationOutput = { - // First storage stuff - // then, posting things on the blockchain through the node - node - .scheduleOperations( - ScheduleOperationsRequest(List(revokeCredentialBatchOperation)) - ) - .outputs - .head - } - - def revokeSpecificCredentials( - revokeCredentialBatchOperation: SignedAtalaOperation - ): node_models.OperationOutput = { - // First storage stuff - // then, posting things on the blockchain through the node - node - .scheduleOperations( - ScheduleOperationsRequest(List(revokeCredentialBatchOperation)) - ) - .outputs - .head - } -} diff --git a/node/src/test/scala/io/iohk/atala/prism/node/poc/estimations/CardanoFeeEstimator.scala b/node/src/test/scala/io/iohk/atala/prism/node/poc/estimations/CardanoFeeEstimator.scala deleted file mode 100644 index cfb52d226d..0000000000 --- a/node/src/test/scala/io/iohk/atala/prism/node/poc/estimations/CardanoFeeEstimator.scala +++ /dev/null @@ -1,348 +0,0 @@ -package io.iohk.atala.prism.node.poc.estimations - -import cats.effect.IO -import cats.effect.unsafe.implicits.global -import com.google.protobuf.ByteString -import com.typesafe.config.ConfigFactory -import io.iohk.atala.prism.crypto.{MerkleRoot, Sha256} -import io.iohk.atala.prism.crypto.EC.{INSTANCE => EC} -import io.iohk.atala.prism.crypto.keys.{ECPrivateKey, ECPublicKey} -import io.iohk.atala.prism.crypto.ECConfig.{INSTANCE => ECConfig} -import io.iohk.atala.prism.identity.{CanonicalPrismDid => Canonical, PrismDid => DID} -import io.iohk.atala.prism.node.NodeConfig -import io.iohk.atala.prism.node.cardano.models._ -import io.iohk.atala.prism.node.cardano.wallet.CardanoWalletApiClient -import io.iohk.atala.prism.node.poc.estimations.CardanoFeeEstimator.{Estimation, Issuer, TotalEstimation} -import io.iohk.atala.prism.protos.node_internal.AtalaObject -import io.iohk.atala.prism.protos.node_models.{AtalaOperation, SignedAtalaOperation} -import io.iohk.atala.prism.protos.{node_internal, node_models} -import org.scalatest.OptionValues._ -import org.scalatest.concurrent.ScalaFutures._ -import tofu.logging.Logs -import io.iohk.atala.prism.node.utils.IOUtils._ -import scala.collection.mutable.ListBuffer -import scala.concurrent.duration._ - -/** Estimates the Cardano fees to pay for a given deployment simulation. - * - *

You can run the estimator with `sbt node/test:run` and choosing `CardanoFeeEstimator` from the list. In order to - * do so, make sure you have set the proper environment variables, as suggested here. - */ -class CardanoFeeEstimator( - walletId: WalletId, - paymentAddress: Address, - cardanoWalletApiClient: CardanoWalletApiClient[IO] -) { - // Max number of credentials that can be issued in the same transaction - private val MAX_CREDENTIAL_BATCH_SIZE = 2048 - - private implicit def patienceConfig: PatienceConfig = - PatienceConfig(20.seconds, 50.millis) - - def estimate(issuers: List[Issuer]): TotalEstimation = { - val createDidAtalaObjects = ListBuffer[AtalaObject]() - val issueCredentialBatchAtalaObjects = ListBuffer[AtalaObject]() - issuers.foreach { issuer => - // Create the DID of the issuer - val masterKey = EC.generateKeyPair() - val issuingKey = EC.generateKeyPair() - val did = createDID(s"Issuer ${issuer.id}") - val masterKeyOperation = addMasterKeyOperation(masterKey.getPublicKey) - createDidAtalaObjects += createAtalaObject( - signOperation(masterKeyOperation, masterKey.getPrivateKey), - signOperation( - addIssuingKeyOperation( - did, - issuingKey.getPublicKey, - masterKeyOperation - ), - masterKey.getPrivateKey - ) - ) - - // Issue credentials - issuer.credentialsToIssue.foreach { credentialsToIssue => - val batches = math - .ceil(credentialsToIssue / MAX_CREDENTIAL_BATCH_SIZE.toDouble) - .toInt - for (batchId <- 0 until batches) { - val merkleRoot = new MerkleRoot( - Sha256.compute(s"Issuer ${issuer.id}, batch $batchId".getBytes) - ) - issueCredentialBatchAtalaObjects += createAtalaObject( - signOperation( - issueCredentialBatchOperation(merkleRoot, did), - issuingKey.getPrivateKey - ) - ) - } - } - } - - TotalEstimation( - didCreation = Estimation( - transactions = createDidAtalaObjects.size, - fees = estimateFees(createDidAtalaObjects) - ), - credentialIssuing = Estimation( - transactions = issueCredentialBatchAtalaObjects.size, - fees = estimateFees(issueCredentialBatchAtalaObjects) - ) - ) - } - - private def estimateFees(atalaObjects: Iterable[AtalaObject]): Lovelace = { - val atalaObjectsBySize = atalaObjects.groupBy(_.toByteArray.length) - val fees = atalaObjectsBySize.foldLeft(BigInt(0)) { case (sum, (_, atalaObjectsWithSameSize)) => - // For performance, use an arbitrary object to estimate all of the objects with the same size, even though they - // may get different fees - sum + atalaObjectsWithSameSize.size * estimateFee( - atalaObjectsWithSameSize.head - ) - } - Lovelace(fees) - } - - private def estimateFee(atalaObject: AtalaObject): Lovelace = { - val estimatedFee = cardanoWalletApiClient - .estimateTransactionFee( - walletId = walletId, - payments = List(Payment(paymentAddress, Lovelace(1000000))), - metadata = Some(AtalaObjectMetadata.toTransactionMetadata(atalaObject)) - ) - .unsafeToFuture() - .futureValue - .toOption - .value - - // We are only interested in the minimum estimated fee, because the maximum is dynamic and wallet-dependent - estimatedFee.min - } - - private def createAtalaObject( - operations: SignedAtalaOperation* - ): AtalaObject = { - val block = node_internal.AtalaBlock(operations) - AtalaObject().withBlockContent(block) - } - - private def signOperation( - atalaOperation: AtalaOperation, - privateKey: ECPrivateKey - ): SignedAtalaOperation = { - node_models.SignedAtalaOperation( - signedWith = DID.getDEFAULT_MASTER_KEY_ID, - operation = Some(atalaOperation), - signature = ByteString.copyFrom( - EC.signBytes(atalaOperation.toByteArray, 
privateKey).getData - ) - ) - } - - private def createDID(id: String): Canonical = { - DID.buildCanonical(Sha256.compute(id.getBytes)) - } - - private def addMasterKeyOperation(publicKey: ECPublicKey): AtalaOperation = { - val createDIDOp = node_models.CreateDIDOperation( - didData = Some( - node_models.CreateDIDOperation.DIDCreationData( - publicKeys = Seq( - node_models.PublicKey( - id = DID.getDEFAULT_MASTER_KEY_ID, - usage = node_models.KeyUsage.MASTER_KEY, - keyData = node_models.PublicKey.KeyData.EcKeyData( - publicKeyToProto(publicKey) - ) - ) - ) - ) - ) - ) - - node_models.AtalaOperation(AtalaOperation.Operation.CreateDid(createDIDOp)) - } - - private def addIssuingKeyOperation( - did: Canonical, - publicKey: ECPublicKey, - lastOperation: AtalaOperation - ): AtalaOperation = { - val createDIDOp = node_models.UpdateDIDOperation( - previousOperationHash = ByteString.copyFrom(Sha256.compute(lastOperation.toByteArray).getValue), - id = did.getSuffix, - actions = List( - node_models.UpdateDIDAction( - action = node_models.UpdateDIDAction.Action.AddKey( - node_models.AddKeyAction( - key = Some( - node_models.PublicKey( - id = s"issuing0", - usage = node_models.KeyUsage.ISSUING_KEY, - keyData = node_models.PublicKey.KeyData.EcKeyData( - publicKeyToProto(publicKey) - ) - ) - ) - ) - ) - ) - ) - ) - - node_models.AtalaOperation(AtalaOperation.Operation.UpdateDid(createDIDOp)) - } - - private def issueCredentialBatchOperation( - merkleRoot: MerkleRoot, - issuerDid: Canonical - ): AtalaOperation = { - val issueCredentialOp = node_models.IssueCredentialBatchOperation( - credentialBatchData = Some( - node_models.CredentialBatchData( - issuerDid = issuerDid.getSuffix, - merkleRoot = ByteString.copyFrom(merkleRoot.getHash.getValue) - ) - ) - ) - - node_models.AtalaOperation( - AtalaOperation.Operation.IssueCredentialBatch(issueCredentialOp) - ) - } - - private def publicKeyToProto(key: ECPublicKey): node_models.ECKeyData = { - val point = key.getCurvePoint - node_models.ECKeyData( - curve = ECConfig.getCURVE_NAME, - x = ByteString.copyFrom(point.getX.bytes()), - y = ByteString.copyFrom(point.getY.bytes()) - ) - } -} - -object CardanoFeeEstimator { - case class Issuer(id: Int, credentialsToIssue: List[Int]) - - sealed trait EstimationFormat { - val transactions: Int - val fees: Lovelace - - def toString(indent: String): String = { - val averageFee = Lovelace(fees / transactions) - s"""${indent}Transactions: $transactions - |${indent}Fees: ${fees.asAda} (${fees.asUsd}) - | ${indent}Average fee: ${averageFee.asLovelace} (${averageFee.asUsd}) - |""".stripMargin - } - } - - case class TotalEstimation( - didCreation: Estimation, - credentialIssuing: Estimation - ) extends EstimationFormat { - override val transactions: Int = - didCreation.transactions + credentialIssuing.transactions - override val fees: Lovelace = Lovelace( - didCreation.fees + credentialIssuing.fees - ) - } - - case class Estimation(transactions: Int, fees: Lovelace) extends EstimationFormat - - def main(args: Array[String]): Unit = { - estimateEthiopia() - // Force termination as a hanging thread seems to exist - sys.exit(0) - } - - private def estimateEthiopia(): Unit = { - /* - There are: - - 5M students - - 700K teachers - - 4K schools - Schools will: - - Issue IDs to students - - Report 4 times per year - - Issue a yearly certificate - The National Exam Certificate body will: - - Issue certificates for ~500K students per year (for grades 7, 9, and 11) - - Assumptions: - - Students will use unpublished DIDs - - Teachers do not 
need public DIDs as they don't issue credentials (the school does) - - School DIDs are batched as part of the deployment - - Students are evenly distributed in schools - */ - val students = 5000000 - val schools = 4000 - val yearlyNationalExamCertificates = 500000 - val studentsPerSchool = students / schools - val yearlyReportsPerStudent = 4 - val yearlyCertificatesPerStudent = 1 - val yearlyCredentialsPerStudent = - yearlyReportsPerStudent + yearlyCertificatesPerStudent - - // Issue `yearlyNationalExamCertificates` credentials once per year - val nationalExamCertBody = - Issuer(id = 0, credentialsToIssue = List(yearlyNationalExamCertificates)) - val schoolIssuers = List.tabulate(schools)(schoolId => - // Issue `studentsPerSchool` credentials `yearlyCredentialsPerStudent` times in a year - Issuer( - id = schoolId, - credentialsToIssue = List.fill(yearlyCredentialsPerStudent)(studentsPerSchool) - ) - ) - - val estimator = createCardanoFeeEstimator() - val estimation = - estimator.estimate(List(nationalExamCertBody) ++ schoolIssuers) - - println(s"""Ethiopia estimation: - | Initial setup (DID creation): - |${estimation.didCreation.toString(" - ")} - | Yearly (credential issuing): - |${estimation.credentialIssuing.toString(" - ")} - | Total: - |${estimation.toString(" - ")} - |""".stripMargin) - } - - private def createCardanoFeeEstimator(): CardanoFeeEstimator = { - - val clientConfig = - NodeConfig.cardanoConfig(ConfigFactory.load().getConfig("cardano")) - val walletId = WalletId.from(clientConfig.walletId).value - val paymentAddress = Address(clientConfig.paymentAddress) - val logs = Logs.sync[IO, IO] - val cardanoWalletApiClient = - CardanoWalletApiClient.unsafe[IO, IO]( - clientConfig.cardanoClientConfig.cardanoWalletConfig, - logs - ) - - new CardanoFeeEstimator(walletId, paymentAddress, cardanoWalletApiClient) - } - - implicit class LovelaceFormat(val amount: Lovelace) { - private val ADA_USD_PRICE = 0.103377 - - private def toAda: Double = { - amount.toDouble / 1000000 - } - - def asLovelace: String = { - f"$amount lovelace" - } - - def asAda: String = { - f"$toAda%.6f ADA" - } - - def asUsd: String = { - f"$$${toAda * ADA_USD_PRICE}%.2f USD" - } - } -} diff --git a/node/src/test/scala/io/iohk/atala/prism/node/repositories/CredentialBatchesRepositorySpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/repositories/CredentialBatchesRepositorySpec.scala deleted file mode 100644 index d368883c3e..0000000000 --- a/node/src/test/scala/io/iohk/atala/prism/node/repositories/CredentialBatchesRepositorySpec.scala +++ /dev/null @@ -1,278 +0,0 @@ -package io.iohk.atala.prism.node.repositories - -import io.iohk.atala.prism.credentials.CredentialBatchId -import io.iohk.atala.prism.crypto.{MerkleRoot, Sha256, Sha256Digest} -import io.iohk.atala.prism.node.models.{DidSuffix, Ledger, TransactionId} -import io.iohk.atala.prism.node.models.nodeState.{CredentialBatchState, LedgerData} -import org.scalatest.OptionValues._ -import java.time.Instant -import cats.effect.IO -import cats.effect.unsafe.implicits.global -import doobie.util.transactor.Transactor -import io.iohk.atala.prism.protos.models.TimestampInfo -import io.iohk.atala.prism.node.{AtalaWithPostgresSpec, DataPreparation} -import io.iohk.atala.prism.node.models.DIDData -import tofu.logging.Logging - -class CredentialBatchesRepositorySpec extends AtalaWithPostgresSpec { - - import CredentialBatchesRepositorySpec._ - - private val logs = Logging.Make.plain[IO] - - private lazy implicit val repository: CredentialBatchesRepository[IO] = - 
CredentialBatchesRepository.unsafe(database, logs) - - private val dummyTimestampInfo = - new TimestampInfo(Instant.ofEpochMilli(0).toEpochMilli, 1, 0) - private val dummyLedgerData = LedgerData( - TransactionId - .from(Array.fill[Byte](TransactionId.config.size.toBytes.toInt)(0)) - .value, - Ledger.InMemory, - dummyTimestampInfo - ) - - "CredentialsRepository.getCredentialRevocationTime" should { - "return empty timestamp when there is no data associated to the credential and batch" in { - val randomBatchId = CredentialBatchId.random() - val randomCredentialHash = Sha256.compute("random".getBytes()) - - revocationTime(randomBatchId, randomCredentialHash) must be(None) - } - - "return proper timestamp when there is data associated to the credential and batch" in { - val randomBatchId = CredentialBatchId.random() - val randomCredentialHash1 = Sha256.compute("random".getBytes()) - val randomCredentialHash2 = Sha256.compute("another random".getBytes()) - val randomRevocationTime = - new TimestampInfo(Instant.now().toEpochMilli, 10, 100) - val randomRevocationLedgerData = LedgerData( - TransactionId - .from(Array.fill[Byte](TransactionId.config.size.toBytes.toInt)(0)) - .value, - Ledger.InMemory, - randomRevocationTime - ) - - val randomIssuerDIDSuffix = - DidSuffix(Sha256.compute("did".getBytes()).getHexValue) - val randomLastOperation = Sha256.compute("lastOperation".getBytes()) - val randomMerkleRoot = - new MerkleRoot(Sha256.compute("merkleRoot".getBytes())) - val randomIssuedOnTime = LedgerData( - TransactionId - .from(Array.fill[Byte](TransactionId.config.size.toBytes.toInt)(0)) - .value, - Ledger.InMemory, - dummyTimestampInfo - ) - - registerDID(randomIssuerDIDSuffix) - - DataPreparation.createBatch( - randomBatchId, - randomLastOperation, - randomIssuerDIDSuffix, - randomMerkleRoot, - randomIssuedOnTime - ) - - DataPreparation.revokeCredentials( - randomBatchId, - List(randomCredentialHash1, randomCredentialHash2), - randomRevocationLedgerData - ) - - revocationTime(randomBatchId, randomCredentialHash1) must be( - Some(randomRevocationLedgerData) - ) - revocationTime(randomBatchId, randomCredentialHash2) must be( - Some(randomRevocationLedgerData) - ) - } - } - - "CredentialsRepository.getBatchState" should { - "return empty when the batch is unknown" in { - val randomBatchId = CredentialBatchId.random() - - val response = repository - .getBatchState(randomBatchId) - .unsafeRunSync() - .toOption - .flatten - - response must be(empty) - } - - "return proper data when there is non-revoked batch data" in { - val randomBatchId = CredentialBatchId.random() - val randomIssuerDIDSuffix = - DidSuffix(Sha256.compute("did".getBytes()).getHexValue) - val randomLastOperation = Sha256.compute("lastOperation".getBytes()) - val randomMerkleRoot = - new MerkleRoot(Sha256.compute("merkleRoot".getBytes())) - val randomIssuedOnLedgerData = LedgerData( - TransactionId - .from(Array.fill[Byte](TransactionId.config.size.toBytes.toInt)(0)) - .value, - Ledger.InMemory, - dummyTimestampInfo - ) - - registerDID(randomIssuerDIDSuffix) - - DataPreparation.createBatch( - randomBatchId, - randomLastOperation, - randomIssuerDIDSuffix, - randomMerkleRoot, - randomIssuedOnLedgerData - ) - - val expectedState = CredentialBatchState( - batchId = randomBatchId, - issuerDIDSuffix = randomIssuerDIDSuffix, - merkleRoot = randomMerkleRoot, - issuedOn = randomIssuedOnLedgerData, - revokedOn = None, - lastOperation = randomLastOperation - ) - - repository - .getBatchState(randomBatchId) - .unsafeRunSync() - .toOption - 
.flatten must be(Some(expectedState)) - } - - "return proper data when the batch was revoked" in { - val randomBatchId = CredentialBatchId.random() - val randomIssuerDIDSuffix = - DidSuffix(Sha256.compute("did".getBytes()).getHexValue) - val randomLastOperation = Sha256.compute("lastOperation".getBytes()) - val randomMerkleRoot = - new MerkleRoot(Sha256.compute("merkleRoot".getBytes())) - val randomIssuedOnLedgerData = dummyLedgerData - - val randomRevocationTime = - new TimestampInfo(Instant.now().toEpochMilli, 10, 100) - val randomRevocationLedgerData = LedgerData( - TransactionId - .from(Array.fill[Byte](TransactionId.config.size.toBytes.toInt)(0)) - .value, - Ledger.InMemory, - randomRevocationTime - ) - - registerDID(randomIssuerDIDSuffix) - - DataPreparation.createBatch( - randomBatchId, - randomLastOperation, - randomIssuerDIDSuffix, - randomMerkleRoot, - randomIssuedOnLedgerData - ) - - DataPreparation.revokeCredentialBatch( - randomBatchId, - randomRevocationLedgerData - ) - - val expectedState = CredentialBatchState( - batchId = randomBatchId, - issuerDIDSuffix = randomIssuerDIDSuffix, - merkleRoot = randomMerkleRoot, - issuedOn = randomIssuedOnLedgerData, - revokedOn = Some(randomRevocationLedgerData), - lastOperation = randomLastOperation - ) - - repository - .getBatchState(randomBatchId) - .unsafeRunSync() - .toOption - .flatten must be(Some(expectedState)) - } - - "revocation of previously revoked credential doesn't throw any errors" in { - val randomBatchId = CredentialBatchId.random() - val randomIssuerDIDSuffix = DidSuffix(Sha256.compute("did".getBytes()).getHexValue) - val randomLastOperation = Sha256.compute("lastOperation".getBytes()) - val randomMerkleRoot = new MerkleRoot(Sha256.compute("merkleRoot".getBytes())) - val randomCredentialId1 = Sha256.compute("randomCredential1".getBytes()) - val randomCredentialId2 = Sha256.compute("randomCredential2".getBytes()) - val randomCredentialId3 = Sha256.compute("randomCredential3".getBytes()) - val randomIssuedOnLedgerData = dummyLedgerData - - val randomRevocationTime = - new TimestampInfo(Instant.now().toEpochMilli, 10, 100) - val randomRevocationLedgerData = LedgerData( - TransactionId - .from(Array.fill[Byte](TransactionId.config.size.toBytes.toInt)(0)) - .value, - Ledger.InMemory, - randomRevocationTime - ) - - registerDID(randomIssuerDIDSuffix) - - DataPreparation.createBatch( - randomBatchId, - randomLastOperation, - randomIssuerDIDSuffix, - randomMerkleRoot, - randomIssuedOnLedgerData - ) - - DataPreparation.revokeCredentials( - randomBatchId, - List(randomCredentialId1, randomCredentialId2), - randomRevocationLedgerData - ) - - DataPreparation.revokeCredentials( - randomBatchId, - List(randomCredentialId2, randomCredentialId3), - randomRevocationLedgerData - ) - } - } -} - -object CredentialBatchesRepositorySpec { - private def registerDID( - didSuffix: DidSuffix - )(implicit database: Transactor[IO]): Unit = { - val lastOperation = - Sha256.compute("a random did create operation".getBytes()) - val dummyTimestampInfo = - new TimestampInfo(Instant.ofEpochMilli(0).toEpochMilli, 1, 0) - val dummyLedgerData = LedgerData( - TransactionId - .from(Array.fill[Byte](TransactionId.config.size.toBytes.toInt)(0)) - .value, - Ledger.InMemory, - dummyTimestampInfo - ) - DataPreparation.createDID( - DIDData(didSuffix, keys = Nil, Nil, Nil, lastOperation), - dummyLedgerData - ) - } - - private def revocationTime( - batchId: CredentialBatchId, - credentialHash: Sha256Digest - )(implicit - repository: CredentialBatchesRepository[IO] - 
): Option[LedgerData] = { - repository - .getCredentialRevocationTime(batchId, credentialHash) - .unsafeRunSync() - .toOption - .flatten - } -} diff --git a/node/src/test/scala/io/iohk/atala/prism/node/repositories/daos/AtalaObjectTransactionSubmissionsDAOSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/repositories/daos/AtalaObjectTransactionSubmissionsDAOSpec.scala index 98bf3c59c2..eebe711850 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/repositories/daos/AtalaObjectTransactionSubmissionsDAOSpec.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/repositories/daos/AtalaObjectTransactionSubmissionsDAOSpec.scala @@ -16,7 +16,7 @@ import io.iohk.atala.prism.node.models.{ AtalaObjectTransactionSubmissionStatus } import io.iohk.atala.prism.node.services.BlockProcessingServiceSpec -import io.iohk.atala.prism.protos.node_internal +import io.iohk.atala.prism.protos.node_models import org.scalatest.OptionValues._ import scalapb.UnknownFieldSet @@ -25,11 +25,11 @@ import java.time.{Duration, Instant} class AtalaObjectTransactionSubmissionsDAOSpec extends AtalaWithPostgresSpec { private val ONE_SECOND = Duration.ofSeconds(1) - private val atalaObjectId = AtalaObjectId.of(node_internal.AtalaObject()) + private val atalaObjectId = AtalaObjectId.of(node_models.AtalaObject()) private val atalaObjectId2 = AtalaObjectId.of( - node_internal + node_models .AtalaObject() .withUnknownFields( // something to differentiate one object from another diff --git a/node/src/test/scala/io/iohk/atala/prism/node/repositories/daos/AtalaObjectsDAOSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/repositories/daos/AtalaObjectsDAOSpec.scala index cde1f31e30..138890b1bc 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/repositories/daos/AtalaObjectsDAOSpec.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/repositories/daos/AtalaObjectsDAOSpec.scala @@ -8,7 +8,7 @@ import io.iohk.atala.prism.node.models.{BlockInfo, Ledger, TransactionId, Transa import io.iohk.atala.prism.node.AtalaWithPostgresSpec import io.iohk.atala.prism.node.models.AtalaObjectStatus.{Merged, Pending, Processed, Scheduled} import io.iohk.atala.prism.node.models.{AtalaObjectId, AtalaObjectInfo, AtalaObjectStatus} -import io.iohk.atala.prism.protos.node_internal +import io.iohk.atala.prism.protos.node_models import org.scalatest.OptionValues._ import scalapb.UnknownFieldSet @@ -16,7 +16,7 @@ import java.time.Instant import scala.util.Random class AtalaObjectsDAOSpec extends AtalaWithPostgresSpec { - private val objectId = AtalaObjectId.of(node_internal.AtalaObject()) + private val objectId = AtalaObjectId.of(node_models.AtalaObject()) private val byteContent = "byteContent".getBytes private val transactionInfo = TransactionInfo( transactionId = TransactionId @@ -144,7 +144,7 @@ class AtalaObjectsDAOSpec extends AtalaWithPostgresSpec { // value is irrelevant in this case, we just need to make sure we can distinguish objects some how (by index in this case) val fieldSet = UnknownFieldSet.empty.withField(count, UnknownFieldSet.Field(fixed32 = Seq(count))) val objId = AtalaObjectId.of( - node_internal + node_models .AtalaObject() .withUnknownFields(fieldSet) ) @@ -162,7 +162,7 @@ class AtalaObjectsDAOSpec extends AtalaWithPostgresSpec { withClue(s"Index $ind:") { val fieldSet = UnknownFieldSet.empty.withField(ind, UnknownFieldSet.Field(fixed32 = Seq(ind))) objInfo.objectId mustBe AtalaObjectId.of( - node_internal.AtalaObject().withUnknownFields(fieldSet) + node_models.AtalaObject().withUnknownFields(fieldSet) ) 
} } @@ -175,11 +175,11 @@ class AtalaObjectsDAOSpec extends AtalaWithPostgresSpec { val random = Random val statuses = List(Scheduled, Pending, Merged, Processed) val generatedStatuses = (0 until N).map(_ => statuses(random.nextInt(4))) - var scheduledObjects = List[node_internal.AtalaObject]() + var scheduledObjects = List[node_models.AtalaObject]() (0 until N).foreach { count => val fieldSet = UnknownFieldSet.empty.withField(count, UnknownFieldSet.Field(fixed32 = Seq(count))) - val obj = node_internal.AtalaObject().withUnknownFields(fieldSet) + val obj = node_models.AtalaObject().withUnknownFields(fieldSet) val objId = AtalaObjectId.of(obj) val status = generatedStatuses(count) insert(objId, byteContent, generatedStatuses(count)) diff --git a/node/src/test/scala/io/iohk/atala/prism/node/repositories/package.scala b/node/src/test/scala/io/iohk/atala/prism/node/repositories/package.scala index fb8a58cb36..25dd9fe69c 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/repositories/package.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/repositories/package.scala @@ -2,7 +2,6 @@ package io.iohk.atala.prism.node import io.iohk.atala.prism.crypto.Sha256Digest import io.iohk.atala.prism.node.models.DidSuffix -import io.iohk.atala.prism.node.models.CredentialId package object repositories { @@ -13,8 +12,4 @@ package object repositories { digest.getHexValue ) - def credentialIdFromDigest(digest: Sha256Digest): CredentialId = CredentialId( - digest.getHexValue - ) - } diff --git a/node/src/test/scala/io/iohk/atala/prism/node/services/BlockProcessingServiceSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/services/BlockProcessingServiceSpec.scala index 25fd704751..58e3b05aad 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/services/BlockProcessingServiceSpec.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/services/BlockProcessingServiceSpec.scala @@ -19,7 +19,7 @@ import io.iohk.atala.prism.node.operations.{ import io.iohk.atala.prism.node.operations.UpdateDIDOperationSpec.{exampleAddKeyAction, exampleRemoveKeyAction} import io.iohk.atala.prism.node.repositories.daos.DIDDataDAO import io.iohk.atala.prism.protos.node_models.SignedAtalaOperation -import io.iohk.atala.prism.protos.{node_internal, node_models} +import io.iohk.atala.prism.protos.node_models import org.scalatest.OptionValues._ import java.time.Instant @@ -48,7 +48,7 @@ object BlockProcessingServiceSpec { val signedUpdateDidOperation: SignedAtalaOperation = signOperation(updateDidOperation, "master", masterKeys.getPrivateKey) - val exampleBlock = node_internal.AtalaBlock( + val exampleBlock = node_models.AtalaBlock( operations = Seq(signedCreateDidOperation) ) @@ -144,7 +144,7 @@ class BlockProcessingServiceSpec extends AtalaWithPostgresSpec { opIds.size must be(1) val atalaOperationId = opIds.head - val atalaBlock = node_internal.AtalaBlock( + val atalaBlock = node_models.AtalaBlock( operations = Seq(signedUpdateDidOperation) ) @@ -185,7 +185,7 @@ class BlockProcessingServiceSpec extends AtalaWithPostgresSpec { opIds.size must be(1) val atalaOperationId = opIds.head - val invalidSignatureBlock = node_internal.AtalaBlock( + val invalidSignatureBlock = node_models.AtalaBlock( operations = Seq(invalidSignatureOperation) ) @@ -223,7 +223,7 @@ class BlockProcessingServiceSpec extends AtalaWithPostgresSpec { val signedValidOperation = signOperation(createDidOperation, "master", masterKeys.getPrivateKey) - val block = node_internal.AtalaBlock( + val block = node_models.AtalaBlock( operations = 
Seq(signedInvalidOperation, signedValidOperation) ) val (objId, opIds) = DataPreparation.insertOperationStatuses( @@ -316,7 +316,7 @@ class BlockProcessingServiceSpec extends AtalaWithPostgresSpec { val signedOperation3 = signOperation(operation3, "rootkey", operation3Keys.getPrivateKey) - val block = node_internal.AtalaBlock( + val block = node_models.AtalaBlock( operations = Seq(signedOperation1, incorrectlySignedOperation2, signedOperation3) ) @@ -416,7 +416,7 @@ class BlockProcessingServiceSpec extends AtalaWithPostgresSpec { val updateDidSignedOperation2 = signOperation(updateDidOperation2, "master", masterKeys.getPrivateKey) - val block = node_internal.AtalaBlock( + val block = node_models.AtalaBlock( operations = Seq( createDidSignedOperation, updateDidSignedOperation1, @@ -518,15 +518,15 @@ class BlockProcessingServiceSpec extends AtalaWithPostgresSpec { val signedOperation3 = signOperation(operation3, "rootkey", operation3Keys.getPrivateKey) - val block1 = node_internal + val block1 = node_models .AtalaBlock( // first block contains 1 valid and 1 invalid operation operations = Seq(signedOperation1, incorrectlySignedOperation2) ) - val block2 = node_internal + val block2 = node_models .AtalaBlock( // second block contains 1 valid operation, and two duplications operations = Seq(signedOperation3, signedOperation1, signedOperation3) ) - val block3 = node_internal + val block3 = node_models .AtalaBlock( // third block contains 1 duplicate operation and 1 invalid operations = Seq(signedOperation1, incorrectlySignedOperation2) ) diff --git a/node/src/test/scala/io/iohk/atala/prism/node/services/CardanoLedgerServiceIntegrationSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/services/CardanoLedgerServiceIntegrationSpec.scala index ff9577df29..34db657165 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/services/CardanoLedgerServiceIntegrationSpec.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/services/CardanoLedgerServiceIntegrationSpec.scala @@ -10,7 +10,7 @@ import io.iohk.atala.prism.node.cardano.models.{Address, WalletId} import io.iohk.atala.prism.node.repositories.KeyValuesRepository import io.iohk.atala.prism.node.services.CardanoLedgerService.CardanoNetwork import io.iohk.atala.prism.node.services.models.testing.TestAtalaHandlers -import io.iohk.atala.prism.protos.node_internal +import io.iohk.atala.prism.protos.node_models import io.iohk.atala.prism.node.AtalaWithPostgresSpec import io.iohk.atala.prism.node.logging.TraceId import io.iohk.atala.prism.node.logging.TraceId.IOWithTraceIdContext @@ -88,9 +88,9 @@ class CardanoLedgerServiceIntegrationSpec extends AtalaWithPostgresSpec { .unsafeRunSync() // Publish random object - val atalaObject = node_internal + val atalaObject = node_models .AtalaObject() - .withBlockContent(node_internal.AtalaBlock(operations = Seq())) + .withBlockContent(node_models.AtalaBlock(operations = Seq())) val transaction = cardanoLedgerService .publish(atalaObject) .run(TraceId.generateYOLO) @@ -102,7 +102,7 @@ class CardanoLedgerServiceIntegrationSpec extends AtalaWithPostgresSpec { s"AtalaObject published in transaction ${transaction.transactionId} on ${transaction.ledger}" ) - def notifiedAtalaObjects: Seq[node_internal.AtalaObject] = { + def notifiedAtalaObjects: Seq[node_models.AtalaObject] = { notificationHandler.receivedNotifications.map(_.atalaObject).toSeq } diff --git a/node/src/test/scala/io/iohk/atala/prism/node/services/CardanoLedgerServiceSpec.scala 
b/node/src/test/scala/io/iohk/atala/prism/node/services/CardanoLedgerServiceSpec.scala index a0d3bb36f1..8c0fb3f772 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/services/CardanoLedgerServiceSpec.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/services/CardanoLedgerServiceSpec.scala @@ -20,7 +20,7 @@ import io.iohk.atala.prism.node.repositories.KeyValuesRepository import io.iohk.atala.prism.node.services.CardanoLedgerService.{CardanoBlockHandler, CardanoNetwork} import io.iohk.atala.prism.node.services.models.testing.TestAtalaHandlers import io.iohk.atala.prism.node.services.models.{AtalaObjectNotification, AtalaObjectNotificationHandler} -import io.iohk.atala.prism.protos.node_internal +import io.iohk.atala.prism.protos.node_models import io.iohk.atala.prism.node.utils.BytesOps import io.iohk.atala.prism.node.utils.IOUtils._ import org.scalatest.OptionValues._ @@ -53,9 +53,9 @@ class CardanoLedgerServiceSpec extends AtalaWithPostgresSpec { } "publish" should { - val atalaObject = node_internal + val atalaObject = node_models .AtalaObject() - .withBlockContent(node_internal.AtalaBlock()) + .withBlockContent(node_models.AtalaBlock()) val expectedWalletApiPath = s"v2/wallets/$walletId/transactions" "publish an object" in { @@ -213,10 +213,10 @@ class CardanoLedgerServiceSpec extends AtalaWithPostgresSpec { blocksWithNotifications.map { blockWithNotification => val block = allBlocks(blockWithNotification) - val atalaObject = node_internal + val atalaObject = node_models .AtalaObject() .withBlockContent( - node_internal.AtalaBlock(operations = Seq()) + node_models.AtalaBlock(operations = Seq()) ) val blockIndex = block.transactions.size val transaction = Transaction( @@ -247,7 +247,7 @@ class CardanoLedgerServiceSpec extends AtalaWithPostgresSpec { // AtalaObjectMetadata.toTransactionMetadata cannot be used as the format received by cardano-db-sync is not // compatible def toTransactionMetadata( - atalaObject: node_internal.AtalaObject + atalaObject: node_models.AtalaObject ): TransactionMetadata = { TransactionMetadata( Json.obj( diff --git a/node/src/test/scala/io/iohk/atala/prism/node/services/ObjectManagementServiceSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/services/ObjectManagementServiceSpec.scala index 1cdd655a18..9088879b49 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/services/ObjectManagementServiceSpec.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/services/ObjectManagementServiceSpec.scala @@ -34,7 +34,7 @@ import io.iohk.atala.prism.node.repositories.{ import io.iohk.atala.prism.node.services.BlockProcessingServiceSpec.{createDidOperation, signOperation} import io.iohk.atala.prism.node.services.models.AtalaObjectNotification import io.iohk.atala.prism.node.{DataPreparation, PublicationInfo, UnderlyingLedger} -import io.iohk.atala.prism.protos.{node_internal, node_models} +import io.iohk.atala.prism.protos.node_models import io.iohk.atala.prism.node.utils.IOUtils._ import org.mockito import org.mockito.captor.ArgCaptor @@ -250,7 +250,7 @@ class ObjectManagementServiceSpec returnedOperationId mustBe BlockProcessingServiceSpec.signedCreateDidOperationId // Verify published AtalaObject - val atalaObjectCaptor = ArgCaptor[node_internal.AtalaObject] + val atalaObjectCaptor = ArgCaptor[node_models.AtalaObject] verify(ledger).publish(atalaObjectCaptor) val atalaObject = atalaObjectCaptor.value val atalaBlock = atalaObject.blockContent.value @@ -486,7 +486,7 @@ class ObjectManagementServiceSpec .unsafeToFuture() .futureValue - val 
blockCaptor = ArgCaptor[node_internal.AtalaBlock] + val blockCaptor = ArgCaptor[node_models.AtalaBlock] verify(blockProcessing).processBlock( blockCaptor, // mockito hates value classes, so we cannot test equality to this argument @@ -527,7 +527,7 @@ class ObjectManagementServiceSpec atalaObject mustBe None } - def queryAtalaObject(obj: node_internal.AtalaObject): AtalaObjectInfo = { + def queryAtalaObject(obj: node_models.AtalaObject): AtalaObjectInfo = { AtalaObjectsDAO .get(AtalaObjectId.of(obj)) .transact(database) diff --git a/node/src/test/scala/io/iohk/atala/prism/node/services/SubmissionServiceSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/services/SubmissionServiceSpec.scala index 2ce4c67ba6..ede58c3326 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/services/SubmissionServiceSpec.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/services/SubmissionServiceSpec.scala @@ -22,7 +22,7 @@ import io.iohk.atala.prism.node.repositories.{ ProtocolVersionRepository } import io.iohk.atala.prism.node.repositories.daos.AtalaObjectsDAO -import io.iohk.atala.prism.protos.node_internal +import io.iohk.atala.prism.protos.node_models import io.iohk.atala.prism.protos.node_models.SignedAtalaOperation import io.iohk.atala.prism.node.utils.IOUtils._ import org.mockito.scalatest.{MockitoSugar, ResetMocksAfterEachTest} @@ -505,8 +505,8 @@ class SubmissionServiceSpec numOps: Int, numPubsAdditional: Int = 0 ): ( - List[node_internal.AtalaObject], - List[node_internal.AtalaObject], + List[node_models.AtalaObject], + List[node_models.AtalaObject], List[PublicationInfo], List[SignedAtalaOperation] ) = { @@ -526,19 +526,19 @@ class SubmissionServiceSpec // Calculate atala objects merged in a naive way var accOps = List.empty[SignedAtalaOperation] - var oldObj: node_internal.AtalaObject = null + var oldObj: node_models.AtalaObject = null val atalaObjectsMerged = - collection.mutable.ArrayBuffer.empty[node_internal.AtalaObject] + collection.mutable.ArrayBuffer.empty[node_models.AtalaObject] atalaOperations.reverse.foreach { op => val nextAccOps = op +: accOps val curObj = createAtalaObject( - block = node_internal.AtalaBlock(operations = nextAccOps) + block = node_models.AtalaBlock(operations = nextAccOps) ) if (estimateTxMetadataSize(curObj) >= cardano.TX_METADATA_MAX_SIZE) { assert(oldObj != null) atalaObjectsMerged.append(oldObj) - oldObj = createAtalaObject(block = node_internal.AtalaBlock(operations = List(op))) + oldObj = createAtalaObject(block = node_models.AtalaBlock(operations = List(op))) accOps = List(op) } else { oldObj = curObj diff --git a/node/src/test/scala/io/iohk/atala/prism/node/utils/GrpcUtilsSpec.scala b/node/src/test/scala/io/iohk/atala/prism/node/utils/GrpcUtilsSpec.scala index fcd2099afb..d0f03b57a2 100644 --- a/node/src/test/scala/io/iohk/atala/prism/node/utils/GrpcUtilsSpec.scala +++ b/node/src/test/scala/io/iohk/atala/prism/node/utils/GrpcUtilsSpec.scala @@ -1,23 +1,22 @@ package io.iohk.atala.prism.node.utils import com.google.protobuf.ByteString -import io.iohk.atala.prism.protos.node_api.ScheduleOperationsResponse -import io.iohk.atala.prism.protos.node_models.{OperationOutput, RevokeCredentialsOutput} +import io.iohk.atala.prism.protos.node_api.{OperationOutput, DeactivateDIDOutput, ScheduleOperationsResponse} import org.scalatest.matchers.must.Matchers._ import org.scalatest.wordspec.AnyWordSpec class GrpcUtilsSpec extends AnyWordSpec { - private val revokeCredentialsOperationOutput: OperationOutput = OperationOutput( - 
OperationOutput.Result.RevokeCredentialsOutput(RevokeCredentialsOutput()), + private val deactivateDidsOperationOutput: OperationOutput = OperationOutput( + OperationOutput.Result.DeactivateDidOutput(DeactivateDIDOutput()), OperationOutput.OperationMaybe.OperationId(ByteString.copyFrom("aba".getBytes)) ) "extractSingleOperationOutput" should { "correctly extract single output from Vector" in { val operationOutput = GrpcUtils.extractSingleOperationOutput( - ScheduleOperationsResponse(Vector(revokeCredentialsOperationOutput)) + ScheduleOperationsResponse(Vector(deactivateDidsOperationOutput)) ) - operationOutput must be(revokeCredentialsOperationOutput) + operationOutput must be(deactivateDidsOperationOutput) } "throw error on empty list" in { @@ -32,7 +31,7 @@ class GrpcUtilsSpec extends AnyWordSpec { "throw error when more than one output returned" in { val error = intercept[RuntimeException] { GrpcUtils.extractSingleOperationOutput( - ScheduleOperationsResponse(Seq(revokeCredentialsOperationOutput, revokeCredentialsOperationOutput)) + ScheduleOperationsResponse(Seq(deactivateDidsOperationOutput, deactivateDidsOperationOutput)) ) } error.getMessage mustEqual "1 operation output expected but got 2" diff --git a/node/src/test/scala/io/iohk/atala/prism/node/utils/NodeClientUtils.scala b/node/src/test/scala/io/iohk/atala/prism/node/utils/NodeClientUtils.scala deleted file mode 100644 index 8c5ce3048b..0000000000 --- a/node/src/test/scala/io/iohk/atala/prism/node/utils/NodeClientUtils.scala +++ /dev/null @@ -1,52 +0,0 @@ -package io.iohk.atala.prism.node.utils - -import com.google.protobuf.ByteString -import io.iohk.atala.prism.credentials.CredentialBatchId -import io.iohk.atala.prism.crypto.{MerkleRoot, Sha256Digest} -import io.iohk.atala.prism.protos.node_models -import io.iohk.atala.prism.identity.{PrismDid => DID} - -object NodeClientUtils { - - def issueBatchOperation( - issuerDID: DID, - merkleRoot: MerkleRoot - ): node_models.AtalaOperation = { - node_models - .AtalaOperation( - operation = node_models.AtalaOperation.Operation.IssueCredentialBatch( - value = node_models - .IssueCredentialBatchOperation( - credentialBatchData = Some( - node_models.CredentialBatchData( - issuerDid = issuerDID.getSuffix, - merkleRoot = toByteString(merkleRoot.getHash) - ) - ) - ) - ) - ) - } - - def revokeCredentialsOperation( - previousOperationHash: Sha256Digest, - batchId: CredentialBatchId, - credentialsToRevoke: Seq[Sha256Digest] = Nil - ): node_models.AtalaOperation = { - node_models - .AtalaOperation( - operation = node_models.AtalaOperation.Operation.RevokeCredentials( - value = node_models - .RevokeCredentialsOperation( - previousOperationHash = toByteString(previousOperationHash), - credentialBatchId = batchId.getId, - credentialsToRevoke = credentialsToRevoke.map(toByteString) - ) - ) - ) - } - - def toByteString(hash: Sha256Digest): ByteString = - ByteString.copyFrom(hash.getValue) - -}
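
For reference, the deleted EncodedSizes estimator measured long-form DIDs, which embed the whole serialized CreateDIDOperation in the DID string. A minimal sketch of that encoding, assuming the suffix is the hex-encoded SHA-256 of the operation bytes (java.security and java.util.Base64 stand in for the archived crypto SDK, and operationBytes stands in for AtalaOperation.toByteArray):

    object LongFormDidSketch {
      import java.security.MessageDigest
      import java.util.Base64

      // Mirrors the deleted createDID helper: the suffix commits to the
      // operation, and the operation itself rides along base64url-encoded.
      def longFormDid(operationBytes: Array[Byte]): String = {
        val hash = MessageDigest.getInstance("SHA-256").digest(operationBytes)
        val didSuffix = hash.map(b => f"$b%02x").mkString
        val encodedOperation = Base64.getUrlEncoder.encodeToString(operationBytes)
        s"did:prism:$didSuffix:$encodedOperation"
      }
    }

This is also why the estimator's average DID length grows with the number of master keys: every extra key enlarges the encoded operation appended after the suffix.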
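The FlowPoC expectations encode the legacy revocation semantics: revoking a batch invalidated every credential in it (BatchWasRevoked for c1 and c2), while revoking specific hashes only invalidated those credentials (CredentialWasRevoked for c3, with c4 still valid). A toy decision rule capturing just that precedence; the real checks, including the ledger timestamps fetched via getBatchState and getCredentialRevocationTime, lived in the deleted CredVerification, and the error shapes below are simplified stand-ins:

    object RevocationRuleSketch {
      sealed trait VerificationError
      case object BatchWasRevoked extends VerificationError
      case object CredentialWasRevoked extends VerificationError

      // The revokedOn arguments stand in for the optional ledger timestamps
      // the node returned; a batch-level revocation takes precedence.
      def check(
          batchRevokedOn: Option[Long],
          credentialRevokedOn: Option[Long]
      ): Either[VerificationError, Unit] =
        if (batchRevokedOn.isDefined) Left(BatchWasRevoked)
        else if (credentialRevokedOn.isDefined) Left(CredentialWasRevoked)
        else Right(())
    }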
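The Ethiopia deployment figures in the deleted CardanoFeeEstimator reduce to straightforward arithmetic; the snippet below only restates the constants from estimateEthiopia together with the 2048-credential batch limit:

    object EthiopiaArithmeticSketch {
      val students = 5000000
      val schools = 4000
      val studentsPerSchool = students / schools        // 1250
      val yearlyCredentialsPerStudent = 4 + 1           // 4 reports + 1 certificate
      val maxBatchSize = 2048                           // MAX_CREDENTIAL_BATCH_SIZE

      // Each school issuance event (1250 credentials) fits in a single batch.
      val schoolIssuanceTxs = schools * yearlyCredentialsPerStudent *
        math.ceil(studentsPerSchool.toDouble / maxBatchSize).toInt   // 20000
      val examIssuanceTxs = math.ceil(500000.0 / maxBatchSize).toInt // 245
      val didCreationTxs = schools + 1                  // 4001 issuer DIDs
    }

So a full year of school reporting amounted to roughly 20,245 issuance transactions plus a one-off 4,001 DID-creation transactions, which estimate() then priced through the wallet's transaction-fee endpoint.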
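On the DAO spec changes: the dummy UnknownFieldSet fields exist because an AtalaObject's id is derived from its serialized bytes, so two empty objects would collide; any extra field changes the encoding and hence the id. A sketch of that identity, under the assumption that AtalaObjectId.of hashes the protobuf encoding with SHA-256:

    object ObjectIdSketch {
      import java.security.MessageDigest

      // Different encoded bytes (e.g. an added unknown field) yield a
      // different id, which is how the specs tell objects apart.
      def objectId(objectBytes: Array[Byte]): Vector[Byte] =
        MessageDigest.getInstance("SHA-256").digest(objectBytes).toVector
    }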
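The SubmissionServiceSpec helper recomputes the node's merging behaviour as its expected value: operations are folded into a single block until the estimated transaction metadata would reach the Cardano limit, then a fresh object is started. A compact sketch of that greedy packing, with sizeOf standing in for estimateTxMetadataSize applied to a block of operations and maxSize for cardano.TX_METADATA_MAX_SIZE:

    object GreedyMergeSketch {
      // Packs items left to right, keeping a group open while the estimated
      // size of the group plus the next item stays under the cap.
      def pack[A](items: List[A], maxSize: Int)(sizeOf: List[A] => Int): List[List[A]] =
        items
          .foldLeft(List.empty[List[A]]) { (acc, item) =>
            acc match {
              case group :: rest if sizeOf(item :: group) < maxSize =>
                (item :: group) :: rest
              case _ => List(item) :: acc
            }
          }
          .map(_.reverse)
          .reverse
    }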