refactor(ATL-6924): add support for other keys #834
Merged
EzequielPostan merged 5 commits into open-source-node from ATL-6924-add-support-for-other-keys on Apr 19, 2024

Commits (5):
49ca1be  ATL-6924: Replace users of ECPublicKey
01967cc  ATL-6924: Simplify models
a8afcd6  ATL-6924: Simplify types
73ff642  ATL-6924: Add support for encoding functions
a41f5ee  ATL-6924: Refactor CryptoUtils and remove SHA256Digest
node/src/main/scala/io/iohk/atala/prism/node/crypto/CryptoUtils.scala (new file, 133 additions, 0 deletions)
package io.iohk.atala.prism.node.crypto

import io.iohk.atala.prism.node.models.ProtocolConstants
import org.bouncycastle.jcajce.provider.asymmetric.util.EC5Util
import org.bouncycastle.jce.interfaces.ECPublicKey

import java.security.{KeyFactory, MessageDigest, PublicKey, Security, Signature}
import org.bouncycastle.jce.{ECNamedCurveTable, ECPointUtil}
import org.bouncycastle.jce.provider.BouncyCastleProvider

import java.security.spec.{ECPoint, ECPublicKeySpec}

object CryptoUtils {
  trait SecpPublicKey {
    private[crypto] def publicKey: PublicKey
    def curveName: String = ProtocolConstants.secpCurveName
    def compressed: Array[Byte] = publicKey
      .asInstanceOf[ECPublicKey]
      .getQ
      .getEncoded(true)
    def x: Array[Byte] = publicKey.asInstanceOf[ECPublicKey].getQ.getAffineXCoord.getEncoded
    def y: Array[Byte] = publicKey.asInstanceOf[ECPublicKey].getQ.getAffineYCoord.getEncoded
  }

  private[crypto] class SecpPublicKeyImpl(pubKey: PublicKey) extends SecpPublicKey {
    override private[crypto] def publicKey: PublicKey = pubKey
  }

  // We define the constructor to SecpKeys private so that the only way to generate
  // these keys is by using the methods unsafeToSecpPublicKeyFromByteCoordinates and
  // unsafeToSecpPublicKeyFromCompressed.
  object SecpPublicKey {

    private[crypto] def fromPublicKey(key: PublicKey): SecpPublicKey = new SecpPublicKeyImpl(key)

    def checkECDSASignature(msg: Array[Byte], sig: Array[Byte], pubKey: SecpPublicKey): Boolean = {
      val ecdsaVerify = Signature.getInstance("SHA256withECDSA", provider)
      ecdsaVerify.initVerify(pubKey.publicKey)
      ecdsaVerify.update(msg)
      ecdsaVerify.verify(sig)
    }

    def unsafeToSecpPublicKeyFromCompressed(com: Vector[Byte]): SecpPublicKey = {
      val params = ECNamedCurveTable.getParameterSpec("secp256k1")
      val fact = KeyFactory.getInstance("ECDSA", provider)
      val curve = params.getCurve
      val ellipticCurve = EC5Util.convertCurve(curve, params.getSeed)
      val point = ECPointUtil.decodePoint(ellipticCurve, com.toArray)
      val params2 = EC5Util.convertSpec(ellipticCurve, params)
      val keySpec = new ECPublicKeySpec(point, params2)
      SecpPublicKey.fromPublicKey(fact.generatePublic(keySpec))
    }

    def unsafeToSecpPublicKeyFromByteCoordinates(x: Array[Byte], y: Array[Byte]): SecpPublicKey = {
      def trimLeadingZeroes(arr: Array[Byte], c: String): Array[Byte] = {
        val trimmed = arr.dropWhile(_ == 0.toByte)
        require(
          trimmed.length <= PUBLIC_KEY_COORDINATE_BYTE_SIZE,
          s"Expected $c coordinate byte length to be less than or equal ${PUBLIC_KEY_COORDINATE_BYTE_SIZE}, but got ${trimmed.length} bytes"
        )
        trimmed
      }

      val xTrimmed = trimLeadingZeroes(x, "x")
      val yTrimmed = trimLeadingZeroes(y, "y")
      val xInteger = BigInt(1, xTrimmed)
      val yInteger = BigInt(1, yTrimmed)
      SecpPublicKey.unsafeToSecpPublicKeyFromBigIntegerCoordinates(xInteger, yInteger)
    }

    def unsafeToSecpPublicKeyFromBigIntegerCoordinates(x: BigInt, y: BigInt): SecpPublicKey = {
      val params = ECNamedCurveTable.getParameterSpec("secp256k1")
      val fact = KeyFactory.getInstance("ECDSA", provider)
      val curve = params.getCurve
      val ellipticCurve = EC5Util.convertCurve(curve, params.getSeed)
      val point = new ECPoint(x.bigInteger, y.bigInteger)
      val params2 = EC5Util.convertSpec(ellipticCurve, params)
      val keySpec = new ECPublicKeySpec(point, params2)
      SecpPublicKey.fromPublicKey(fact.generatePublic(keySpec))
    }
  }

  private val provider = new BouncyCastleProvider()
  private val PUBLIC_KEY_COORDINATE_BYTE_SIZE: Int = 32

  Security.addProvider(provider)

  trait Sha256Hash {
    def bytes: Vector[Byte]
    def hexEncoded: String = bytesToHex(bytes)
  }

  private[crypto] case class Sha256HashImpl(bytes: Vector[Byte]) extends Sha256Hash {
    require(bytes.size == 32)
  }

  object Sha256Hash {

    def fromBytes(arr: Array[Byte]): Sha256Hash = Sha256HashImpl(arr.toVector)

    def compute(bArray: Array[Byte]): Sha256Hash = {
      Sha256HashImpl(
        MessageDigest
          .getInstance("SHA-256")
          .digest(bArray)
          .toVector
      )
    }

    def fromHex(hexedBytes: String): Sha256Hash = {
      val HEX_STRING_RE = "^[0-9a-fA-F]{64}$".r
      if (HEX_STRING_RE.matches(hexedBytes)) Sha256HashImpl(hexToBytes(hexedBytes))
      else
        throw new IllegalArgumentException(
          "The given hex string doesn't correspond to a valid SHA-256 hash encoded as string"
        )
    }
  }

  def bytesToHex(bytes: Vector[Byte]): String = {
    bytes.map(byte => f"${byte & 0xff}%02x").mkString
  }

  def hexToBytes(hex: String): Vector[Byte] = {
    val HEX_ARRAY = "0123456789abcdef".toCharArray
    for {
      pair <- hex.grouped(2).toVector
      firstIndex = HEX_ARRAY.indexOf(pair(0))
      secondIndex = HEX_ARRAY.indexOf(pair(1))
      octet = firstIndex << 4 | secondIndex
    } yield octet.toByte
  }
}
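For reviewers who want to try the new helpers, here is a small usage sketch (the payload is an arbitrary example, not taken from this PR; signature verification is only noted in a comment because producing a signature needs a private key, which this object does not expose):

import io.iohk.atala.prism.node.crypto.CryptoUtils
import io.iohk.atala.prism.node.crypto.CryptoUtils.Sha256Hash

// Hash an arbitrary payload and round-trip the digest through its hex encoding.
val payload: Array[Byte] = "example operation bytes".getBytes("UTF-8")
val digest: Sha256Hash = Sha256Hash.compute(payload)   // 32-byte SHA-256 digest
val hex: String = digest.hexEncoded                    // 64 lowercase hex characters
val parsed: Sha256Hash = Sha256Hash.fromHex(hex)       // accepts only 64-hex-char strings
assert(digest.bytes == parsed.bytes)

// The hex helpers are also usable on their own.
assert(CryptoUtils.hexToBytes(CryptoUtils.bytesToHex(digest.bytes)) == digest.bytes)

// A signature produced elsewhere over `payload` could be verified with
// SecpPublicKey.checkECDSASignature(payload, sig, key).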
ProtoCodecs:
@@ -5,8 +5,7 @@ import com.google.protobuf.ByteString
import io.iohk.atala.prism.protos.models.TimestampInfo
import io.iohk.atala.prism.crypto.EC.{INSTANCE => EC}
import io.iohk.atala.prism.crypto.keys.ECPublicKey
import io.iohk.atala.prism.crypto.ECConfig.{INSTANCE => ECConfig}
import io.iohk.atala.prism.node.models.{DidSuffix, Ledger}
import io.iohk.atala.prism.node.models.{DidSuffix, Ledger, PublicKeyData}
import io.iohk.atala.prism.protos.common_models
import io.iohk.atala.prism.node.models
import io.iohk.atala.prism.node.models.KeyUsage._

@@ -56,7 +55,7 @@ object ProtoCodecs
    didDataState.keys.map(key =>
      toProtoPublicKey(
        key.keyId,
        toECKeyData(key.key),
        toCompressedECKeyData(key.key),
        toProtoKeyUsage(key.keyUsage),
        toLedgerData(key.addedOn),
        key.revokedOn map toLedgerData

@@ -83,28 +82,26 @@

  def toProtoPublicKey(
      id: String,
      ecKeyData: node_models.ECKeyData,
      compressedEcKeyData: node_models.CompressedECKeyData,
      keyUsage: node_models.KeyUsage,
      addedOn: node_models.LedgerData,
      revokedOn: Option[node_models.LedgerData]
  ): node_models.PublicKey = {
    val withoutRevKey = node_models
      .PublicKey()
      .withId(id)
      .withEcKeyData(ecKeyData)
      .withCompressedEcKeyData(compressedEcKeyData)
      .withUsage(keyUsage)
      .withAddedOn(addedOn)

    revokedOn.fold(withoutRevKey)(revTime => withoutRevKey.withRevokedOn(revTime))
  }

  def toECKeyData(key: ECPublicKey): node_models.ECKeyData = {
    val point = key.getCurvePoint
Review comment on lines -101 to -102:
Before, the code assumed we had an ECPublicKey which could be decomposed into x and y coordinates. Now we assume it is always a compressed key.
  def toCompressedECKeyData(key: PublicKeyData): node_models.CompressedECKeyData = {
    node_models
      .ECKeyData()
      .withCurve(ECConfig.getCURVE_NAME)
      .withX(ByteString.copyFrom(point.getX.bytes()))
      .withY(ByteString.copyFrom(point.getY.bytes()))
      .CompressedECKeyData()
      .withCurve(key.curveName)
      .withData(ByteString.copyFrom(key.compressedKey.toArray))
  }

  def toProtoKeyUsage(keyUsage: models.KeyUsage): node_models.KeyUsage = {
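Because the removed and added lines interleave in the hunk above, here is the same change laid out as the old encoder followed by its replacement, reconstructed from the lines shown in the diff:

  // Removed: decomposes the key into separate x and y coordinate fields.
  def toECKeyData(key: ECPublicKey): node_models.ECKeyData = {
    val point = key.getCurvePoint
    node_models
      .ECKeyData()
      .withCurve(ECConfig.getCURVE_NAME)
      .withX(ByteString.copyFrom(point.getX.bytes()))
      .withY(ByteString.copyFrom(point.getY.bytes()))
  }

  // Added: carries the curve name plus a single compressed-point field.
  def toCompressedECKeyData(key: PublicKeyData): node_models.CompressedECKeyData = {
    node_models
      .CompressedECKeyData()
      .withCurve(key.curveName)
      .withData(ByteString.copyFrom(key.compressedKey.toArray))
  }

The PublicKeyData model itself is not part of this hunk; from the way it is used here it presumably exposes a curveName: String and a compressedKey holding the compressed point bytes.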
Review discussion:

As we now assume we always retrieve a compressed key, we require the model being converted to be a compressed one.
Does this apply to the new key types and the old key type as well? Would that require a change in the spec then?
IMO I wouldn't require a change to the specs. I would just put in the spec that the compressed version is recommended.
This method is applied to data retrieved from the node DB, and we store keys in compressed format, meaning that every key we retrieve will be compressed regardless of how the user submitted it. So, no need for spec changes for this part.
We do need to state in the spec that Ed25519 and X25519 keys can only be sent in compressed format (which I understand is fine, please correct me if I am wrong @FabioPinheiro). Secp keys can be sent in either format and the node will work fine.
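To illustrate that last point, a minimal sketch of a secp key being accepted in either encoding through the new CryptoUtils API (the sample point is the secp256k1 generator, standing in for a key a client might submit):

import io.iohk.atala.prism.node.crypto.CryptoUtils
import io.iohk.atala.prism.node.crypto.CryptoUtils.SecpPublicKey

// A well-known valid compressed secp256k1 point (33 bytes: 0x02/0x03 prefix + x coordinate).
val compressed = CryptoUtils.hexToBytes(
  "0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798"
)

// A client may submit the key in compressed form...
val fromCompressed = SecpPublicKey.unsafeToSecpPublicKeyFromCompressed(compressed)
// ...or as raw x and y coordinates; both decode to the same key.
val fromCoordinates =
  SecpPublicKey.unsafeToSecpPublicKeyFromByteCoordinates(fromCompressed.x, fromCompressed.y)

assert(fromCompressed.compressed.sameElements(fromCoordinates.compressed))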