Add di support w/ eddsa-jcs-2022 #3181

Closed
Changes from all commits (39 commits)
29c7a87
add did key routes
PatStLouis Aug 15, 2024
006c815
Merge branch 'main' of github.com:OpSecId/aries-cloudagent-python
PatStLouis Aug 15, 2024
c36974c
Merge branch 'main' of github.com:OpSecId/aries-cloudagent-python
PatStLouis Aug 15, 2024
aeecdec
set default key type
PatStLouis Aug 15, 2024
17ea779
linting
PatStLouis Aug 15, 2024
5c1d5de
more formatting
PatStLouis Aug 15, 2024
3d0c1d0
restructure
PatStLouis Aug 15, 2024
0b55237
update manager
PatStLouis Aug 16, 2024
c316ef0
add eddsa-jcs-2022
PatStLouis Aug 18, 2024
6e7f182
add verification
PatStLouis Aug 19, 2024
be62816
run ruff format
PatStLouis Aug 19, 2024
3f532a2
lock poetry file
PatStLouis Aug 19, 2024
5e1e35b
Merge branch 'main' into pstlouis/add-di-support
PatStLouis Aug 19, 2024
33c153b
Merge branch 'pstlouis/add-did-key-route' into pstlouis/add-di-support
PatStLouis Aug 19, 2024
7be92ce
Merge branch 'pstlouis/add-di-support' of github.com:OpSecId/aries-cl…
PatStLouis Sep 8, 2024
3d4ce4b
rebase
PatStLouis Sep 8, 2024
3d6505c
add spec links to docstring
PatStLouis Sep 8, 2024
bf88b9e
group context urls
PatStLouis Sep 8, 2024
09eb4b2
add canonicaljson package to lock file
PatStLouis Sep 8, 2024
c4bee94
update lock file
PatStLouis Sep 8, 2024
296c1f7
Merge branch 'main' into pstlouis/add-di-support
PatStLouis Sep 9, 2024
789eee1
update lock
PatStLouis Sep 9, 2024
d7f83b0
format and linting
PatStLouis Sep 9, 2024
83659ed
declare profile type
PatStLouis Sep 10, 2024
466f284
trying to find the profile
PatStLouis Sep 10, 2024
eded063
Merge branch 'hyperledger:main' into pstlouis/add-di-support
PatStLouis Sep 15, 2024
71d1060
fix unit tests
PatStLouis Sep 15, 2024
92472c3
improve unit tests
PatStLouis Sep 15, 2024
8cfffcc
fix vm id reference
PatStLouis Sep 15, 2024
4ba384d
fix unit tests
PatStLouis Sep 15, 2024
2d9b12a
linting
PatStLouis Sep 15, 2024
0fdbe7f
fix unit tests
PatStLouis Sep 15, 2024
d14d418
fix test variables
PatStLouis Sep 15, 2024
121af79
fix unit tests
PatStLouis Sep 15, 2024
29735f5
linting
PatStLouis Sep 15, 2024
555aa86
remove did key files
PatStLouis Sep 17, 2024
3a9981a
import missing class DIProofSchema
PatStLouis Sep 17, 2024
2802141
regenerate lock file
PatStLouis Sep 17, 2024
1df8875
Merge branch 'hyperledger:main' into pstlouis/add-di-support
PatStLouis Sep 20, 2024
1 change: 1 addition & 0 deletions aries_cloudagent/config/default_context.py
@@ -131,6 +131,7 @@ async def load_plugins(self, context: InjectionContext):
plugin_registry.register_package("aries_cloudagent.protocols")

# Currently providing admin routes only
plugin_registry.register_plugin("aries_cloudagent.did")
plugin_registry.register_plugin("aries_cloudagent.holder")

plugin_registry.register_plugin("aries_cloudagent.ledger")
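For context, registering "aries_cloudagent.did" as a plugin means the plugin registry will look for that package's route hooks when the admin server starts. The sketch below is a hedged illustration of the usual ACA-Py plugin convention, not code from this PR; the path and handler names are hypothetical.

from aiohttp import web


async def example_did_handler(request: web.BaseRequest):
    """Hypothetical admin route handler, shown only to illustrate the plugin hook."""
    return web.json_response({"did": "did:example:123"})


async def register(app: web.Application):
    """Attach this plugin's admin routes when the plugin package is loaded."""
    app.add_routes([web.get("/did/example", example_did_handler, allow_head=False)])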
14 changes: 14 additions & 0 deletions aries_cloudagent/vc/contexts.py
@@ -0,0 +1,14 @@
"""Context URLS."""

DID_V1 = "https://www.w3.org/ns/did/v1"
CREDENTIALS_V1 = "https://www.w3.org/2018/credentials/v1"
CREDENTIALS_V2 = "https://www.w3.org/ns/credentials/v2"

SECURITY_V1 = "https://w3id.org/security/v1"
SECURITY_V2 = "https://w3id.org/security/v2"
SECURITY_V3_UNSTABLE = "https://w3id.org/security/v3-unstable"
SECURITY_DATA_INTEGRITY_V2 = "https://w3id.org/security/data-integrity/v2"
SECURITY_BBS_V1 = "https://w3id.org/security/bbs/v1"
SECURITY_SUITES_ED25519_2020_V1 = "https://w3id.org/security/suites/ed25519-2020/v1"
SECURITY_MULTIKEY_V1 = "https://w3id.org/security/multikey/v1"
SECURITY_JWK_V1 = "https://w3id.org/security/jwk/v1"
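As a hedged aside on how these constants might be consumed (the snippet below is illustrative and not part of this diff), they can replace hard-coded context URLs when assembling documents, for example a DID document that uses a Multikey verification method:

from aries_cloudagent.vc.contexts import DID_V1, SECURITY_MULTIKEY_V1

# Illustrative skeleton only; identifiers and key material are placeholders.
did_document = {
    "@context": [DID_V1, SECURITY_MULTIKEY_V1],
    "id": "did:example:123",
    "verificationMethod": [
        {
            "id": "did:example:123#key-1",
            "type": "Multikey",
            "controller": "did:example:123",
            "publicKeyMultibase": "z6Mk...",  # placeholder value
        }
    ],
}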
6 changes: 6 additions & 0 deletions aries_cloudagent/vc/vc_di/__init__.py
@@ -5,7 +5,13 @@
)
from .verify import verify_signed_anoncredspresentation


class DataIntegrityProofException(Exception):
"""Base exception for data integrity proofs."""


__all__ = [
"DataIntegrityProofException",
"verify_signed_anoncredspresentation",
"create_signed_anoncreds_presentation",
"prepare_data_for_presentation",
9 changes: 9 additions & 0 deletions aries_cloudagent/vc/vc_di/cryptosuites/__init__.py
@@ -0,0 +1,9 @@
from .eddsa_jcs_2022 import EddsaJcs2022

CRYPTOSUITES = {
"eddsa-jcs-2022": EddsaJcs2022,
}

__all__ = [
"EddsaJcs2022",
]
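A hedged sketch of how this registry is likely meant to be used (the helper below is an assumption about usage, not code from this PR): the proof options' "cryptosuite" value selects the suite class, which is instantiated with a profile and asked to add a proof.

from aries_cloudagent.vc.vc_di.cryptosuites import CRYPTOSUITES


async def add_proof_with_registered_suite(profile, document: dict, proof_options: dict):
    """Hypothetical helper: look up the suite named in the options and sign."""
    suite_class = CRYPTOSUITES[proof_options["cryptosuite"]]  # e.g. "eddsa-jcs-2022"
    suite = suite_class(profile=profile)
    return await suite.add_proof(document, proof_options)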
128 changes: 128 additions & 0 deletions aries_cloudagent/vc/vc_di/cryptosuites/eddsa_jcs_2022.py
@@ -0,0 +1,128 @@
"""EddsaJcs2022 cryptosuite."""

from hashlib import sha256
import canonicaljson
import nacl.bindings
import nacl.exceptions

from ....wallet.base import BaseWallet
from ....utils.multiformats import multibase
from ....core.profile import Profile
from .. import DataIntegrityProofException


class EddsaJcs2022:
"""EddsaJcs2022 suite."""

def __init__(self, *, profile: Profile):
"""Create new EddsaJcs2022 Cryptosuite instance.

https://www.w3.org/TR/vc-di-eddsa/#eddsa-jcs-2022

Args:
profile: Key profile to use.

"""
super().__init__()
self.profile = profile

async def _serialization(self, hash_data, options):
"""Data Integrity Proof Serialization Algorithm.

https://www.w3.org/TR/vc-di-eddsa/#proof-serialization-eddsa-jcs-2022

"""
async with self.profile.session() as session:
did_info = await session.inject(BaseWallet).get_local_did(
options["verificationMethod"].split("#")[0]
)
async with self.profile.session() as session:
wallet = session.inject(BaseWallet)
proof_bytes = await wallet.sign_message(
message=hash_data,
from_verkey=did_info.verkey,
)
return proof_bytes

async def add_proof(self, document, proof_options):
"""Data Integrity Add Proof Algorithm.

https://www.w3.org/TR/vc-data-integrity/#add-proof

Args:
document: The data to sign.
proof_options: The proof options.

Returns:
secured_document: The document with a new proof attached

"""

existing_proof = document.pop("proof", [])
assert isinstance(existing_proof, list) or isinstance(existing_proof, dict)
existing_proof = (
[existing_proof] if isinstance(existing_proof, dict) else existing_proof
)

assert proof_options["type"] == "DataIntegrityProof"
assert proof_options["cryptosuite"] == "eddsa-jcs-2022"
assert proof_options["proofPurpose"]
assert proof_options["verificationMethod"]

try:
hash_data = (
sha256(canonicaljson.encode_canonical_json(document)).digest()
+ sha256(canonicaljson.encode_canonical_json(proof_options)).digest()
)
proof_bytes = await self._serialization(hash_data, proof_options)

proof = proof_options.copy()
proof["proofValue"] = multibase.encode(proof_bytes, "base58btc")

secured_document = document.copy()
secured_document["proof"] = existing_proof
secured_document["proof"].append(proof)

return secured_document
except Exception:
raise DataIntegrityProofException()

async def verify_proof(self, unsecured_document, proof):
"""Data Integrity Verify Proof Algorithm.

https://www.w3.org/TR/vc-data-integrity/#verify-proof

Args:
unsecured_document: The data to check.
proof: The proof.

Returns:
verification_response: Whether the signature is valid for the data

"""
try:
assert proof["type"] == "DataIntegrityProof"
assert proof["cryptosuite"] == "eddsa-jcs-2022"
assert proof["proofPurpose"]
assert proof["proofValue"]
assert proof["verificationMethod"]

proof_options = proof.copy()
proof_value = proof_options.pop("proofValue")
proof_bytes = multibase.decode(proof_value)

hash_data = (
sha256(canonicaljson.encode_canonical_json(unsecured_document)).digest()
+ sha256(canonicaljson.encode_canonical_json(proof_options)).digest()
)
verification_method = proof["verificationMethod"]
did = verification_method.split("#")[0]
if did.split(":")[1] == "key":
pub_key = multibase.decode(did.split(":")[-1])
public_key_bytes = bytes(bytearray(pub_key)[2:])
try:
nacl.bindings.crypto_sign_open(proof_bytes + hash_data, public_key_bytes)
return True
except nacl.exceptions.BadSignatureError:
return False
except Exception:
raise DataIntegrityProofException()
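To make the hashing and signature layout above concrete, the following is a minimal, self-contained sketch using canonicaljson and PyNaCl directly, with a freshly generated Ed25519 keypair standing in for the wallet-managed DID key; it deliberately bypasses the Profile/BaseWallet plumbing and is illustrative only.

from hashlib import sha256

import canonicaljson
import nacl.bindings
import nacl.exceptions

# Assumption: a raw keypair replaces the DID key held by BaseWallet.
public_key, secret_key = nacl.bindings.crypto_sign_keypair()

document = {"hello": "world"}
proof_options = {
    "type": "DataIntegrityProof",
    "cryptosuite": "eddsa-jcs-2022",
    "proofPurpose": "assertionMethod",
    "verificationMethod": "did:example:123#key-1",  # placeholder identifier
}

# hash_data = sha256(JCS(document)) || sha256(JCS(proof options)), as in add_proof
hash_data = (
    sha256(canonicaljson.encode_canonical_json(document)).digest()
    + sha256(canonicaljson.encode_canonical_json(proof_options)).digest()
)

# Sign, then verify using the same signature-prepended layout as verify_proof
signed = nacl.bindings.crypto_sign(hash_data, secret_key)
proof_bytes = signed[: nacl.bindings.crypto_sign_BYTES]
try:
    nacl.bindings.crypto_sign_open(proof_bytes + hash_data, public_key)
    print("proof verifies")
except nacl.exceptions.BadSignatureError:
    print("bad proof")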
4 changes: 4 additions & 0 deletions aries_cloudagent/vc/vc_di/models/__init__.py
@@ -0,0 +1,4 @@
from .proof import DIProof, DIProofSchema
from .proof_options import DIProofOptions, DIProofOptionsSchema

__all__ = ["DIProof", "DIProofSchema", "DIProofOptions", "DIProofOptionsSchema"]
@jamshale (Contributor) commented on Sep 16, 2024:

You need to import DIProofSchema to fix the issue reported by sonarcloud.

I think the unit testing looks good and we can probably just ignore the coverage check. It only complains when coverage is under 80%, and that threshold isn't always necessary.

217 changes: 217 additions & 0 deletions aries_cloudagent/vc/vc_di/models/proof.py
@@ -0,0 +1,217 @@
"""DataIntegrityProof."""

from typing import Optional

from marshmallow import INCLUDE, fields, post_dump

from ....messaging.models.base import BaseModel, BaseModelSchema
from ....messaging.valid import (
INDY_ISO8601_DATETIME_EXAMPLE,
INDY_ISO8601_DATETIME_VALIDATE,
UUID4_EXAMPLE,
Uri,
)


class DIProof(BaseModel):
"""Data Integrity Proof model."""

class Meta:
"""DataIntegrityProof metadata."""

schema_class = "DIProofSchema"

def __init__(
self,
id: Optional[str] = None,
type: Optional[str] = "DataIntegrityProof",
proof_purpose: Optional[str] = None,
verification_method: Optional[str] = None,
cryptosuite: Optional[str] = None,
created: Optional[str] = None,
expires: Optional[str] = None,
domain: Optional[str] = None,
challenge: Optional[str] = None,
proof_value: Optional[str] = None,
previous_proof: Optional[str] = None,
nonce: Optional[str] = None,
**kwargs,
) -> None:
"""Initialize the DIProof instance."""

self.id = id
self.type = type
self.proof_purpose = proof_purpose
self.verification_method = verification_method
self.cryptosuite = cryptosuite
self.created = created
self.expires = expires
self.domain = domain
self.challenge = challenge
self.proof_value = proof_value
self.previous_proof = previous_proof
self.nonce = nonce
self.extra = kwargs


class DIProofSchema(BaseModelSchema):
"""Data Integrity Proof schema.

Based on https://www.w3.org/TR/vc-data-integrity/#proofs

"""

class Meta:
"""Accept parameter overload."""

unknown = INCLUDE
model_class = DIProof

id = fields.Str(
required=False,
metadata={
"description": (
"An optional identifier for the proof, which MUST be a URL [URL], \
such as a UUID as a URN"
),
"example": "urn:uuid:6a1676b8-b51f-11ed-937b-d76685a20ff5",
},
)

type = fields.Str(
required=True,
metadata={
"description": (
"The specific type of proof MUST be specified as a string that maps \
to a URL [URL]."
),
"example": "DataIntegrityProof",
},
)

proof_purpose = fields.Str(
data_key="proofPurpose",
required=True,
metadata={
"description": "The proof purpose acts as a safeguard to prevent the proof \
from being misused by being applied to a purpose other than the one that \
was intended.",
"example": "assertionMethod",
},
)

verification_method = fields.Str(
data_key="verificationMethod",
required=True,
validate=Uri(),
metadata={
"description": "A verification method is the means and information needed \
to verify the proof. ",
"example": (
"did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL#z6Mkgg34"
"2Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL"
),
},
)

cryptosuite = fields.Str(
required=True,
metadata={
"description": (
"An identifier for the cryptographic suite that can be used to verify \
the proof."
),
"example": "eddsa-jcs-2022",
},
)

created = fields.Str(
required=False,
validate=INDY_ISO8601_DATETIME_VALIDATE,
metadata={
"description": (
"The date and time the proof was created is OPTIONAL and, if included, \
MUST be specified as an [XMLSCHEMA11-2] dateTimeStamp string"
),
"example": INDY_ISO8601_DATETIME_EXAMPLE,
},
)

expires = fields.Str(
required=False,
validate=INDY_ISO8601_DATETIME_VALIDATE,
metadata={
"description": (
"The expires property is OPTIONAL and, if present, specifies when the \
proof expires. If present, it MUST be an [XMLSCHEMA11-2] \
dateTimeStamp string"
),
"example": INDY_ISO8601_DATETIME_EXAMPLE,
},
)

domain = fields.Str(
required=False,
metadata={
"description": (
"It conveys one or more security domains in which the proof is \
meant to be used."
),
"example": "example.com",
},
)

challenge = fields.Str(
required=False,
metadata={
"description": (
"The value is used once for a particular domain and window of time. \
This value is used to mitigate replay attacks."
),
"example": UUID4_EXAMPLE,
},
)

proof_value = fields.Str(
required=False,
data_key="proofValue",
metadata={
"description": "A string value that expresses base-encoded binary data \
necessary to verify the digital proof using the verificationMethod \
specified.",
"example": (
"zsy1AahqbzJQ63n9RtekmwzqZeVj494VppdAVJBnMYrTwft6cLJJGeTSSxCCJ6HKnR"
"twE7jjDh6sB2z2AAiZY9BBnCD8wUVgwqH3qchGRCuC2RugA4eQ9fUrR4Yuycac3caiaaay"
),
},
)

previous_proof = fields.Str(
required=False,
data_key="previousProof",
metadata={
"description": "Each value identifies another data integrity proof that \
MUST verify before the current proof is processed.",
"example": ("urn:uuid:6a1676b8-b51f-11ed-937b-d76685a20ff5"),
},
)

nonce = fields.Str(
required=False,
metadata={
"description": "One use of this field is to increase privacy by decreasing \
linkability that is the result of deterministically generated \
signatures.",
"example": (
"CF69iO3nfvqRsRBNElE8b4wO39SyJHPM7Gg1nExltW5vSfQA1lvDCR/zXX1To0/4NLo=="
),
},
)

@post_dump(pass_original=True)
def add_unknown_properties(self, data: dict, original, **kwargs):
"""Add back unknown properties before outputting."""

data.update(original.extra)

return data
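A brief round-trip sketch for this model and schema; the field values below are illustrative placeholders rather than output from a real signing operation.

from aries_cloudagent.vc.vc_di.models.proof import DIProof, DIProofSchema

proof = DIProof(
    cryptosuite="eddsa-jcs-2022",
    proof_purpose="assertionMethod",
    verification_method=(
        "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL"
        "#z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL"
    ),
    proof_value="zPlaceholderProofValue",  # placeholder, not a real signature
)

serialized = DIProofSchema().dump(proof)     # camelCase keys via data_key
restored = DIProofSchema().load(serialized)  # yields a DIProof instance
assert restored.cryptosuite == "eddsa-jcs-2022"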