From d3709862261f01a92d4cd1eb969e0c5000c47c62 Mon Sep 17 00:00:00 2001 From: larisa17 <42570262+larisa17@users.noreply.github.com> Date: Tue, 10 Dec 2024 15:10:17 +0200 Subject: [PATCH] Include dedup info in get stamp (#743) * wip: testing v2 api * fix: handle failing tests and wrong value returned in in V2 api * fix: failing tests * Include dedup info in get stamp * update submit passport v2 test * update registry schema * remove submit passport call from v2 * set default for clashing stamps * adjust test dedup * fix data dump test * update lambda call * minor updates * update return message * add single function * update v2 tests * update handle_scoring * update response * update return data type * update tests * update tests passport * update passport tests * add api dedup test * minor updates * fix returned expiration date * add new flow for deduplication * update schema * update tests * pass expiration dates * update test stamp get score * update v2/aws_lambdas/tests/test_stamp_score_get.py * update tests * update test_passport_submission.py * update tests * adjust score tests * rename stamp expiration dates * modify clashing stamps * update schema * add clashing stamps in ret * update lambda test and score format * update test for dedup * update lifo tests * update test for return score format * update resposne schema & tests --------- Co-authored-by: Gerald Iakobinyi-Pich Co-authored-by: Gerald Iakobinyi-Pich --- api/account/deduplication/lifo.py | 16 +- api/account/test/test_deduplication_lifo.py | 27 +- .../test/test_cmd_scorer_dump_data.py | 1 + api/registry/atasks.py | 39 +- api/registry/migrations/0042_score_stamps.py | 17 + api/registry/models.py | 1 + api/registry/test/test_passport_get_score.py | 2 +- api/scorer/config/gitcoin_passport_weights.py | 1 + api/scorer/test/test_choose_binary_scorer.py | 2 + api/scorer_weighted/computation.py | 14 +- api/scorer_weighted/models.py | 13 +- api/v2/api/api_stamps.py | 13 +- api/v2/aws_lambdas/stamp_score_GET.py | 1 + .../aws_lambdas/tests/test_stamp_score_get.py | 225 +++++++++- api/v2/schema.py | 13 +- api/v2/test/test_api_dedup.py | 283 +++++++++++++ api/v2/test/test_historical_score_endpoint.py | 28 +- api/v2/test/test_passport_submission.py | 384 +++++++++++++++--- 18 files changed, 989 insertions(+), 91 deletions(-) create mode 100644 api/registry/migrations/0042_score_stamps.py create mode 100644 api/v2/test/test_api_dedup.py diff --git a/api/account/deduplication/lifo.py b/api/account/deduplication/lifo.py index 94bb635d8..ea37e4ae2 100644 --- a/api/account/deduplication/lifo.py +++ b/api/account/deduplication/lifo.py @@ -1,10 +1,11 @@ import copy from typing import Tuple -import api_logging as logging -from account.models import Community from django.conf import settings from django.db import IntegrityError + +import api_logging as logging +from account.models import Community from registry.models import Event, HashScorerLink, Stamp from registry.utils import get_utc_time @@ -66,7 +67,7 @@ async def arun_lifo_dedup( hash_links_to_create = [] hash_links_to_update = [] - clashing_stamps = [] + clashing_stamps = {} for stamp in lifo_passport["stamps"]: hash = stamp["credential"]["credentialSubject"]["hash"] @@ -104,7 +105,9 @@ async def arun_lifo_dedup( ) ) else: - clashing_stamps.append(stamp) + clashing_stamps[ + stamp["credential"]["credentialSubject"]["provider"] + ] = stamp await save_hash_links( hash_links_to_create, hash_links_to_update, address, community @@ -125,11 +128,10 @@ async def arun_lifo_dedup( }, 
community=community, ) - for stamp in clashing_stamps + for _, stamp in clashing_stamps.items() ] ) - - return (deduped_passport, None) + return (deduped_passport, None, clashing_stamps) async def save_hash_links( diff --git a/api/account/test/test_deduplication_lifo.py b/api/account/test/test_deduplication_lifo.py index 997a7a20f..556aad97a 100644 --- a/api/account/test/test_deduplication_lifo.py +++ b/api/account/test/test_deduplication_lifo.py @@ -1,13 +1,14 @@ from unittest import mock -from account.deduplication import Rules -from account.deduplication.lifo import HashScorerLinkIntegrityError, alifo -from account.models import Account, Community from asgiref.sync import async_to_sync from django.conf import settings from django.contrib.auth import get_user_model from django.test import TransactionTestCase from ninja_jwt.schema import RefreshToken + +from account.deduplication import Rules +from account.deduplication.lifo import HashScorerLinkIntegrityError, alifo +from account.models import Account, Community from registry.models import HashScorerLink, Passport, Stamp from scorer_weighted.models import Scorer, WeightedScorer @@ -83,13 +84,13 @@ async def test_lifo_no_deduplicate_across_cummunities(self): credential=credential, ) - deduped_passport, _ = await alifo( + deduped_passport, _, clashing_stamps = await alifo( passport1.community, {"stamps": [credential]}, passport1.address ) - # We expect the passport not to be deduped, as the duplicate hash is # contained in a different community self.assertEqual(len(deduped_passport["stamps"]), 1) + self.assertEqual(clashing_stamps, {}) @async_to_sync async def test_lifo_no_deduplicate_same_passport_address_across_cummunities(self): @@ -121,13 +122,14 @@ async def test_lifo_no_deduplicate_same_passport_address_across_cummunities(self credential=credential, ) - deduped_passport, _ = await alifo( + deduped_passport, _, clashing_stamps = await alifo( passport1.community, {"stamps": [credential]}, passport1.address ) # We expect the passport not to be deduped, as the duplicate hash is # contained in a different community self.assertEqual(len(deduped_passport["stamps"]), 1) + self.assertEqual(clashing_stamps, {}) @async_to_sync async def test_lifo_deduplicate(self): @@ -140,10 +142,9 @@ async def test_lifo_deduplicate(self): ) # We test deduplication of the 1st passport (for example user submits the same passport again) - deduped_passport, _ = await alifo( + deduped_passport, _, clashing_stamps = await alifo( passport.community, {"stamps": [credential]}, passport.address ) - stamp = deduped_passport["stamps"][0] await Stamp.objects.acreate( passport=passport, @@ -154,16 +155,22 @@ async def test_lifo_deduplicate(self): # We expect the passport to not be deduped, as it is the same owner self.assertEqual(len(deduped_passport["stamps"]), 1) - + self.assertEqual(clashing_stamps, {}) # We test deduplication of another passport with different address but # with the same stamp - deduped_passport, _ = await alifo( + deduped_passport, _, clashing_stamps = await alifo( passport.community, {"stamps": [credential]}, "0xaddress_2" ) # We expect the passport to be deduped, and the return copy shall contain # no stamps self.assertEqual(len(deduped_passport["stamps"]), 0) + self.assertEqual( + clashing_stamps, + { + "test_provider": credential, + }, + ) def test_retry_on_clash(self): """ diff --git a/api/ceramic_cache/test/test_cmd_scorer_dump_data.py b/api/ceramic_cache/test/test_cmd_scorer_dump_data.py index f76a3cf82..ecaf5b141 100644 --- 
a/api/ceramic_cache/test/test_cmd_scorer_dump_data.py +++ b/api/ceramic_cache/test/test_cmd_scorer_dump_data.py @@ -95,6 +95,7 @@ def upload_file(self, file_name, *args, **kwargs): "error", "evidence", "stamp_scores", + "stamps", "id", } expected_passport_keys = {"address", "community", "requires_calculation"} diff --git a/api/registry/atasks.py b/api/registry/atasks.py index a8b66e614..dbda6be06 100644 --- a/api/registry/atasks.py +++ b/api/registry/atasks.py @@ -1,5 +1,6 @@ import copy from datetime import datetime, timezone +from decimal import Decimal from typing import Dict from django.conf import settings @@ -87,7 +88,12 @@ async def aload_passport_data(address: str) -> Dict: return passport_data -async def acalculate_score(passport: Passport, community_id: int, score: Score): +async def acalculate_score( + passport: Passport, + community_id: int, + score: Score, + clashing_stamps: list[dict] = [], +): log.debug("Scoring") user_community = await Community.objects.aget(pk=community_id) @@ -104,6 +110,29 @@ async def acalculate_score(passport: Passport, community_id: int, score: Score): score.error = None score.stamp_scores = scoreData.stamp_scores score.expiration_date = scoreData.expiration_date + stamps = {} + for stamp_name, stamp_score in scoreData.stamp_scores.items(): + # Find if the stamp_name matches any provider in clashing_stamps + matching_stamp = clashing_stamps.get(stamp_name, None) + + # Construct the stamps dictionary + stamps[stamp_name] = { + "score": f"{Decimal(stamp_score):.5f}", + "dedup": matching_stamp is not None, + "expiration_date": matching_stamp["credential"]["expirationDate"] + if matching_stamp + else scoreData.stamp_expiration_dates[stamp_name].isoformat(), + } + # Add stamps present in clashing_stamps but not in stamp_scores + for c_povider, c_stamp in clashing_stamps.items(): + # This returns to the user the information of the deduplicated stamp stamps + if c_povider not in stamps: + stamps[c_povider] = { + "score": "0.00000", # Score is 0 for deduplicated stamps + "dedup": True, + "expiration_date": c_stamp["credential"]["expirationDate"], + } + score.stamps = stamps log.info("Calculated score: %s", score) @@ -126,7 +155,7 @@ async def aprocess_deduplication(passport, community, passport_data, score: Scor if not method: raise Exception("Invalid rule") - deduplicated_passport, affected_passports = await method( + deduplicated_passport, affected_passports, clashing_stamps = await method( community, passport_data, passport.address ) @@ -151,7 +180,7 @@ async def aprocess_deduplication(passport, community, passport_data, score: Scor # await acalculate_score(passport, passport.community_id, affected_score) # await affected_score.asave() - return deduplicated_passport + return (deduplicated_passport, clashing_stamps) async def avalidate_credentials(passport: Passport, passport_data) -> dict: @@ -223,12 +252,12 @@ async def ascore_passport( try: passport_data = await aload_passport_data(address) validated_passport_data = await avalidate_credentials(passport, passport_data) - deduped_passport_data = await aprocess_deduplication( + (deduped_passport_data, clashing_stamps) = await aprocess_deduplication( passport, community, validated_passport_data, score ) await asave_stamps(passport, deduped_passport_data) await aremove_stale_stamps_from_db(passport, deduped_passport_data) - await acalculate_score(passport, community.pk, score) + await acalculate_score(passport, community.pk, score, clashing_stamps) except APIException as e: log.error( diff --git 
a/api/registry/migrations/0042_score_stamps.py b/api/registry/migrations/0042_score_stamps.py new file mode 100644 index 000000000..723274f8c --- /dev/null +++ b/api/registry/migrations/0042_score_stamps.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.6 on 2024-11-28 20:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("registry", "0041_weightconfiguration_description"), + ] + + operations = [ + migrations.AddField( + model_name="score", + name="stamps", + field=models.JSONField(blank=True, null=True), + ), + ] diff --git a/api/registry/models.py b/api/registry/models.py index f43a178b9..8f50c2001 100644 --- a/api/registry/models.py +++ b/api/registry/models.py @@ -80,6 +80,7 @@ class Status: error = models.TextField(null=True, blank=True) evidence = models.JSONField(null=True, blank=True) stamp_scores = models.JSONField(null=True, blank=True) + stamps = models.JSONField(null=True, blank=True) expiration_date = models.DateTimeField( default=None, null=True, blank=True, db_index=True diff --git a/api/registry/test/test_passport_get_score.py b/api/registry/test/test_passport_get_score.py index 1208b5970..6a77f844c 100644 --- a/api/registry/test/test_passport_get_score.py +++ b/api/registry/test/test_passport_get_score.py @@ -1,4 +1,5 @@ import datetime +from urllib.parse import urlencode import pytest from django.conf import settings @@ -6,7 +7,6 @@ from django.contrib.auth.models import Group from django.test import Client from web3 import Web3 -from urllib.parse import urlencode from account.models import Account, AccountAPIKey, Community from registry.api.v1 import get_scorer_by_id diff --git a/api/scorer/config/gitcoin_passport_weights.py b/api/scorer/config/gitcoin_passport_weights.py index 27a1fb5a7..19289b2e2 100644 --- a/api/scorer/config/gitcoin_passport_weights.py +++ b/api/scorer/config/gitcoin_passport_weights.py @@ -33,6 +33,7 @@ "IdenaState#Verified": "2.029", "Lens": "0.93", "Linkedin": "1.531", + "LinkedinV2": "1.531", "NFT": "1.032", "NFTScore#50": "10.033", "NFTScore#75": "2.034", diff --git a/api/scorer/test/test_choose_binary_scorer.py b/api/scorer/test/test_choose_binary_scorer.py index 3582e6969..4ad179e52 100644 --- a/api/scorer/test/test_choose_binary_scorer.py +++ b/api/scorer/test/test_choose_binary_scorer.py @@ -149,6 +149,7 @@ def _(scorer_community_with_binary_scorer, scorer_api_key): "sum_of_weights": Decimal("70"), "earned_points": {}, "expiration_date": datetime.now(timezone.utc), + "stamp_expiration_dates": {}, } ], ): @@ -220,6 +221,7 @@ def _(scorer_community_with_binary_scorer, scorer_api_key): "sum_of_weights": Decimal("90"), "earned_points": {}, "expiration_date": datetime.now(timezone.utc), + "stamp_expiration_dates": {}, } ], ): diff --git a/api/scorer_weighted/computation.py b/api/scorer_weighted/computation.py index 09a1340f0..f2b395a68 100644 --- a/api/scorer_weighted/computation.py +++ b/api/scorer_weighted/computation.py @@ -1,11 +1,12 @@ +from datetime import datetime from decimal import Decimal +from math import e from typing import Dict, List import api_logging as logging +from account.models import Customization from registry.models import Stamp from scorer_weighted.models import WeightedScorer -from account.models import Customization -from datetime import datetime log = logging.getLogger(__name__) @@ -46,6 +47,7 @@ def calculate_weighted_score( scored_providers = [] earned_points = {} earliest_expiration_date = None + stamp_expiration_dates = {} for stamp in 
Stamp.objects.filter(passport_id=passport_id): if stamp.provider not in scored_providers: weight = Decimal(weights.get(stamp.provider, 0)) @@ -55,6 +57,7 @@ def calculate_weighted_score( expiration_date = datetime.fromisoformat( stamp.credential["expirationDate"] ) + stamp_expiration_dates[stamp.provider] = expiration_date # Compute the earliest expiration date for the stamps used to calculate the score # as this will be the expiration date of the score if ( @@ -69,6 +72,7 @@ def calculate_weighted_score( "sum_of_weights": sum_of_weights, "earned_points": earned_points, "expiration_date": earliest_expiration_date, + "stamp_expiration_dates": stamp_expiration_dates, } ) return ret @@ -95,6 +99,7 @@ def recalculate_weighted_score( scored_providers = [] earned_points = {} earliest_expiration_date = None + stamp_expiration_dates = {} for stamp in stamp_list: if stamp.provider not in scored_providers: weight = Decimal(weights.get(stamp.provider, 0)) @@ -104,6 +109,7 @@ def recalculate_weighted_score( expiration_date = datetime.fromisoformat( stamp.credential["expirationDate"] ) + stamp_expiration_dates[stamp.provider] = expiration_date # Compute the earliest expiration date for the stamps used to calculate the score # as this will be the expiration date of the score if ( @@ -118,6 +124,7 @@ def recalculate_weighted_score( "sum_of_weights": sum_of_weights, "earned_points": earned_points, "expiration_date": earliest_expiration_date, + "stamp_expiration_dates": stamp_expiration_dates, } ) return ret @@ -158,6 +165,7 @@ async def acalculate_weighted_score( sum_of_weights: Decimal = Decimal(0) scored_providers = [] earned_points = {} + stamp_expiration_dates = {} earliest_expiration_date = None async for stamp in Stamp.objects.filter(passport_id=passport_id): if stamp.provider not in scored_providers: @@ -168,6 +176,7 @@ async def acalculate_weighted_score( expiration_date = datetime.fromisoformat( stamp.credential["expirationDate"] ) + stamp_expiration_dates[stamp.provider] = expiration_date # Compute the earliest expiration date for the stamps used to calculate the score # as this will be the expiration date of the score if ( @@ -183,6 +192,7 @@ async def acalculate_weighted_score( "sum_of_weights": sum_of_weights, "earned_points": earned_points, "expiration_date": earliest_expiration_date, + "stamp_expiration_dates": stamp_expiration_dates, } ) return ret diff --git a/api/scorer_weighted/models.py b/api/scorer_weighted/models.py index 7d852849d..9fdce643e 100644 --- a/api/scorer_weighted/models.py +++ b/api/scorer_weighted/models.py @@ -44,11 +44,13 @@ def __init__( evidence: Optional[List[ThresholdScoreEvidence]], points: dict, expiration_date: datetime, + stamp_expiration_dates: dict, ): self.score = score self.evidence = evidence self.stamp_scores = points self.expiration_date = expiration_date + self.stamp_expiration_dates = stamp_expiration_dates def __repr__(self): return f"ScoreData(score={self.score}, evidence={self.evidence})" @@ -105,7 +107,11 @@ def compute_score(self, passport_ids, community_id: int) -> List[ScoreData]: return [ ScoreData( - score=s["sum_of_weights"], evidence=None, points=s["earned_points"] + score=s["sum_of_weights"], + evidence=None, + points=s["earned_points"], + expiration_date=s["expiration_date"], + stamp_expiration_dates=s["stamp_expiration_dates"], ) for s in calculate_weighted_score(self, passport_ids, community_id) ] @@ -125,6 +131,7 @@ def recompute_score( evidence=None, points=s["earned_points"], expiration_date=s["expiration_date"], + 
stamp_expiration_dates=s["stamp_expiration_dates"], ) for s in recalculate_weighted_score( self, passport_ids, stamps, community_id @@ -145,6 +152,7 @@ async def acompute_score(self, passport_ids, community_id: int) -> List[ScoreDat evidence=None, points=s["earned_points"], expiration_date=s["expiration_date"], + stamp_expiration_dates=s["stamp_expiration_dates"], ) for s in scores ] @@ -187,6 +195,7 @@ def compute_score(self, passport_ids, community_id: int) -> List[ScoreData]: ], points=rawScore["earned_points"], expiration_date=rawScore["expiration_date"], + stamp_expiration_dates=rawScore["stamp_expiration_dates"], ), rawScores, binaryScores, @@ -221,6 +230,7 @@ def recompute_score( ], points=rawScore["earned_points"], expiration_date=rawScore["expiration_date"], + stamp_expiration_dates=rawScore["stamp_expiration_dates"], ), rawScores, binaryScores, @@ -253,6 +263,7 @@ async def acompute_score(self, passport_ids, community_id: int) -> List[ScoreDat ], points=rawScore["earned_points"], expiration_date=rawScore["expiration_date"], + stamp_expiration_dates=rawScore["stamp_expiration_dates"], ), rawScores, binaryScores, diff --git a/api/v2/api/api_stamps.py b/api/v2/api/api_stamps.py index 3cbe1af7f..57ccf0b37 100644 --- a/api/v2/api/api_stamps.py +++ b/api/v2/api/api_stamps.py @@ -8,7 +8,7 @@ from ninja_extra.exceptions import APIException import api_logging as logging -from account.models import Community +from account.models import Account, Community, Nonce from ceramic_cache.models import CeramicCache from registry.api.schema import ( CursorPaginatedStampCredentialResponse, @@ -57,10 +57,8 @@ async def handle_scoring(address: str, scorer_id: str, user_account): address_lower = address.lower() - if not is_valid_address(address_lower): raise InvalidAddressException() - # Get community object user_community = await aget_scorer_by_id(scorer_id, user_account) @@ -111,7 +109,7 @@ async def handle_scoring(address: str, scorer_id: str, user_account): score.expiration_date.isoformat() if score.expiration_date else None ), error=score.error, - stamp_scores=score.stamp_scores if score.stamp_scores is not None else {}, + stamps=score.stamps if score.stamps is not None else {}, ) @@ -218,7 +216,9 @@ def get_score_history( score_event = filterset.qs.order_by("-created_at").first() if not score_event: - raise ScoreDoesNotExist(address, f"No Score Found for {address} at {created_at}") + raise ScoreDoesNotExist( + address, f"No Score Found for {address} at {created_at}" + ) # Extract and normalize score data from either format score_data = extract_score_data(score_event.data) @@ -232,7 +232,6 @@ def get_score_history( score = score_data["evidence"]["rawScore"] else: score = score_data.get("score", "0") - return V2ScoreResponse( address=address, score=score, @@ -241,7 +240,7 @@ def get_score_history( last_score_timestamp=score_data.get("last_score_timestamp"), expiration_timestamp=score_data.get("expiration_date"), error=score_data.get("error"), - stamp_scores=score_data.get("stamp_scores"), + stamps=score_data.get("stamps"), ) except Exception as e: diff --git a/api/v2/aws_lambdas/stamp_score_GET.py b/api/v2/aws_lambdas/stamp_score_GET.py index 83b3a0eef..30ba822a2 100644 --- a/api/v2/aws_lambdas/stamp_score_GET.py +++ b/api/v2/aws_lambdas/stamp_score_GET.py @@ -5,6 +5,7 @@ from asgiref.sync import async_to_sync from django.db import close_old_connections +from account.models import Nonce from aws_lambdas.utils import ( with_api_request_exception_handling, ) diff --git 
a/api/v2/aws_lambdas/tests/test_stamp_score_get.py b/api/v2/aws_lambdas/tests/test_stamp_score_get.py index 76015620a..7ad40fdff 100644 --- a/api/v2/aws_lambdas/tests/test_stamp_score_get.py +++ b/api/v2/aws_lambdas/tests/test_stamp_score_get.py @@ -1,13 +1,19 @@ import base64 import json from copy import deepcopy +from datetime import datetime, timedelta, timezone +from decimal import Decimal +from unittest.mock import patch import pytest from account.models import AccountAPIKeyAnalytics from aws_lambdas.scorer_api_passport.tests.helpers import MockContext from aws_lambdas.scorer_api_passport.utils import strip_event +from ceramic_cache.models import CeramicCache from registry.test.test_passport_submission import mock_passport +from scorer.config.gitcoin_passport_weights import GITCOIN_PASSPORT_WEIGHTS +from v2.test.test_api_dedup import avalidate_credentials_side_effect, wallet_a, wallet_b from ..stamp_score_GET import _handler @@ -114,13 +120,198 @@ def test_successful_authentication( assert body["threshold"] == "20.00000" assert body["error"] is None - assert body["stamp_scores"] == {"Ens": "0.408", "Google": "0.525"} + assert body["stamps"] == { + "Ens": { + "score": "0.40800", + "dedup": False, + "expiration_date": next( + ( + datetime.fromisoformat( + stamp["credential"]["expirationDate"] + ).isoformat() + for stamp in mock_passport["stamps"] + if stamp["provider"] == "Ens" + ), + None, + ), + }, + "Google": { + "score": "0.52500", + "dedup": False, + "expiration_date": next( + ( + datetime.fromisoformat( + stamp["credential"]["expirationDate"] + ).isoformat() + for stamp in mock_passport["stamps"] + if stamp["provider"] == "Google" + ), + None, + ), + }, + } # We just check that something != None was recorded for the last timestamp assert body["expiration_timestamp"] is not None assert response["statusCode"] == 200 +@patch( + "registry.atasks.avalidate_credentials", + side_effect=avalidate_credentials_side_effect, +) +def test_successful_authentication_and_dedup_flag( + validate_credential, + weight_config, + scorer_community, + scorer_api_key, + # mocker, +): + """ + Test the following flow: + - wallet A has a valid stamp & checks that the get score returs the proper stamp + - wallet B claims the same stamp (same hash => it is deduped) the get score for wallet B returns the score is 0 & the dedup flag is set to True + - wallet A checks the score again, there are no changes in the returned stamps. 
+ """ + + now = datetime.now(timezone.utc) + days_ago = (now - timedelta(days=2)).isoformat() + days_later = (now + timedelta(days=2)).isoformat() + + sample_provider = "LinkedinV2" + sample_provider_hash = "v0.0.0:Ft7mqRdvJ9jNgSSowb9qdcMeOzswOeighIOvk0wn964=" + + sample_stamp = CeramicCache.objects.create( + address=wallet_a, + provider=sample_provider, + stamp={ + "type": ["VerifiableCredential"], + "proof": { + "jws": "eyJhbGciOiJFZERTQSIsImNyaXQiOlsiYjY0Il0sImI2NCI6ZmFsc2V9..34uD8jKn2N_yE8pY4ErzVD8pJruZq7qJaCxx8y0SReY2liZJatfeQUv1nqmZH19a-svOyfHt_VbmKvh6A5vwBw", + "type": "Ed25519Signature2018", + "created": days_ago, + "proofPurpose": "assertionMethod", + "verificationMethod": "did:key:z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC#z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC", + }, + "issuer": "did:key:z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC", + "@context": ["https://www.w3.org/2018/credentials/v1"], + "issuanceDate": days_ago, + "expirationDate": days_later, + "credentialSubject": { + "id": f"did:pkh:eip155:1:{wallet_a}", + "hash": sample_provider_hash, + "@context": [ + { + "hash": "https://schema.org/Text", + "provider": sample_provider, + } + ], + "provider": sample_provider, + }, + }, + ) + + event_wallet_a = make_test_event(scorer_api_key, wallet_a, scorer_community.id) + # pylint: disable=no-value-for-parameter + response_wallet_a = _handler(event_wallet_a, MockContext()) + + assert response_wallet_a is not None + assert response_wallet_a["statusCode"] == 200 + body_wallet_a = json.loads(response_wallet_a["body"]) + assert body_wallet_a["address"] == wallet_a.lower() + assert ( + body_wallet_a["score"] + == f"{Decimal(GITCOIN_PASSPORT_WEIGHTS[sample_provider]):.5f}" + ) + assert body_wallet_a["passing_score"] == False + assert body_wallet_a["threshold"] == "20.00000" + + assert body_wallet_a["error"] is None + assert body_wallet_a["expiration_timestamp"] == days_later + assert body_wallet_a["stamps"] == { + sample_provider: { + "score": f"{Decimal(GITCOIN_PASSPORT_WEIGHTS[sample_provider]):.5f}", + "dedup": False, + "expiration_date": days_later, + } + } + + dedup_stamp = CeramicCache.objects.create( + address=wallet_b, + provider=sample_provider, + stamp={ + "type": ["VerifiableCredential"], + "proof": { + "jws": "eyJhbGciOiJFZERTQSIsImNyaXQiOlsiYjY0Il0sImI2NCI6ZmFsc2V9..34uD8jKn2N_yE8pY4ErzVD8pJruZq7qJaCxx8y0SReY2liZJatfeQUv1nqmZH19a-svOyfHt_VbmKvh6A5vwBw", + "type": "Ed25519Signature2018", + "created": days_ago, + "proofPurpose": "assertionMethod", + "verificationMethod": "did:key:z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC#z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC", + }, + "issuer": "did:key:z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC", + "@context": ["https://www.w3.org/2018/credentials/v1"], + "issuanceDate": days_ago, + "expirationDate": days_later, + "credentialSubject": { + "id": f"did:pkh:eip155:1:{wallet_b}", + "hash": sample_provider_hash, + "@context": [ + { + "hash": "https://schema.org/Text", + "provider": sample_provider, + } + ], + "provider": sample_provider, + }, + }, + ) + + event_wallet_b = make_test_event(scorer_api_key, wallet_b, scorer_community.id) + # pylint: disable=no-value-for-parameter + response_wallet_b = _handler(event_wallet_b, MockContext()) + + assert response_wallet_b is not None + assert response_wallet_b["statusCode"] == 200 + body_wallet_b = json.loads(response_wallet_b["body"]) + assert body_wallet_b["address"] == wallet_b.lower() + assert body_wallet_b["score"] == "0.00000" + assert 
body_wallet_b["passing_score"] == False + assert body_wallet_b["threshold"] == "20.00000" + + assert body_wallet_b["error"] is None + assert body_wallet_b["expiration_timestamp"] is None + assert body_wallet_b["stamps"] == { + sample_provider: { + "score": "0.00000", + "dedup": True, + "expiration_date": days_later, + } + } + + event_wallet_a_back = make_test_event(scorer_api_key, wallet_a, scorer_community.id) + response_wallet_a_back = _handler(event_wallet_a_back, MockContext()) + assert response_wallet_a_back is not None + assert response_wallet_a_back["statusCode"] == 200 + body_wallet_a_back = json.loads(response_wallet_a_back["body"]) + assert body_wallet_a_back["address"] == wallet_a.lower() + assert ( + body_wallet_a_back["score"] + == f"{Decimal(GITCOIN_PASSPORT_WEIGHTS[sample_provider]):.5f}" + ) + assert body_wallet_a_back["passing_score"] == False + assert body_wallet_a_back["threshold"] == "20.00000" + + assert body_wallet_a_back["error"] is None + assert body_wallet_a_back["expiration_timestamp"] == days_later + assert body_wallet_a_back["stamps"] == { + sample_provider: { + "score": f"{Decimal(GITCOIN_PASSPORT_WEIGHTS[sample_provider]):.5f}", + "dedup": False, + "expiration_date": days_later, + } + } + + def test_successful_authentication_and_base64_encoded_body( scorer_api_key, scorer_community_with_binary_scorer, @@ -155,7 +346,36 @@ def test_successful_authentication_and_base64_encoded_body( assert body["passing_score"] == False assert body["threshold"] == "20.00000" assert body["error"] is None - assert body["stamp_scores"] == {"Ens": "0.408", "Google": "0.525"} + assert body["stamps"] == { + "Ens": { + "score": "0.40800", + "dedup": False, + "expiration_date": next( + ( + datetime.fromisoformat( + stamp["credential"]["expirationDate"] + ).isoformat() + for stamp in mock_passport["stamps"] + if stamp["provider"] == "Ens" + ), + None, + ), + }, + "Google": { + "score": "0.52500", + "dedup": False, + "expiration_date": next( + ( + datetime.fromisoformat( + stamp["credential"]["expirationDate"] + ).isoformat() + for stamp in mock_passport["stamps"] + if stamp["provider"] == "Google" + ), + None, + ), + }, + } # We just check that something != None was recorded for the last timestamp assert body["expiration_timestamp"] is not None @@ -375,7 +595,6 @@ def test_failed_authentication_and_analytics_logging( def test_bad_scorer_id_and_analytics_logging( scorer_api_key, - scorer_community_with_binary_scorer, passport_holder_addresses, mocker, ): diff --git a/api/v2/schema.py b/api/v2/schema.py index c82b14f4f..c2e53fd1c 100644 --- a/api/v2/schema.py +++ b/api/v2/schema.py @@ -1,10 +1,19 @@ from decimal import Decimal -from typing import Dict, Optional +from typing import ( + Dict, + Optional, +) from ninja import Schema from pydantic import field_serializer +class V2StampScoreResponse(Schema): + score: str + dedup: bool + expiration_date: Optional[str] + + class V2ScoreResponse(Schema): address: str score: Optional[Decimal] @@ -13,7 +22,7 @@ class V2ScoreResponse(Schema): expiration_timestamp: Optional[str] threshold: Decimal error: Optional[str] - stamp_scores: Optional[Dict[str, Decimal]] + stamps: Optional[Dict[str, V2StampScoreResponse]] @field_serializer("score") def serialize_score(self, score: Decimal, _info): diff --git a/api/v2/test/test_api_dedup.py b/api/v2/test/test_api_dedup.py new file mode 100644 index 000000000..194d211ac --- /dev/null +++ b/api/v2/test/test_api_dedup.py @@ -0,0 +1,283 @@ +import copy +from datetime import datetime, timedelta, timezone +from 
decimal import Decimal +from re import M +from unittest.mock import patch + +import pytest +from django.conf import settings +from django.contrib.auth.models import User +from django.test import Client +from web3 import Web3 + +from ceramic_cache.models import CeramicCache +from scorer.config.gitcoin_passport_weights import GITCOIN_PASSPORT_WEIGHTS + +web3 = Web3() +web3.eth.account.enable_unaudited_hdwallet_features() + + +pytestmark = pytest.mark.django_db +my_mnemonic = settings.TEST_MNEMONIC +wallet_a = web3.eth.account.from_mnemonic( + my_mnemonic, account_path="m/44'/60'/0'/0/0" +).address + +wallet_b = web3.eth.account.from_mnemonic( + my_mnemonic, account_path="m/44'/60'/0'/0/1" +).address + + +def avalidate_credentials_side_effect(*args, **kwargs): + """ + Validate non expired stamps + """ + validated_passport = copy.deepcopy(args[1]) + validated_passport["stamps"] = [] + for stamp in args[1]["stamps"]: + stamp_expiration_date = datetime.fromisoformat( + stamp["credential"]["expirationDate"] + ) + stamp_is_expired = stamp_expiration_date < datetime.now(timezone.utc) + if not stamp_is_expired: + validated_passport["stamps"].append(copy.deepcopy(stamp)) + return validated_passport + + +class TestApiGetStampsDedupFlagTestCase: + base_url = "/v2/stamps" + + @patch( + "registry.atasks.avalidate_credentials", + side_effect=avalidate_credentials_side_effect, + ) + def test_get_stamps_no_dedup( + self, validate_credential, weight_config, scorer_community, scorer_api_key + ): + """ + Test get stamps for user with no deduplication & expired stamp + Only the valid stamp is returned with the dedup flag set to False + """ + + client = Client() + now = datetime.now(timezone.utc) + days_ago = (now - timedelta(days=2)).isoformat() + weeks_ago = (now - timedelta(days=30)).isoformat() + days_later = (now + timedelta(days=2)).isoformat() + + expired_provider = "githubContributionActivityGte#30" + expired_provider_hash = "v0.0.0:Ft7mqRdvJ9jNgSSowb9qdcMeOzswOeighIOvk000xxx=" + + expired_stamp = CeramicCache.objects.create( + address=wallet_a, + provider=expired_provider, + stamp={ + "type": ["VerifiableCredential"], + "proof": { + "jws": "eyJhbGciOiJFZERTQSIsImNyaXQiOlsiYjY0Il0sImI2NCI6ZmFsc2V9..34uD8jKn2N_yE8pY4ErzVD8pJruZq7qJaCxx8y0SReY2liZJatfeQUv1nqmZH19a-svOyfHt_VbmKvh6A5vwBw", + "type": "Ed25519Signature2018", + "created": weeks_ago, + "proofPurpose": "assertionMethod", + "verificationMethod": "did:key:z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC#z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC", + }, + "issuer": "did:key:z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC", + "@context": ["https://www.w3.org/2018/credentials/v1"], + "issuanceDate": weeks_ago, + "expirationDate": days_ago, + "credentialSubject": { + "id": f"did:pkh:eip155:1:{wallet_a}", + "hash": expired_provider_hash, + "@context": [ + { + "hash": "https://schema.org/Text", + "provider": expired_provider, + } + ], + "provider": expired_provider, + }, + }, + ) + + sample_provider = "LinkedinV2" + sample_provider_hash = "v0.0.0:Ft7mqRdvJ9jNgSSowb9qdcMeOzswOeighIOvk0wn964=" + + sample_stamp = CeramicCache.objects.create( + address=wallet_a, + provider=sample_provider, + stamp={ + "type": ["VerifiableCredential"], + "proof": { + "jws": "eyJhbGciOiJFZERTQSIsImNyaXQiOlsiYjY0Il0sImI2NCI6ZmFsc2V9..34uD8jKn2N_yE8pY4ErzVD8pJruZq7qJaCxx8y0SReY2liZJatfeQUv1nqmZH19a-svOyfHt_VbmKvh6A5vwBw", + "type": "Ed25519Signature2018", + "created": days_ago, + "proofPurpose": "assertionMethod", + "verificationMethod": 
"did:key:z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC#z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC", + }, + "issuer": "did:key:z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC", + "@context": ["https://www.w3.org/2018/credentials/v1"], + "issuanceDate": days_ago, + "expirationDate": days_later, + "credentialSubject": { + "id": f"did:pkh:eip155:1:{wallet_a}", + "hash": sample_provider_hash, + "@context": [ + { + "hash": "https://schema.org/Text", + "provider": sample_provider, + } + ], + "provider": sample_provider, + }, + }, + ) + + response = client.get( + f"{self.base_url}/{scorer_community.pk}/score/{wallet_a}", + HTTP_AUTHORIZATION="Token " + scorer_api_key, + ) + response_data = response.json() + assert response.status_code == 200 + assert response_data["error"] is None + assert response_data["stamps"] == { + sample_provider: { + "score": f"{Decimal(GITCOIN_PASSPORT_WEIGHTS[sample_provider]):.5f}", + "dedup": False, + "expiration_date": days_later, + } + } + + @patch( + "registry.atasks.avalidate_credentials", + side_effect=avalidate_credentials_side_effect, + ) + def test_get_stamps_with_dedup( + self, validate_credential, weight_config, scorer_community, scorer_api_key + ): + """ + Test the following flow: + - wallet A has a valid stamp & checks that the get score returs the proper stamp + - wallet B claims the same stamp (same hash => it is deduped) the get score for wallet B returns the score is 0 & the dedup flag is set to True + - wallet A checks the score again, there are no changes in the returned stamps. + """ + + client = Client() + now = datetime.now(timezone.utc) + days_ago = (now - timedelta(days=2)).isoformat() + days_later = (now + timedelta(days=2)).isoformat() + + sample_provider = "LinkedinV2" + sample_provider_hash = "v0.0.0:Ft7mqRdvJ9jNgSSowb9qdcMeOzswOeighIOvk0wn964=" + + sample_stamp = CeramicCache.objects.create( + address=wallet_a, + provider=sample_provider, + stamp={ + "type": ["VerifiableCredential"], + "proof": { + "jws": "eyJhbGciOiJFZERTQSIsImNyaXQiOlsiYjY0Il0sImI2NCI6ZmFsc2V9..34uD8jKn2N_yE8pY4ErzVD8pJruZq7qJaCxx8y0SReY2liZJatfeQUv1nqmZH19a-svOyfHt_VbmKvh6A5vwBw", + "type": "Ed25519Signature2018", + "created": days_ago, + "proofPurpose": "assertionMethod", + "verificationMethod": "did:key:z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC#z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC", + }, + "issuer": "did:key:z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC", + "@context": ["https://www.w3.org/2018/credentials/v1"], + "issuanceDate": days_ago, + "expirationDate": days_later, + "credentialSubject": { + "id": f"did:pkh:eip155:1:{wallet_a}", + "hash": sample_provider_hash, + "@context": [ + { + "hash": "https://schema.org/Text", + "provider": sample_provider, + } + ], + "provider": sample_provider, + }, + }, + ) + + response_wallet_a = client.get( + f"{self.base_url}/{scorer_community.pk}/score/{wallet_a}", + HTTP_AUTHORIZATION="Token " + scorer_api_key, + ) + response_data_wallet_a = response_wallet_a.json() + assert response_wallet_a.status_code == 200 + assert response_data_wallet_a["error"] is None + assert ( + response_data_wallet_a["score"] + == f"{Decimal(GITCOIN_PASSPORT_WEIGHTS[sample_provider]):.5f}" + ) + assert response_data_wallet_a["stamps"] == { + sample_provider: { + "score": f"{Decimal(GITCOIN_PASSPORT_WEIGHTS[sample_provider]):.5f}", + "dedup": False, + "expiration_date": days_later, + } + } + + dedup_stamp = CeramicCache.objects.create( + address=wallet_b, + provider=sample_provider, + stamp={ + "type": 
["VerifiableCredential"], + "proof": { + "jws": "eyJhbGciOiJFZERTQSIsImNyaXQiOlsiYjY0Il0sImI2NCI6ZmFsc2V9..34uD8jKn2N_yE8pY4ErzVD8pJruZq7qJaCxx8y0SReY2liZJatfeQUv1nqmZH19a-svOyfHt_VbmKvh6A5vwBw", + "type": "Ed25519Signature2018", + "created": days_ago, + "proofPurpose": "assertionMethod", + "verificationMethod": "did:key:z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC#z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC", + }, + "issuer": "did:key:z6MkghvGHLobLEdj1bgRLhS4LPGJAvbMA1tn2zcRyqmYU5LC", + "@context": ["https://www.w3.org/2018/credentials/v1"], + "issuanceDate": days_ago, + "expirationDate": days_later, + "credentialSubject": { + "id": f"did:pkh:eip155:1:{wallet_b}", + "hash": sample_provider_hash, + "@context": [ + { + "hash": "https://schema.org/Text", + "provider": sample_provider, + } + ], + "provider": sample_provider, + }, + }, + ) + + response_wallet_b = client.get( + f"{self.base_url}/{scorer_community.pk}/score/{wallet_b}", + HTTP_AUTHORIZATION="Token " + scorer_api_key, + ) + response_data_wallet_b = response_wallet_b.json() + assert response_wallet_b.status_code == 200 + assert response_data_wallet_b["error"] is None + assert response_data_wallet_b["score"] == "0.00000" + + assert response_data_wallet_b["stamps"] == { + sample_provider: { + "score": "0.00000", + "dedup": True, + "expiration_date": days_later, + } + } + response_wallet_a_again = client.get( + f"{self.base_url}/{scorer_community.pk}/score/{wallet_a}", + HTTP_AUTHORIZATION="Token " + scorer_api_key, + ) + response_data_wallet_a_again = response_wallet_a_again.json() + assert response_wallet_a_again.status_code == 200 + assert response_data_wallet_a_again["error"] is None + assert ( + response_data_wallet_a_again["score"] + == f"{Decimal(GITCOIN_PASSPORT_WEIGHTS[sample_provider]):.5f}" + ) + assert response_data_wallet_a_again["stamps"] == { + sample_provider: { + "score": f"{Decimal(GITCOIN_PASSPORT_WEIGHTS[sample_provider]):.5f}", + "dedup": False, + "expiration_date": days_later, + } + } diff --git a/api/v2/test/test_historical_score_endpoint.py b/api/v2/test/test_historical_score_endpoint.py index ee638d7f7..eedb13624 100644 --- a/api/v2/test/test_historical_score_endpoint.py +++ b/api/v2/test/test_historical_score_endpoint.py @@ -112,7 +112,18 @@ def test_get_historical_score_new_format( "threshold": "100.00000", }, "passport": 15, - "stamp_scores": {"github": 10, "twitter": 15}, + "stamps": { + "github": { + "score": "1.00000", + "dedup": False, + "expiration_date": "2024-11-21T22:09:10.687Z", + }, + "twitter": { + "score": "0.00000", + "dedup": True, + "expiration_date": None, + }, + }, "expiration_date": "2024-11-21T22:09:10.687Z", "last_score_timestamp": "2024-10-25T19:16:14.023Z", }, @@ -125,14 +136,21 @@ def test_get_historical_score_new_format( HTTP_AUTHORIZATION="Token " + scorer_api_key, ) response_data = response.json() - + print("LARISA response_data", response_data) assert response.status_code == 200 assert response_data["score"] == "5.45900" assert response_data["threshold"] == "100.00000" assert response_data["passing_score"] is False assert response_data["last_score_timestamp"] == "2024-10-25T19:16:14.023Z" assert response_data["expiration_timestamp"] == "2024-11-21T22:09:10.687Z" - assert response_data["stamp_scores"] == {"github": "10", "twitter": "15"} + assert response_data["stamps"] == { + "github": { + "score": "1.00000", + "dedup": False, + "expiration_date": "2024-11-21T22:09:10.687Z", + }, + "twitter": {"score": "0.00000", "dedup": True, "expiration_date": None}, + } 
@freeze_time("2023-01-01") def test_get_historical_score_missing_fields( @@ -166,7 +184,7 @@ def test_get_historical_score_missing_fields( assert response_data["passing_score"] is True assert response_data["last_score_timestamp"] is None assert response_data["expiration_timestamp"] is None - assert response_data["stamp_scores"] is None + assert response_data["stamps"] is None @freeze_time("2023-01-01") def test_get_historical_score_ne_evidence( @@ -196,7 +214,7 @@ def test_get_historical_score_ne_evidence( assert response_data["passing_score"] is True assert response_data["last_score_timestamp"] is None assert response_data["expiration_timestamp"] is None - assert response_data["stamp_scores"] is None + assert response_data["stamps"] is None def test_get_historical_score_no_score_found( self, diff --git a/api/v2/test/test_passport_submission.py b/api/v2/test/test_passport_submission.py index f1a4560da..aa533925e 100644 --- a/api/v2/test/test_passport_submission.py +++ b/api/v2/test/test_passport_submission.py @@ -1,6 +1,7 @@ import copy from datetime import datetime, timedelta, timezone from decimal import Decimal +from math import e, exp from unittest.mock import patch from django.conf import settings @@ -445,7 +446,7 @@ def test_submitting_without_passport(self, aget_passport, validate_credential): "last_score_timestamp": None, "expiration_timestamp": None, "error": "No Passport found for this address.", - "stamp_scores": {}, + "stamps": {}, } @patch("registry.atasks.validate_credential", side_effect=[[], [], [], []]) @@ -471,7 +472,18 @@ def test_submit_passport_multiple_times( "expiration_timestamp": mock_passport_expiration_date.isoformat(), "threshold": "20.00000", "error": None, - "stamp_scores": {"Ens": "0.408", "Google": "0.525"}, + "stamps": { + "Ens": { + "dedup": False, + "expiration_date": mock_passport_expiration_date.isoformat(), + "score": "0.40800", + }, + "Google": { + "dedup": False, + "expiration_date": mock_passport_expiration_date.isoformat(), + "score": "0.52500", + }, + }, } expected2ndResponse = { @@ -482,7 +494,18 @@ def test_submit_passport_multiple_times( "expiration_timestamp": mock_passport_expiration_date.isoformat(), "threshold": "20.00000", "error": None, - "stamp_scores": {"Ens": "0.408", "Google": "0.525"}, + "stamps": { + "Ens": { + "dedup": False, + "expiration_date": mock_passport_expiration_date.isoformat(), + "score": "0.40800", + }, + "Google": { + "dedup": False, + "expiration_date": mock_passport_expiration_date.isoformat(), + "score": "0.52500", + }, + }, } # First submission @@ -569,6 +592,33 @@ def test_submit_passport_with_binary_scorer_above_threshold( for s in mock_passport["stamps"] ] + expiration_date_map = {} + for stamp in mock_passport["stamps"]: + expiration_date_map[stamp["provider"]] = datetime.fromisoformat( + stamp["credential"]["expirationDate"] + ) + + expecred_result = { + "score": expected_score, + "passing_score": True, + "address": "0xb81c935d01e734b3d8bb233f5c4e1d72dbc30f6c", + "error": None, + "expiration_timestamp": min(expiration_date_list).isoformat(), + "last_score_timestamp": get_utc_time().isoformat(), + "stamps": { + "Ens": { + "dedup": False, + "expiration_date": expiration_date_map["Ens"].isoformat(), + "score": "1.00000", + }, + "Google": { + "dedup": False, + "expiration_date": expiration_date_map["Google"].isoformat(), + "score": "1.00000", + }, + }, + "threshold": "2.00000", + } # First submission response = self.client.get( f"{self.base_url}/{self.community.pk}/score/{self.account.address}", @@ -577,18 
+627,213 @@ def test_submit_passport_with_binary_scorer_above_threshold( ) self.assertEqual(response.status_code, 200) response_json = response.json() - self.assertEqual( - response_json, + + self.assertEqual(response_json, expecred_result) + + @patch("registry.atasks.validate_credential", side_effect=[[], [], [], []]) + @patch( + "registry.atasks.get_utc_time", + return_value=datetime.fromisoformat("2023-01-11T16:35:23.938006+00:00"), + ) + @patch( + "registry.atasks.aget_passport", + side_effect=[copy.deepcopy(mock_passport), copy.deepcopy(mock_passport)], + ) + def test_submit_passport_with_binary_scorer_below_threshold( + self, aget_passport, get_utc_time, validate_credential + ): + """Verify that submitting the same address multiple times only registers each stamp once, and gives back the same score""" + + expected_score = "2.00000" + + scorer = BinaryWeightedScorer.objects.create( + threshold=20, + weights={"FirstEthTxnProvider": 1.0, "Google": 1, "Ens": 1.0}, + type=Scorer.Type.WEIGHTED_BINARY, + ) + + self.community.scorer = scorer + self.community.save() + expiration_date_list = [ + datetime.fromisoformat(s["credential"]["expirationDate"]) + for s in mock_passport["stamps"] + ] + expiration_date_map = {} + for stamp in mock_passport["stamps"]: + expiration_date_map[stamp["provider"]] = datetime.fromisoformat( + stamp["credential"]["expirationDate"] + ) + expected_result = ( { "score": expected_score, - "passing_score": True, + "passing_score": False, "address": "0xb81c935d01e734b3d8bb233f5c4e1d72dbc30f6c", "error": None, "expiration_timestamp": min(expiration_date_list).isoformat(), "last_score_timestamp": get_utc_time().isoformat(), - "stamp_scores": {"Ens": "1.0", "Google": "1.0"}, - "threshold": "2.00000", + "stamps": { + "Ens": { + "score": "1.0", + "dedup": False, + "expiration_date": expiration_date_map["Ens"].isoformat(), + }, + "Google": { + "score": "1.0", + "dedup": False, + "expiration_date": expiration_date_map["Google"].isoformat(), + }, + }, + "threshold": "20.00000", + }, + ) + # First submission + response = self.client.get( + f"{self.base_url}/{self.community.pk}/score/{self.account.address}", + content_type="application/json", + HTTP_AUTHORIZATION=f"Token {self.secret}", + ) + self.assertEqual(response.status_code, 200) + response_json = response.json() + + self.assertEqual(response_json, expected_result) + + # TODO: add tests that verifies that returned threshold is from score when not resdcoring (theoretically threshold could change ...) 
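The expected bodies in these submission tests follow from the merge performed in `acalculate_score` (see the `registry/atasks.py` hunk earlier in this patch): providers that earned points keep their weight and real expiration date, while providers present only in `clashing_stamps` are reported with a zero score and `dedup: True`. A standalone sketch of that merge over simplified inputs; the function name `build_stamps_payload` is ours, the field names come from the hunk:

from datetime import datetime
from decimal import Decimal
from typing import Dict


def build_stamps_payload(
    stamp_scores: Dict[str, Decimal],
    stamp_expiration_dates: Dict[str, datetime],
    clashing_stamps: Dict[str, dict],
) -> Dict[str, dict]:
    """Build the per-provider `stamps` map saved on the Score."""
    stamps = {}
    for provider, score in stamp_scores.items():
        clash = clashing_stamps.get(provider)
        stamps[provider] = {
            "score": f"{Decimal(score):.5f}",
            "dedup": clash is not None,
            "expiration_date": clash["credential"]["expirationDate"]
            if clash
            else stamp_expiration_dates[provider].isoformat(),
        }
    for provider, stamp in clashing_stamps.items():
        if provider not in stamps:
            # Deduplicated stamps that earned no points are still surfaced,
            # flagged with dedup=True and a zero score.
            stamps[provider] = {
                "score": "0.00000",
                "dedup": True,
                "expiration_date": stamp["credential"]["expirationDate"],
            }
    return stamps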
+ + @patch("registry.atasks.validate_credential", side_effect=[[], [], [], []]) + @patch( + "registry.atasks.get_utc_time", + return_value=datetime.fromisoformat("2023-01-11T16:35:23.938006+00:00"), + ) + @patch( + "registry.atasks.aget_passport", + side_effect=[copy.deepcopy(mock_passport), copy.deepcopy(mock_passport)], + ) + def test_submit_passport_with_non_binary_scorer_above_threshold( + self, aget_passport, get_utc_time, validate_credential + ): + """Verify that submitting the same address multiple times only registers each stamp once, and gives back the same score""" + + expected_score = "22.000" + + scorer = WeightedScorer.objects.create( + weights={"FirstEthTxnProvider": 11.0, "Google": 11, "Ens": 11.0}, + type=Scorer.Type.WEIGHTED, + ) + self.community.scorer = scorer + self.community.save() + expiration_date_list = [ + datetime.fromisoformat(s["credential"]["expirationDate"]) + for s in mock_passport["stamps"] + ] + expiration_date_map = {} + for stamp in mock_passport["stamps"]: + expiration_date_map[stamp["provider"]] = datetime.fromisoformat( + stamp["credential"]["expirationDate"] + ) + expected_result = { + "score": expected_score, + "passing_score": True, + "address": "0xb81c935d01e734b3d8bb233f5c4e1d72dbc30f6c", + "error": None, + "expiration_timestamp": min(expiration_date_list).isoformat(), + "last_score_timestamp": get_utc_time().isoformat(), + "stamps": { + "Ens": { + "score": "11.0", + "dedup": False, + "expiration_date": expiration_date_map["Ens"].isoformat(), + }, + "Google": { + "score": "11.0", + "dedup": False, + "expiration_date": expiration_date_map["Google"].isoformat(), + }, + }, + "threshold": "20.00000", + } + # First submission + response = self.client.get( + f"{self.base_url}/{self.community.pk}/score/{self.account.address}", + content_type="application/json", + HTTP_AUTHORIZATION=f"Token {self.secret}", + ) + self.assertEqual(response.status_code, 200) + response_json = response.json() + self.assertEqual(response_json["score"], expected_score) + self.assertEqual(response_json["passing_score"], True) + self.assertEqual(response_json, expected_result) + + @patch("registry.atasks.validate_credential", side_effect=[[], [], [], []]) + @patch( + "registry.atasks.get_utc_time", + return_value=datetime.fromisoformat("2023-01-11T16:35:23.938006+00:00"), + ) + @patch( + "registry.atasks.aget_passport", + side_effect=[copy.deepcopy(mock_passport), copy.deepcopy(mock_passport)], + ) + def test_submit_passport_with_non_binary_scorer_below_threshold( + self, aget_passport, get_utc_time, validate_credential + ): + """Verify that submitting the same address multiple times only registers each stamp once, and gives back the same score""" + + expected_score = "2.00000" + + scorer = WeightedScorer.objects.create( + weights={"FirstEthTxnProvider": 1.0, "Google": 1.0, "Ens": 1.0}, + type=Scorer.Type.WEIGHTED, + ) + self.community.scorer = scorer + self.community.save() + expiration_date_list = [ + datetime.fromisoformat(s["credential"]["expirationDate"]) + for s in mock_passport["stamps"] + ] + + # First submission + response = self.client.get( + f"{self.base_url}/{self.community.pk}/score/{self.account.address}", + content_type="application/json", + HTTP_AUTHORIZATION=f"Token {self.secret}", + ) + self.assertEqual(response.status_code, 200) + response_json = response.json() + expected_response = { + "score": expected_score, + "passing_score": False, + "address": "0xb81c935d01e734b3d8bb233f5c4e1d72dbc30f6c", + "error": None, + "expiration_timestamp": 
min(expiration_date_list).isoformat(), + "last_score_timestamp": get_utc_time().isoformat(), + "stamps": { + "Ens": { + "dedup": False, + "expiration_date": next( + datetime.fromisoformat( + stamp["credential"]["expirationDate"] + ).isoformat() + for stamp in mock_passport["stamps"] + if stamp["provider"] == "Ens" + ), + "score": "1.00000", + }, + "Google": { + "dedup": False, + "expiration_date": next( + datetime.fromisoformat( + stamp["credential"]["expirationDate"] + ).isoformat() + for stamp in mock_passport["stamps"] + if stamp["provider"] == "Google" + ), + "score": "1.00000", + }, }, + "threshold": "20.00000", + } + self.assertEqual( + response_json, + expected_response, ) @patch("registry.atasks.validate_credential", side_effect=[[], [], [], []]) @@ -619,6 +864,32 @@ def test_submit_passport_with_binary_scorer_below_threshold( datetime.fromisoformat(s["credential"]["expirationDate"]) for s in mock_passport["stamps"] ] + expiration_date_map = {} + for stamp in mock_passport["stamps"]: + expiration_date_map[stamp["provider"]] = datetime.fromisoformat( + stamp["credential"]["expirationDate"] + ) + expected_result = { + "score": expected_score, + "passing_score": False, + "address": "0xb81c935d01e734b3d8bb233f5c4e1d72dbc30f6c", + "error": None, + "expiration_timestamp": min(expiration_date_list).isoformat(), + "last_score_timestamp": get_utc_time().isoformat(), + "stamps": { + "Ens": { + "score": "1.00000", + "dedup": False, + "expiration_date": expiration_date_map["Ens"].isoformat(), + }, + "Google": { + "score": "1.00000", + "dedup": False, + "expiration_date": expiration_date_map["Google"].isoformat(), + }, + }, + "threshold": "20.00000", + } # First submission response = self.client.get( f"{self.base_url}/{self.community.pk}/score/{self.account.address}", @@ -627,19 +898,7 @@ def test_submit_passport_with_binary_scorer_below_threshold( ) self.assertEqual(response.status_code, 200) response_json = response.json() - self.assertEqual( - response_json, - { - "score": expected_score, - "passing_score": False, - "address": "0xb81c935d01e734b3d8bb233f5c4e1d72dbc30f6c", - "error": None, - "expiration_timestamp": min(expiration_date_list).isoformat(), - "last_score_timestamp": get_utc_time().isoformat(), - "stamp_scores": {"Ens": "1.0", "Google": "1.0"}, - "threshold": "20.00000", - }, - ) + self.assertEqual(response_json, expected_result) # TODO: add tests that verifies that returned threshold is from score when not resdcoring (theoretically threshold could change ...) 
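These expected results also rely on the `stamp_expiration_dates` map that the weighted scorers now return alongside `sum_of_weights` (see the `scorer_weighted/computation.py` hunks): each scored provider records its own expiration, and the score's `expiration_timestamp` is the earliest of them. A condensed sketch of that bookkeeping, using a stripped-down stamp dict rather than the real `Stamp` model:

from datetime import datetime
from decimal import Decimal
from typing import Dict, List, Optional


def weigh_stamps(stamps: List[dict], weights: Dict[str, str]) -> dict:
    """Each stamp is {"provider": ..., "credential": {"expirationDate": iso8601}}."""
    sum_of_weights = Decimal(0)
    earned_points: Dict[str, Decimal] = {}
    stamp_expiration_dates: Dict[str, datetime] = {}
    earliest: Optional[datetime] = None
    seen = set()
    for stamp in stamps:
        provider = stamp["provider"]
        if provider in seen:
            continue  # each provider is only counted once
        seen.add(provider)
        weight = Decimal(weights.get(provider, 0))
        sum_of_weights += weight
        earned_points[provider] = weight
        expires = datetime.fromisoformat(stamp["credential"]["expirationDate"])
        stamp_expiration_dates[provider] = expires
        # The score expires when the earliest-expiring scored stamp does.
        if earliest is None or expires < earliest:
            earliest = expires
    return {
        "sum_of_weights": sum_of_weights,
        "earned_points": earned_points,
        "expiration_date": earliest,
        "stamp_expiration_dates": stamp_expiration_dates,
    }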
@@ -669,6 +928,32 @@ def test_submit_passport_with_non_binary_scorer_above_threshold( datetime.fromisoformat(s["credential"]["expirationDate"]) for s in mock_passport["stamps"] ] + expiration_date_map = {} + for stamp in mock_passport["stamps"]: + expiration_date_map[stamp["provider"]] = datetime.fromisoformat( + stamp["credential"]["expirationDate"] + ) + expected_result = { + "score": expected_score, + "passing_score": True, + "address": "0xb81c935d01e734b3d8bb233f5c4e1d72dbc30f6c", + "error": None, + "expiration_timestamp": min(expiration_date_list).isoformat(), + "last_score_timestamp": get_utc_time().isoformat(), + "stamps": { + "Ens": { + "dedup": False, + "expiration_date": expiration_date_map["Ens"].isoformat(), + "score": "11.00000", + }, + "Google": { + "dedup": False, + "expiration_date": expiration_date_map["Google"].isoformat(), + "score": "11.00000", + }, + }, + "threshold": "20.00000", + } # First submission response = self.client.get( @@ -680,19 +965,8 @@ def test_submit_passport_with_non_binary_scorer_above_threshold( response_json = response.json() self.assertEqual(response_json["score"], expected_score) self.assertEqual(response_json["passing_score"], True) - self.assertEqual( - response_json, - { - "score": expected_score, - "passing_score": True, - "address": "0xb81c935d01e734b3d8bb233f5c4e1d72dbc30f6c", - "error": None, - "expiration_timestamp": min(expiration_date_list).isoformat(), - "last_score_timestamp": get_utc_time().isoformat(), - "stamp_scores": {"Ens": "11.0", "Google": "11.0"}, - "threshold": "20.00000", - }, - ) + + self.assertEqual(response_json, expected_result) @patch("registry.atasks.validate_credential", side_effect=[[], [], [], []]) @patch( @@ -720,7 +994,33 @@ def test_submit_passport_with_non_binary_scorer_below_threshold( datetime.fromisoformat(s["credential"]["expirationDate"]) for s in mock_passport["stamps"] ] - + expiration_date_map = {} + for stamp in mock_passport["stamps"]: + expiration_date_map[stamp["provider"]] = datetime.fromisoformat( + stamp["credential"]["expirationDate"] + ) + + expected_result = { + "score": expected_score, + "passing_score": False, + "address": "0xb81c935d01e734b3d8bb233f5c4e1d72dbc30f6c", + "error": None, + "expiration_timestamp": min(expiration_date_list).isoformat(), + "last_score_timestamp": get_utc_time().isoformat(), + "stamps": { + "Ens": { + "dedup": False, + "expiration_date": expiration_date_map["Ens"].isoformat(), + "score": "1.00000", + }, + "Google": { + "dedup": False, + "expiration_date": expiration_date_map["Google"].isoformat(), + "score": "1.00000", + }, + }, + "threshold": "20.00000", + } # First submission response = self.client.get( f"{self.base_url}/{self.community.pk}/score/{self.account.address}", @@ -729,19 +1029,7 @@ def test_submit_passport_with_non_binary_scorer_below_threshold( ) self.assertEqual(response.status_code, 200) response_json = response.json() - self.assertEqual( - response_json, - { - "score": expected_score, - "passing_score": False, - "address": "0xb81c935d01e734b3d8bb233f5c4e1d72dbc30f6c", - "error": None, - "expiration_timestamp": min(expiration_date_list).isoformat(), - "last_score_timestamp": get_utc_time().isoformat(), - "stamp_scores": {"Ens": "1.0", "Google": "1.0"}, - "threshold": "20.00000", - }, - ) + self.assertEqual(response_json, expected_result) def test_submit_passport_accepts_scorer_id(self): """