# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
"""Models describing a card-network tokenization and its lifecycle events."""
from typing import List, Union, Optional
from datetime import datetime
from typing_extensions import Literal
from .._models import BaseModel
from .tokenization_tfa_reason import TokenizationTfaReason
from .tokenization_rule_result import TokenizationRuleResult
from .tokenization_decline_reason import TokenizationDeclineReason

# Public API of this module: the Tokenization model and its nested Event model.
__all__ = ["Tokenization", "Event"]
class Event(BaseModel):
    # One entry in a Tokenization's event history (see Tokenization.events).
    # Every field is optional: event payloads may omit any of them.

    token: Optional[str] = None
    """Globally unique identifier for a Tokenization Event"""

    created_at: Optional[datetime] = None
    """Date and time when the tokenization event first occurred. UTC time zone."""

    # Outcome reported for this event by the network / decisioning flow.
    result: Optional[
        Literal[
            "APPROVED", "DECLINED", "NOTIFICATION_DELIVERED",
            "REQUIRE_ADDITIONAL_AUTHENTICATION", "TOKEN_ACTIVATED",
            "TOKEN_CREATED", "TOKEN_DEACTIVATED",
            "TOKEN_DELETED_FROM_CONSUMER_APP", "TOKEN_INACTIVE",
            "TOKEN_STATE_UNKNOWN", "TOKEN_SUSPENDED", "TOKEN_UPDATED",
        ]
    ] = None
    """Enum representing the result of the tokenization event"""

    rule_results: Optional[List[TokenizationRuleResult]] = None
    """Results from rules that were evaluated for this tokenization"""

    tokenization_decline_reasons: Optional[List[TokenizationDeclineReason]] = None
    """List of reasons why the tokenization was declined"""

    tokenization_tfa_reasons: Optional[List[TokenizationTfaReason]] = None
    """List of reasons why two-factor authentication was required"""

    # Kind of lifecycle step this event represents.
    type: Optional[
        Literal[
            "TOKENIZATION_2FA", "TOKENIZATION_AUTHORIZATION",
            "TOKENIZATION_DECISIONING", "TOKENIZATION_ELIGIBILITY_CHECK",
            "TOKENIZATION_UPDATED",
        ]
    ] = None
    """Enum representing the type of tokenization event that occurred"""
class Tokenization(BaseModel):
    # Top-level record for a card tokenization, either by a digital wallet
    # or by a merchant. Required fields first, then optional ones.

    token: str
    """Globally unique identifier for a Tokenization"""

    account_token: str
    """The account token associated with the card being tokenized."""

    card_token: str
    """The card token associated with the card being tokenized."""

    created_at: datetime
    """Date and time when the tokenization first occurred. UTC time zone."""

    status: Literal[
        "ACTIVE",
        "DEACTIVATED",
        "INACTIVE",
        "PAUSED",
        "PENDING_2FA",
        "PENDING_ACTIVATION",
        "UNKNOWN",
    ]
    """The status of the tokenization request"""

    # Known wallet types are enumerated; any other requestor arrives as a
    # free-form merchant name, hence the trailing `str` in the Union.
    token_requestor_name: Union[
        Literal[
            "AMAZON_ONE", "ANDROID_PAY", "APPLE_PAY", "FACEBOOK",
            "FITBIT_PAY", "GARMIN_PAY", "GOOGLE_PAY", "MICROSOFT_PAY",
            "NETFLIX", "SAMSUNG_PAY", "UNKNOWN", "VISA_CHECKOUT",
        ],
        str,
    ]
    """The entity that requested the tokenization.

    For digital wallets, this will be one of the defined wallet types. For merchant
    tokenizations, this will be a free-form merchant name string.
    """

    token_unique_reference: str
    """The network's unique reference for the tokenization."""

    tokenization_channel: Literal["DIGITAL_WALLET", "MERCHANT"]
    """The channel through which the tokenization was made."""

    updated_at: datetime
    """Latest date and time when the tokenization was updated. UTC time zone."""

    device_id: Optional[str] = None
    """The device identifier associated with the tokenization."""

    digital_card_art_token: Optional[str] = None
    """
    Specifies the digital card art displayed in the user's digital wallet after
    tokenization. This will be null if the tokenization was created without an
    associated digital card art. See
    [Flexible Card Art Guide](https://docs.lithic.com/docs/about-digital-wallets#flexible-card-art).
    """

    events: Optional[List[Event]] = None
    """A list of events related to the tokenization."""

    payment_account_reference_id: Optional[str] = None
    """The network's unique reference for the card that is tokenized."""